Skip to content

Commit

Permalink
feat: encoded blobs
Browse files Browse the repository at this point in the history
  • Loading branch information
Maddiaa0 committed Jan 17, 2025
1 parent b43f6e6 commit 3c416c2
Show file tree
Hide file tree
Showing 7 changed files with 326 additions and 248 deletions.
18 changes: 8 additions & 10 deletions yarn-project/blob-sink/src/server/server.test.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { Blob } from '@aztec/foundation/blob';
import { Blob, makeEncodedBlob } from '@aztec/foundation/blob';
import { Fr } from '@aztec/foundation/fields';

import request from 'supertest';
Expand All @@ -20,10 +20,8 @@ describe('BlobSinkService', () => {
});

describe('should store and retrieve a blob sidecar', () => {
const testFields = [Fr.random(), Fr.random(), Fr.random()];
const testFields2 = [Fr.random(), Fr.random(), Fr.random()];
const blob = Blob.fromFields(testFields);
const blob2 = Blob.fromFields(testFields2);
const blob = makeEncodedBlob(3);
const blob2 = makeEncodedBlob(3);
const blockId = '0x1234';

beforeEach(async () => {
Expand Down Expand Up @@ -56,9 +54,9 @@ describe('BlobSinkService', () => {

// Convert the response blob back to a Blob object and verify it matches
const retrievedBlobs = getResponse.body.data;
const retrievedBlob = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
const retrievedBlob2 = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[1].blob.slice(2), 'hex'));

const retrievedBlob = Blob.fromBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
const retrievedBlob2 = Blob.fromBlobBuffer(Buffer.from(retrievedBlobs[1].blob.slice(2), 'hex'));
expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString());
expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex'));
expect(retrievedBlob.proof.toString('hex')).toBe(blob.proof.toString('hex'));
Expand All @@ -78,8 +76,8 @@ describe('BlobSinkService', () => {
expect(getWithIndicies.body.data.length).toBe(2);

const retrievedBlobs = getWithIndicies.body.data;
const retrievedBlob = Blob.fromBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
const retrievedBlob2 = Blob.fromBlobBuffer(Buffer.from(retrievedBlobs[1].blob.slice(2), 'hex'));
const retrievedBlob = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
const retrievedBlob2 = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[1].blob.slice(2), 'hex'));
expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString());
expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex'));
expect(retrievedBlob.proof.toString('hex')).toBe(blob.proof.toString('hex'));
Expand All @@ -96,7 +94,7 @@ describe('BlobSinkService', () => {
expect(getWithIndicies.body.data.length).toBe(1);

const retrievedBlobs = getWithIndicies.body.data;
const retrievedBlob = Blob.fromBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
const retrievedBlob = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
expect(retrievedBlob.fieldsHash.toString()).toBe(blob2.fieldsHash.toString());
expect(retrievedBlob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex'));
expect(retrievedBlob.proof.toString('hex')).toBe(blob2.proof.toString('hex'));
Expand Down
7 changes: 1 addition & 6 deletions yarn-project/circuit-types/src/tx_effect.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@ import {
REVERT_CODE_PREFIX,
RevertCode,
TX_FEE_PREFIX,
TX_START_PREFIX,
UNENCRYPTED_LOGS_PREFIX,
} from '@aztec/circuits.js';
import { type FieldsOf, makeTuple } from '@aztec/foundation/array';
import { toBufferBE } from '@aztec/foundation/bigint-buffer';
import { TX_EFFECT_PREFIX_BYTE_LENGTH, TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH } from '@aztec/foundation/blob';
import { padArrayEnd } from '@aztec/foundation/collection';
import { sha256Trunc } from '@aztec/foundation/crypto';
import { jsonStringify } from '@aztec/foundation/json-rpc';
Expand All @@ -42,11 +42,6 @@ import { TxHash } from './tx/tx_hash.js';

export { RevertCodeEnum } from '@aztec/circuits.js';

// These are helper constants to decode tx effects from blob encoded fields
const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2;
// 7 bytes for: | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode |
const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7;

export class TxEffect {
constructor(
/**
Expand Down
200 changes: 200 additions & 0 deletions yarn-project/foundation/src/blob/blob.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,200 @@
// Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err:
import cKzg from 'c-kzg';
import type { Blob as BlobBuffer } from 'c-kzg';

import { poseidon2Hash, sha256 } from '../crypto/index.js';
import { Fr } from '../fields/index.js';
import { BufferReader, serializeToBuffer } from '../serialize/index.js';
import { deserializeEncodedBlobFields } from './encoding.js';

/* eslint-disable import/no-named-as-default-member */
const { BYTES_PER_BLOB, FIELD_ELEMENTS_PER_BLOB, blobToKzgCommitment, computeKzgProof, verifyKzgProof } = cKzg;

// The prefix to the EVM blobHash, defined here: https://eips.ethereum.org/EIPS/eip-4844#specification
// (kzg_to_versioned_hash overwrites the first sha256 byte with this version marker.)
export const VERSIONED_HASH_VERSION_KZG = 0x01;

/**
* A class to create, manage, and prove EVM blobs.
*/
/**
 * A class to create, manage, and prove EVM (EIP-4844) blobs.
 *
 * Wraps the raw blob bytes together with everything needed to open it on L1:
 * the poseidon2 hash of its fields, the challenge point z, the evaluation
 * y = p(z), the KZG commitment C, and the KZG opening proof.
 */
export class Blob {
  constructor(
    /** The blob to be broadcast on L1 in bytes form. */
    public readonly data: BlobBuffer,
    /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
    public readonly fieldsHash: Fr,
    /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y. */
    public readonly challengeZ: Fr,
    /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */
    public readonly evaluationY: Buffer,
    /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
    public readonly commitment: Buffer,
    /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */
    public readonly proof: Buffer,
  ) {}

  /** Decodes an encoded blob buffer into its (sparse) fields and builds a proven Blob from them. */
  static fromEncodedBlobBuffer(blob: BlobBuffer, multiBlobFieldsHash?: Fr): Blob {
    const fields: Fr[] = deserializeEncodedBlobFields(blob);
    return Blob.fromFields(fields, multiBlobFieldsHash);
  }

  /**
   * Builds a Blob from field elements: computes the fields hash, challenge z,
   * KZG commitment, opening proof, and evaluation y, and verifies the proof.
   * @param fields - Field elements to pack (at most FIELD_ELEMENTS_PER_BLOB).
   * @param multiBlobFieldsHash - Optional shared fields hash used when the fields span multiple blobs.
   * @throws If too many fields are supplied or the computed KZG proof fails verification.
   */
  static fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Blob {
    if (fields.length > FIELD_ELEMENTS_PER_BLOB) {
      throw new Error(
        `Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`,
      );
    }

    // Right-pad the serialized fields with zero bytes up to the fixed blob size.
    const data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB);

    // This matches the output of SpongeBlob.squeeze() in the blob circuit
    const fieldsHash = multiBlobFieldsHash ?? poseidon2Hash(fields);
    const commitment = Buffer.from(blobToKzgCommitment(data));
    const challengeZ = poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]);
    const res = computeKzgProof(data, challengeZ.toBuffer());
    if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) {
      throw new Error(`KZG proof did not verify.`);
    }
    const proof = Buffer.from(res[0]);
    const evaluationY = Buffer.from(res[1]);

    return new Blob(data, fieldsHash, challengeZ, evaluationY, commitment, proof);
  }

  // TODO: add unit test
  /**
   * Builds a Blob from a beacon-style JSON sidecar and checks the recomputed commitment matches.
   * @throws If the recomputed KZG commitment differs from json.kzg_commitment.
   */
  static fromJson(json: { blob: string; kzg_commitment: string; kzg_proof: string }): Blob {
    const blobBuffer = Buffer.from(json.blob.slice(2), 'hex');

    const blob = Blob.fromEncodedBlobBuffer(blobBuffer);

    if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) {
      throw new Error('KZG commitment does not match');
    }
    // NOTE(review): json.kzg_proof is accepted but never checked against the recomputed proof — confirm intent.

    return blob;
  }

  // TODO: think if naming should change for encoded / non encoded blob payloads
  /** Alias of toEncodedFields(); kept so existing callers of toFields() keep working. */
  toFields(): Fr[] {
    return this.toEncodedFields();
  }

  /** Decodes this blob's data back into its (sparse) field representation. */
  toEncodedFields(): Fr[] {
    return deserializeEncodedBlobFields(this.data);
  }

  // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
  commitmentToFields(): [Fr, Fr] {
    return commitmentToFields(this.commitment);
  }

  // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
  getEthVersionedBlobHash(): Buffer {
    // Delegate to the static helper so the hashing logic lives in exactly one place.
    return Blob.getEthVersionedBlobHash(this.commitment);
  }

  /** Static variant of getEthVersionedBlobHash for when only a commitment is at hand. */
  static getEthVersionedBlobHash(commitment: Buffer): Buffer {
    const hash = sha256(commitment);
    hash[0] = VERSIONED_HASH_VERSION_KZG;
    return hash;
  }

  /** Serializes the blob plus all proof material; each variable-length part is length-prefixed. */
  toBuffer(): Buffer {
    return Buffer.from(
      serializeToBuffer(
        this.data.length,
        this.data,
        this.fieldsHash,
        this.challengeZ,
        this.evaluationY.length,
        this.evaluationY,
        this.commitment.length,
        this.commitment,
        this.proof.length,
        this.proof,
      ),
    );
  }

  /** Deserializes a Blob previously written with toBuffer(). */
  static fromBuffer(buf: Buffer | BufferReader): Blob {
    const reader = BufferReader.asReader(buf);
    return new Blob(
      reader.readUint8Array(),
      reader.readObject(Fr),
      reader.readObject(Fr),
      reader.readBuffer(),
      reader.readBuffer(),
      reader.readBuffer(),
    );
  }

  /**
   * Get the size of the blob in bytes
   */
  getSize() {
    return this.data.length;
  }

  // Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile:
  // * input[:32] - versioned_hash
  // * input[32:64] - z
  // * input[64:96] - y
  // * input[96:144] - commitment C
  // * input[144:192] - proof (a commitment to the quotient polynomial q(X))
  // See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
  getEthBlobEvaluationInputs(): `0x${string}` {
    const buf = Buffer.concat([
      this.getEthVersionedBlobHash(),
      this.challengeZ.toBuffer(),
      this.evaluationY,
      this.commitment,
      this.proof,
    ]);
    return `0x${buf.toString('hex')}`;
  }

  /**
   * Multi-blob variant of getEthBlobEvaluationInputs: a single 1-byte blob count
   * followed by each blob's evaluation inputs, concatenated in order.
   */
  static getEthBlobEvaluationInputs(blobs: Blob[]): `0x${string}` {
    // For multiple blobs, we prefix the number of blobs:
    const lenBuf = Buffer.alloc(1);
    lenBuf.writeUint8(blobs.length);
    // Single concat instead of repeated re-concatenation per blob.
    const buf = Buffer.concat([
      lenBuf,
      ...blobs.map(blob =>
        Buffer.concat([
          blob.getEthVersionedBlobHash(),
          blob.challengeZ.toBuffer(),
          blob.evaluationY,
          blob.commitment,
          blob.proof,
        ]),
      ),
    ]);
    return `0x${buf.toString('hex')}`;
  }

  /** KZG callbacks in the shape viem expects for blob transactions. */
  static getViemKzgInstance() {
    return {
      blobToKzgCommitment: cKzg.blobToKzgCommitment,
      computeBlobKzgProof: cKzg.computeBlobKzgProof,
    };
  }

  // Returns as many blobs as we require to broadcast the given fields
  // Assumes we share the fields hash between all blobs
  static getBlobs(fields: Fr[]): Blob[] {
    // Always produce at least one (possibly empty) blob.
    const numBlobs = Math.max(Math.ceil(fields.length / FIELD_ELEMENTS_PER_BLOB), 1);
    const multiBlobFieldsHash = poseidon2Hash(fields);
    const res = [];
    for (let i = 0; i < numBlobs; i++) {
      const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB;
      res.push(Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
    }
    return res;
  }
}

/** Splits a 48-byte compressed BLS12 commitment into two BN254 fields: bytes [0, 31) and [31, 48). */
function commitmentToFields(commitment: Buffer): [Fr, Fr] {
  const firstChunk = commitment.subarray(0, 31);
  const secondChunk = commitment.subarray(31, 48);
  return [new Fr(firstChunk), new Fr(secondChunk)];
}
71 changes: 71 additions & 0 deletions yarn-project/foundation/src/blob/encoding.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import { Fr } from '@aztec/foundation/fields';
import { BufferReader, FieldReader } from '@aztec/foundation/serialize';

import type { Blob as BlobBuffer } from 'c-kzg';

// 8-byte magic marker written at the start of each transaction's effects inside a blob
// (presumably an ASCII tag packed into a bigint — confirm against the blob circuit encoder).
export const TX_START_PREFIX = 8392562855083340404n;
// These are helper constants to decode tx effects from blob encoded fields
// Byte length of the magic marker: two hex digits per byte.
export const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2;
// 7 bytes for: | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode |
export const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7;

/**
 * Deserializes a blob buffer into an array of field elements.
 *
 * Blobs are converted into BN254 fields to perform a poseidon2 hash on them (fieldHash).
 * This method is sparse, meaning it does not include trailing zeros at the end of the blob.
 *
 * However, we cannot simply trim the zeros from the end of the blob, as some logs may include zeros
 * within them.
 * If we end on a set of zeros, such as the log below:
 * length 7: [ a, b, c, d, e, 0, 0]
 *
 * we will end up with the incorrect hash if we trim the zeros from the end.
 *
 * Each transaction's logs contain a TX start prefix, which includes a string followed
 * by the length (in field elements) of the transaction's log.
 *
 * This function finds the end of the last transaction's logs, and returns the array up to this point.
 *
 * We search for a series of Tx Prefixes progressing the cursor in the field reader until we hit
 * a field that is not a Tx Prefix, this indicates that we have reached the end of the last transaction's logs.
 *
 * +------------------+------------------+------------------+------------------+
 * | TX1 Start Prefix | TX1 Log Fields   | TX2 Start Prefix | Padded zeros     |
 * | [3 a,b,c]        | [3, a, b, c]     | [4 d,e,f,g]      | [0, 0, 0, .., 0] |
 * +------------------+------------------+------------------+------------------+
 *                                                          ^
 *                                                          |
 *                       Function reads until here ----------
 *
 * @param blob - The blob buffer to deserialize.
 * @returns An array of field elements.
 */
export function deserializeEncodedBlobFields(blob: BlobBuffer): Fr[] {
  // Convert blob buffer to array of field elements
  const reader = BufferReader.asReader(blob);
  const array = reader.readArray(blob.length >> 5, Fr); // >> 5 = / 32 (bytes per field)
  const fieldReader = FieldReader.asReader(array);

  // Read fields until we hit zeros at the end
  while (!fieldReader.isFinished()) {
    // peekField does not advance the cursor; only skip() below moves it.
    const currentField = fieldReader.peekField();

    // Stop when we hit a zero field
    if (currentField.isZero()) {
      break;
    }

    // Skip the remaining fields in this transaction
    // NOTE(review): assumes the decoded length counts the prefix field itself,
    // so skip(len) lands exactly on the next tx's prefix — confirm against the encoder.
    const len = getLengthFromFirstField(currentField);
    fieldReader.skip(len);
  }

  // Return array up to last non-zero field
  return array.slice(0, fieldReader.cursor);
}

/**
 * Extracts a transaction's length (in field elements) from its blob-encoded prefix field.
 * The two length bytes sit immediately after the TX start magic (plus one zero byte).
 */
export function getLengthFromFirstField(firstField: Fr): number {
  // The prefix occupies the trailing TX_EFFECT_PREFIX_BYTE_LENGTH bytes of the field.
  const prefixBytes = firstField.toBuffer().subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
  const lengthOffset = TX_START_PREFIX_BYTES_LENGTH + 1;
  const lengthBytes = prefixBytes.subarray(lengthOffset, lengthOffset + 2);
  return new Fr(lengthBytes).toNumber();
}
Loading

0 comments on commit 3c416c2

Please sign in to comment.