feat: encoded blobs
Maddiaa0 committed Jan 17, 2025
1 parent f9745ad commit 3072aa3
Showing 9 changed files with 332 additions and 260 deletions.
11 changes: 3 additions & 8 deletions yarn-project/blob-sink/src/client/blob-sink-client-tests.ts
@@ -1,4 +1,4 @@
import { Blob } from '@aztec/foundation/blob';
import { Blob, makeEncodedBlob } from '@aztec/foundation/blob';
import { Fr } from '@aztec/foundation/fields';

import { type BlobSinkClientInterface } from './interface.js';
@@ -25,8 +25,7 @@ export function runBlobSinkClientTests(
});

it('should send and retrieve blobs', async () => {
const testFields = [Fr.random(), Fr.random(), Fr.random()];
const blob = Blob.fromFields(testFields);
const blob = makeEncodedBlob(3);
const blockId = '0x1234';

const success = await client.sendBlobsToBlobSink(blockId, [blob]);
@@ -39,11 +38,7 @@ export function runBlobSinkClientTests(
});

it('should handle multiple blobs', async () => {
const blobs = [
Blob.fromFields([Fr.random(), Fr.random()]),
Blob.fromFields([Fr.random(), Fr.random()]),
Blob.fromFields([Fr.random(), Fr.random()]),
];
const blobs = [makeEncodedBlob(2), makeEncodedBlob(2), makeEncodedBlob(2)];
const blockId = '0x5678';

const success = await client.sendBlobsToBlobSink(blockId, blobs);
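The tests above replace hand-built Blob.fromFields([Fr.random(), ...]) blobs with makeEncodedBlob(n), exported from @aztec/foundation/blob. Its implementation is not part of this diff; conceptually it must produce a blob whose payload survives the decode that Blob.fromEncodedBlobBuffer performs. A minimal sketch of the idea, using a hypothetical makeRandomBlob helper that skips the encoding framing:

import { Blob } from '@aztec/foundation/blob';
import { Fr } from '@aztec/foundation/fields';

// Hypothetical helper, for illustration only: packs n random fields into a blob.
// The real makeEncodedBlob differs in that its fields form a valid encoded
// payload (length-prefixed tx effects), so decoding them back is well defined.
function makeRandomBlob(numFields: number): Blob {
  const fields = Array.from({ length: numFields }, () => Fr.random());
  return Blob.fromFields(fields);
}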
7 changes: 3 additions & 4 deletions yarn-project/blob-sink/src/client/http.test.ts
@@ -1,4 +1,4 @@
import { Blob } from '@aztec/foundation/blob';
import { Blob, makeEncodedBlob } from '@aztec/foundation/blob';
import { Fr } from '@aztec/foundation/fields';

import { jest } from '@jest/globals';
@@ -53,7 +53,7 @@ describe('HttpBlobSinkClient', () => {
const MOCK_SLOT_NUMBER = 1;

beforeEach(() => {
testBlob = Blob.fromFields([Fr.random()]);
testBlob = makeEncodedBlob(3);
});

const startExecutionHostServer = (): Promise<void> => {
@@ -156,9 +156,8 @@ describe('HttpBlobSinkClient', () => {
l1RpcUrl: `http://localhost:${executionHostPort}`,
l1ConsensusHostUrl: `http://localhost:${consensusHostPort}`,
});
const blob = Blob.fromFields([Fr.random()]);

const success = await client.sendBlobsToBlobSink('0x1234', [blob]);
const success = await client.sendBlobsToBlobSink('0x1234', [testBlob]);
expect(success).toBe(true);

const retrievedBlobs = await client.getBlobSidecar('0x1234');
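This test configures the client with both an execution host (l1RpcUrl) and a consensus host (l1ConsensusHostUrl), so blob retrieval can fall back to L1 when the sink does not have the data. The flow it exercises, sketched with a hypothetical factory name since the construction site sits outside the visible hunk:

// Hypothetical construction; only the options object and the two calls below
// are visible in the diff.
const client = makeTestClient({
  l1RpcUrl: `http://localhost:${executionHostPort}`,
  l1ConsensusHostUrl: `http://localhost:${consensusHostPort}`,
});
const success = await client.sendBlobsToBlobSink('0x1234', [testBlob]); // true on success
const retrievedBlobs = await client.getBlobSidecar('0x1234'); // sidecar for that block id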
18 changes: 8 additions & 10 deletions yarn-project/blob-sink/src/server/server.test.ts
@@ -1,4 +1,4 @@
import { Blob } from '@aztec/foundation/blob';
import { Blob, makeEncodedBlob } from '@aztec/foundation/blob';
import { Fr } from '@aztec/foundation/fields';

import request from 'supertest';
@@ -20,10 +20,8 @@ describe('BlobSinkService', () => {
});

describe('should store and retrieve a blob sidecar', () => {
const testFields = [Fr.random(), Fr.random(), Fr.random()];
const testFields2 = [Fr.random(), Fr.random(), Fr.random()];
const blob = Blob.fromFields(testFields);
const blob2 = Blob.fromFields(testFields2);
const blob = makeEncodedBlob(3);
const blob2 = makeEncodedBlob(3);
const blockId = '0x1234';

beforeEach(async () => {
@@ -56,9 +54,9 @@

// Convert the response blob back to a Blob object and verify it matches
const retrievedBlobs = getResponse.body.data;
const retrievedBlob = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
const retrievedBlob2 = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[1].blob.slice(2), 'hex'));

const retrievedBlob = Blob.fromBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
const retrievedBlob2 = Blob.fromBlobBuffer(Buffer.from(retrievedBlobs[1].blob.slice(2), 'hex'));
expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString());
expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex'));
expect(retrievedBlob.proof.toString('hex')).toBe(blob.proof.toString('hex'));
@@ -78,8 +76,8 @@
expect(getWithIndicies.body.data.length).toBe(2);

const retrievedBlobs = getWithIndicies.body.data;
const retrievedBlob = Blob.fromBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
const retrievedBlob2 = Blob.fromBlobBuffer(Buffer.from(retrievedBlobs[1].blob.slice(2), 'hex'));
const retrievedBlob = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
const retrievedBlob2 = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[1].blob.slice(2), 'hex'));
expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString());
expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex'));
expect(retrievedBlob.proof.toString('hex')).toBe(blob.proof.toString('hex'));
@@ -96,7 +94,7 @@
expect(getWithIndicies.body.data.length).toBe(1);

const retrievedBlobs = getWithIndicies.body.data;
const retrievedBlob = Blob.fromBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
const retrievedBlob = Blob.fromEncodedBlobBuffer(Buffer.from(retrievedBlobs[0].blob.slice(2), 'hex'));
expect(retrievedBlob.fieldsHash.toString()).toBe(blob2.fieldsHash.toString());
expect(retrievedBlob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex'));
expect(retrievedBlob.proof.toString('hex')).toBe(blob2.proof.toString('hex'));
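Throughout these tests the service returns each blob as a 0x-prefixed hex string, and the assertions rebuild a Blob from those bytes with Blob.fromEncodedBlobBuffer before comparing fieldsHash, commitment, and proof. A condensed sketch of that round trip, assuming blob is an encoded blob such as one produced by makeEncodedBlob:

// Serialize the blob bytes the way the sink returns them...
const hex = `0x${Buffer.from(blob.data).toString('hex')}`;
// ...then rebuild the Blob from the encoded payload and compare commitments.
const roundTripped = Blob.fromEncodedBlobBuffer(Buffer.from(hex.slice(2), 'hex'));
expect(roundTripped.commitment.toString('hex')).toBe(blob.commitment.toString('hex'));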
7 changes: 1 addition & 6 deletions yarn-project/circuit-types/src/tx_effect.ts
@@ -17,11 +17,11 @@ import {
REVERT_CODE_PREFIX,
RevertCode,
TX_FEE_PREFIX,
TX_START_PREFIX,
UNENCRYPTED_LOGS_PREFIX,
} from '@aztec/circuits.js';
import { type FieldsOf, makeTuple } from '@aztec/foundation/array';
import { toBufferBE } from '@aztec/foundation/bigint-buffer';
import { TX_EFFECT_PREFIX_BYTE_LENGTH, TX_START_PREFIX, TX_START_PREFIX_BYTES_LENGTH } from '@aztec/foundation/blob';
import { padArrayEnd } from '@aztec/foundation/collection';
import { sha256Trunc } from '@aztec/foundation/crypto';
import { jsonStringify } from '@aztec/foundation/json-rpc';
@@ -42,11 +42,6 @@ import { TxHash } from './tx/tx_hash.js';

export { RevertCodeEnum } from '@aztec/circuits.js';

// These are helper constants to decode tx effects from blob encoded fields
const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2;
// 7 bytes for: | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode |
const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7;

export class TxEffect {
constructor(
/**
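The constants removed here move to @aztec/foundation/blob alongside the encoding logic that consumes them. The arithmetic behind them, spelled out with a hypothetical prefix value purely for illustration:

// Hypothetical value for illustration only; the real TX_START_PREFIX is now
// defined in @aztec/foundation/blob.
const TX_START_PREFIX = 0x74785f7374617274n; // an 8-byte marker, e.g. 'tx_start' in ASCII

// 16 hex characters / 2 = 8 bytes for the start marker...
const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2; // 8
// ...plus 7 bytes for | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode |
const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7; // 15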
200 changes: 200 additions & 0 deletions yarn-project/foundation/src/blob/blob.ts
@@ -0,0 +1,200 @@
// Importing directly from 'c-kzg' does not work; ignoring the import/no-named-as-default-member error:
import cKzg from 'c-kzg';
import type { Blob as BlobBuffer } from 'c-kzg';

import { poseidon2Hash, sha256 } from '../crypto/index.js';
import { Fr } from '../fields/index.js';
import { BufferReader, serializeToBuffer } from '../serialize/index.js';
import { deserializeEncodedBlobFields } from './encoding.js';

/* eslint-disable import/no-named-as-default-member */
const { BYTES_PER_BLOB, FIELD_ELEMENTS_PER_BLOB, blobToKzgCommitment, computeKzgProof, verifyKzgProof } = cKzg;

// The prefix to the EVM blobHash, defined here: https://eips.ethereum.org/EIPS/eip-4844#specification
export const VERSIONED_HASH_VERSION_KZG = 0x01;

/**
* A class to create, manage, and prove EVM blobs.
*/
export class Blob {
constructor(
/** The blob to be broadcast on L1 in bytes form. */
public readonly data: BlobBuffer,
/** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
public readonly fieldsHash: Fr,
/** Challenge point z = H(H(tx_effects), kzgCommitment). Used such that p(z) = y. */
public readonly challengeZ: Fr,
/** Evaluation y = p(z), where p() is the blob polynomial. A BLS12 field element, represented as a BigNum in Noir and a bigint in TS. */
public readonly evaluationY: Buffer,
/** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
public readonly commitment: Buffer,
/** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */
public readonly proof: Buffer,
) {}

static fromEncodedBlobBuffer(blob: BlobBuffer, multiBlobFieldsHash?: Fr): Blob {
const fields: Fr[] = deserializeEncodedBlobFields(blob);
return Blob.fromFields(fields, multiBlobFieldsHash);
}

static fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Blob {
if (fields.length > FIELD_ELEMENTS_PER_BLOB) {
throw new Error(
`Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`,
);
}

const data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB);
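// Note: Buffer.concat with an explicit totalLength zero-fills the remaining
// space, so data is always exactly BYTES_PER_BLOB bytes regardless of fields.length.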

// This matches the output of SpongeBlob.squeeze() in the blob circuit
const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields);
const commitment = Buffer.from(blobToKzgCommitment(data));
const challengeZ = poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]);
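// z binds both the field contents (via fieldsHash) and the KZG commitment,
// Fiat-Shamir style, so the opening proof below attests to this exact blob.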
const res = computeKzgProof(data, challengeZ.toBuffer());
if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) {
throw new Error(`KZG proof did not verify.`);
}
const proof = Buffer.from(res[0]);
const evaluationY = Buffer.from(res[1]);

return new Blob(data, fieldsHash, challengeZ, evaluationY, commitment, proof);
}

// TODO: add unit test
static fromJson(json: { blob: string; kzg_commitment: string; kzg_proof: string }): Blob {
const blobBuffer = Buffer.from(json.blob.slice(2), 'hex');

const blob = Blob.fromEncodedBlobBuffer(blobBuffer);

if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) {
throw new Error('KZG commitment does not match');
}

return blob;
}

// TODO: decide whether the naming should distinguish encoded / non-encoded blob payloads
toFields(): Fr[] {
return deserializeEncodedBlobFields(this.data);
}

toEncodedFields(): Fr[] {
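// NOTE: currently identical to toFields(); both decode the encoded payload (see TODO above).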
return deserializeEncodedBlobFields(this.data);
}

// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
commitmentToFields(): [Fr, Fr] {
return commitmentToFields(this.commitment);
}

// Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
getEthVersionedBlobHash(): Buffer {
const hash = sha256(this.commitment);
hash[0] = VERSIONED_HASH_VERSION_KZG;
return hash;
}

static getEthVersionedBlobHash(commitment: Buffer): Buffer {
const hash = sha256(commitment);
hash[0] = VERSIONED_HASH_VERSION_KZG;
return hash;
}

toBuffer(): Buffer {
return Buffer.from(
serializeToBuffer(
this.data.length,
this.data,
this.fieldsHash,
this.challengeZ,
this.evaluationY.length,
this.evaluationY,
this.commitment.length,
this.commitment,
this.proof.length,
this.proof,
),
);
}

static fromBuffer(buf: Buffer | BufferReader): Blob {
const reader = BufferReader.asReader(buf);
return new Blob(
reader.readUint8Array(),
reader.readObject(Fr),
reader.readObject(Fr),
reader.readBuffer(),
reader.readBuffer(),
reader.readBuffer(),
);
}

/**
* Get the size of the blob in bytes
*/
getSize() {
return this.data.length;
}

// Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile:
// * input[:32] - versioned_hash
// * input[32:64] - z
// * input[64:96] - y
// * input[96:144] - commitment C
// * input[144:192] - proof (a commitment to the quotient polynomial q(X))
// See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
getEthBlobEvaluationInputs(): `0x${string}` {
const buf = Buffer.concat([
this.getEthVersionedBlobHash(),
this.challengeZ.toBuffer(),
this.evaluationY,
this.commitment,
this.proof,
]);
return `0x${buf.toString('hex')}`;
}

static getEthBlobEvaluationInputs(blobs: Blob[]): `0x${string}` {
let buf = Buffer.alloc(0);
blobs.forEach(blob => {
buf = Buffer.concat([
buf,
blob.getEthVersionedBlobHash(),
blob.challengeZ.toBuffer(),
blob.evaluationY,
blob.commitment,
blob.proof,
]);
});
// For multiple blobs, we prefix the number of blobs:
const lenBuf = Buffer.alloc(1);
lenBuf.writeUint8(blobs.length);
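// A single-byte count caps this at 255 blobs per call, comfortably above the
// per-block blob limit on L1.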
buf = Buffer.concat([lenBuf, buf]);
return `0x${buf.toString('hex')}`;
}

static getViemKzgInstance() {
return {
blobToKzgCommitment: cKzg.blobToKzgCommitment,
computeBlobKzgProof: cKzg.computeBlobKzgProof,
};
}

// Returns as many blobs as we require to broadcast the given fields
// Assumes we share the fields hash between all blobs
static getBlobs(fields: Fr[]): Blob[] {
const numBlobs = Math.max(Math.ceil(fields.length / FIELD_ELEMENTS_PER_BLOB), 1);
const multiBlobFieldsHash = poseidon2Hash(fields);
const res = [];
for (let i = 0; i < numBlobs; i++) {
const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB;
res.push(Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
}
return res;
}
}

// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
function commitmentToFields(commitment: Buffer): [Fr, Fr] {
return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))];
}
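Taken together, the module covers the full packing flow: split a field array into blobs that share one fieldsHash, then produce calldata for the L1 point-evaluation precompile check. A usage sketch based only on the API above:

import { Blob } from '@aztec/foundation/blob';
import { Fr } from '@aztec/foundation/fields';

// Pack fields into however many blobs are needed (all sharing one fieldsHash),
// then build the hex input for verifying the openings on L1.
const fields = [Fr.random(), Fr.random(), Fr.random()];
const blobs = Blob.getBlobs(fields);
const evaluationInputs = Blob.getEthBlobEvaluationInputs(blobs); // 0x-prefixed hex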