diff --git a/yarn-project/blob-sink/.eslintrc.cjs b/yarn-project/blob-sink/.eslintrc.cjs new file mode 100644 index 00000000000..e659927475c --- /dev/null +++ b/yarn-project/blob-sink/.eslintrc.cjs @@ -0,0 +1 @@ +module.exports = require('@aztec/foundation/eslint'); diff --git a/yarn-project/blob-sink/README.md b/yarn-project/blob-sink/README.md new file mode 100644 index 00000000000..32d9b9d59b3 --- /dev/null +++ b/yarn-project/blob-sink/README.md @@ -0,0 +1,7 @@ +## Blob Sink + +An HTTP API that emulates the https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/getBlobSidecars API. + +## When is this used? + +This service runs alongside end-to-end tests to capture the blob transactions that are sent alongside a `propose` transaction. \ No newline at end of file diff --git a/yarn-project/blob-sink/package.json b/yarn-project/blob-sink/package.json new file mode 100644 index 00000000000..20d23507222 --- /dev/null +++ b/yarn-project/blob-sink/package.json @@ -0,0 +1,89 @@ +{ + "name": "@aztec/blob-sink", + "version": "0.1.0", + "type": "module", + "exports": { + ".": "./dest/index.js" + }, + "inherits": [ + "../package.common.json" + ], + "scripts": { + "build": "yarn clean && tsc -b", + "build:dev": "tsc -b --watch", + "clean": "rm -rf ./dest .tsbuildinfo", + "formatting": "run -T prettier --check ./src && run -T eslint ./src", + "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" + }, + "jest": { + "moduleNameMapper": { + "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" + }, + "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", + "rootDir": "./src", + "transform": { + "^.+\\.tsx?$": [ + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" + } + } + } + ] + }, + "extensionsToTreatAsEsm": [ + ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] + ], + "testTimeout": 30000, + "setupFiles": [ + "../../foundation/src/jest/setup.mjs" + ] + }, + "dependencies": { + "@aztec/circuit-types": "workspace:^", + "@aztec/foundation": "workspace:^", + "@aztec/kv-store": "workspace:*", + "@aztec/telemetry-client": "workspace:*", + "express": "^4.21.1", + "source-map-support": "^0.5.21", + "tslib": "^2.4.0", + "zod": "^3.23.8" + }, + "devDependencies": { + "@jest/globals": "^29.5.0", + "@types/jest": "^29.5.0", + "@types/memdown": "^3.0.0", + "@types/node": "^18.7.23", + "@types/source-map-support": "^0.5.10", + "@types/supertest": "^6.0.2", + "jest": "^29.5.0", + "jest-mock-extended": "^3.0.3", + "supertest": "^7.0.0", + "ts-node": "^10.9.1", + "typescript": "^5.0.4" + }, + "files": [ + "dest", + "src", + "!*.test.*" + ], + "types": "./dest/index.d.ts", + "engines": { + "node": ">=18" + } +} diff --git a/yarn-project/blob-sink/src/blob-sink.test.ts b/yarn-project/blob-sink/src/blob-sink.test.ts new file mode 100644 index 00000000000..03f240edc04 --- /dev/null +++ b/yarn-project/blob-sink/src/blob-sink.test.ts @@ -0,0 +1,91 @@ +import { Blob } from '@aztec/foundation/blob'; +import { Fr } from '@aztec/foundation/fields'; + +import request from 'supertest'; + +import { BlobSinkServer } from './server.js'; + +describe('BlobSinkService', () => { + let service: BlobSinkServer; + + beforeEach(async () => { + service = new BlobSinkServer({ + port: 0, // Using port 0 lets the OS assign a random available port + }); + await service.start(); + }); + + afterEach(async () 
=> { + await service.stop(); + }); + + it('should store and retrieve a blob sidecar', async () => { + // Create a test blob + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blockId = '0x1234'; + + // Post the blob + const postResponse = await request(service.getApp()) + .post('/blob_sidecar') + .send({ + // eslint-disable-next-line camelcase + block_id: blockId, + blobs: [ + { + index: 0, + blob: blob.toBuffer(), + }, + ], + }); + + expect(postResponse.status).toBe(200); + + // Retrieve the blob + const getResponse = await request(service.getApp()).get(`/eth/v1/beacon/blob_sidecars/${blockId}`); + + expect(getResponse.status).toBe(200); + + // Convert the response blob back to a Blob object and verify it matches + const retrievedBlobs = getResponse.body.data; + + const retrievedBlob = Blob.fromBuffer(Buffer.from(retrievedBlobs[0].blob, 'hex')); + expect(retrievedBlob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + }); + + it('should return an error if the block ID is invalid (POST)', async () => { + const response = await request(service.getApp()).post('/blob_sidecar').send({ + // eslint-disable-next-line camelcase + block_id: undefined, + }); + + expect(response.status).toBe(400); + }); + + it('should return an error if the block ID is invalid (GET)', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/invalid-id'); + + expect(response.status).toBe(400); + }); + + it('should return 404 for non-existent blob', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/0x999999'); + + expect(response.status).toBe(404); + }); + + it('should reject invalid block IDs', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/invalid-id'); + + expect(response.status).toBe(400); + expect(response.body.error).toBe('Invalid block_id parameter'); + }); + + it('should reject negative block IDs', async () => { + const response = await request(service.getApp()).get('/eth/v1/beacon/blob_sidecars/-123'); + + expect(response.status).toBe(400); + expect(response.body.error).toBe('Invalid block_id parameter'); + }); +}); diff --git a/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts b/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts new file mode 100644 index 00000000000..28c851f003d --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/blob_store_test_suite.ts @@ -0,0 +1,94 @@ +import { Blob } from '@aztec/foundation/blob'; +import { Fr } from '@aztec/foundation/fields'; + +import { BlobWithIndex } from '../types/index.js'; +import { type BlobStore } from './interface.js'; + +export function describeBlobStore(getBlobStore: () => BlobStore) { + let blobStore: BlobStore; + + beforeEach(() => { + blobStore = getBlobStore(); + }); + + it('should store and retrieve a blob', async () => { + // Create a test blob with random fields + const testFields = [Fr.random(), Fr.random(), Fr.random()]; + const blob = Blob.fromFields(testFields); + const blockId = '12345'; + const blobWithIndex = new BlobWithIndex(blob, 0); + + // Store the blob + await blobStore.addBlobSidecars(blockId, [blobWithIndex]); + + // Retrieve the blob + const retrievedBlobs = await blobStore.getBlobSidecars(blockId); + const [retrievedBlob] = retrievedBlobs!; + + // Verify the blob was retrieved and matches + 
expect(retrievedBlob).toBeDefined(); + expect(retrievedBlob.blob.fieldsHash.toString()).toBe(blob.fieldsHash.toString()); + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(blob.commitment.toString('hex')); + }); + + it('should return undefined for non-existent blob', async () => { + const nonExistentBlob = await blobStore.getBlobSidecars('999999'); + expect(nonExistentBlob).toBeUndefined(); + }); + + it('should handle multiple blobs with different block IDs', async () => { + // Create two different blobs + const blob1 = Blob.fromFields([Fr.random(), Fr.random()]); + const blob2 = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobWithIndex1 = new BlobWithIndex(blob1, 0); + const blobWithIndex2 = new BlobWithIndex(blob2, 0); + + // Store both blobs + await blobStore.addBlobSidecars('1', [blobWithIndex1]); + await blobStore.addBlobSidecars('2', [blobWithIndex2]); + + // Retrieve and verify both blobs + const retrieved1 = await blobStore.getBlobSidecars('1'); + const retrieved2 = await blobStore.getBlobSidecars('2'); + const [retrievedBlob1] = retrieved1!; + const [retrievedBlob2] = retrieved2!; + + expect(retrievedBlob1.blob.commitment.toString('hex')).toBe(blob1.commitment.toString('hex')); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); + + it('should overwrite blob when using same block ID', async () => { + // Create two different blobs + const originalBlob = Blob.fromFields([Fr.random()]); + const newBlob = Blob.fromFields([Fr.random(), Fr.random()]); + const blockId = '1'; + const originalBlobWithIndex = new BlobWithIndex(originalBlob, 0); + const newBlobWithIndex = new BlobWithIndex(newBlob, 0); + + // Store original blob + await blobStore.addBlobSidecars(blockId, [originalBlobWithIndex]); + + // Overwrite with new blob + await blobStore.addBlobSidecars(blockId, [newBlobWithIndex]); + + // Retrieve and verify it's the new blob + const retrievedBlobs = await blobStore.getBlobSidecars(blockId); + const [retrievedBlob] = retrievedBlobs!; + expect(retrievedBlob.blob.commitment.toString('hex')).toBe(newBlob.commitment.toString('hex')); + expect(retrievedBlob.blob.commitment.toString('hex')).not.toBe(originalBlob.commitment.toString('hex')); + }); + + it('should handle multiple blobs with the same block ID', async () => { + const blob1 = Blob.fromFields([Fr.random()]); + const blob2 = Blob.fromFields([Fr.random()]); + const blobWithIndex1 = new BlobWithIndex(blob1, 0); + const blobWithIndex2 = new BlobWithIndex(blob2, 0); + + await blobStore.addBlobSidecars('1', [blobWithIndex1, blobWithIndex2]); + const retrievedBlobs = await blobStore.getBlobSidecars('1'); + const [retrievedBlob1, retrievedBlob2] = retrievedBlobs!; + + expect(retrievedBlob1.blob.commitment.toString('hex')).toBe(blob1.commitment.toString('hex')); + expect(retrievedBlob2.blob.commitment.toString('hex')).toBe(blob2.commitment.toString('hex')); + }); +} diff --git a/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts b/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts new file mode 100644 index 00000000000..8b523dbaef1 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/disk_blob_store.test.ts @@ -0,0 +1,8 @@ +import { openTmpStore } from '@aztec/kv-store/lmdb'; + +import { describeBlobStore } from './blob_store_test_suite.js'; +import { DiskBlobStore } from './disk_blob_store.js'; + +describe('DiskBlobStore', () => { + describeBlobStore(() => new DiskBlobStore(openTmpStore())); +}); diff --git 
a/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts b/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts new file mode 100644 index 00000000000..34ac8e2ec73 --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/disk_blob_store.ts @@ -0,0 +1,25 @@ +import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; + +import { type BlobWithIndex, BlobsWithIndexes } from '../types/index.js'; +import { type BlobStore } from './interface.js'; + +export class DiskBlobStore implements BlobStore { + blobs: AztecMap<string, Buffer>; + + constructor(store: AztecKVStore) { + this.blobs = store.openMap('blobs'); + } + + public getBlobSidecars(blockId: string): Promise<BlobWithIndex[] | undefined> { + const blobBuffer = this.blobs.get(`${blockId}`); + if (!blobBuffer) { + return Promise.resolve(undefined); + } + return Promise.resolve(BlobsWithIndexes.fromBuffer(blobBuffer).blobs); + } + + public async addBlobSidecars(blockId: string, blobSidecars: BlobWithIndex[]): Promise<void> { + await this.blobs.set(blockId, new BlobsWithIndexes(blobSidecars).toBuffer()); + return Promise.resolve(); + } +} diff --git a/yarn-project/blob-sink/src/blobstore/index.ts b/yarn-project/blob-sink/src/blobstore/index.ts new file mode 100644 index 00000000000..fd3901930cf --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/index.ts @@ -0,0 +1,3 @@ +export * from './memory_blob_store.js'; +export * from './disk_blob_store.js'; +export * from './interface.js'; diff --git a/yarn-project/blob-sink/src/blobstore/interface.ts b/yarn-project/blob-sink/src/blobstore/interface.ts new file mode 100644 index 00000000000..9a6e3d26d8c --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/interface.ts @@ -0,0 +1,12 @@ +import { type BlobWithIndex } from '../types/index.js'; + +export interface BlobStore { + /** + * Get the blob sidecars stored for a given block id + */ + getBlobSidecars: (blockId: string) => Promise<BlobWithIndex[] | undefined>; + /** + * Add blob sidecars to the store for a given block id + */ + addBlobSidecars: (blockId: string, blobSidecars: BlobWithIndex[]) => Promise<void>; +} diff --git a/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts b/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts new file mode 100644 index 00000000000..2f13926cd1a --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/memory_blob_store.test.ts @@ -0,0 +1,6 @@ +import { describeBlobStore } from './blob_store_test_suite.js'; +import { MemoryBlobStore } from './memory_blob_store.js'; + +describe('MemoryBlobStore', () => { + describeBlobStore(() => new MemoryBlobStore()); +}); diff --git a/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts b/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts new file mode 100644 index 00000000000..23ed7274edb --- /dev/null +++ b/yarn-project/blob-sink/src/blobstore/memory_blob_store.ts @@ -0,0 +1,19 @@ +import { type BlobWithIndex, BlobsWithIndexes } from '../types/index.js'; +import { type BlobStore } from './interface.js'; + +export class MemoryBlobStore implements BlobStore { + private blobs: Map<string, Buffer> = new Map(); + + public getBlobSidecars(blockId: string): Promise<BlobWithIndex[] | undefined> { + const blobBuffer = this.blobs.get(blockId); + if (!blobBuffer) { + return Promise.resolve(undefined); + } + return Promise.resolve(BlobsWithIndexes.fromBuffer(blobBuffer).blobs); + } + + public addBlobSidecars(blockId: string, blobSidecars: BlobWithIndex[]): Promise<void> { + this.blobs.set(blockId, new BlobsWithIndexes(blobSidecars).toBuffer()); + return Promise.resolve(); + } +} diff --git a/yarn-project/blob-sink/src/config.ts b/yarn-project/blob-sink/src/config.ts new file mode 100644 index 00000000000..e18311f9f1d --- 
/dev/null +++ b/yarn-project/blob-sink/src/config.ts @@ -0,0 +1,7 @@ +import { type DataStoreConfig } from '@aztec/kv-store/config'; + +export interface BlobSinkConfig { + port?: number; + dataStoreConfig?: DataStoreConfig; + otelMetricsCollectorUrl?: string; +} diff --git a/yarn-project/blob-sink/src/factory.ts b/yarn-project/blob-sink/src/factory.ts new file mode 100644 index 00000000000..3e04e6aae79 --- /dev/null +++ b/yarn-project/blob-sink/src/factory.ts @@ -0,0 +1,25 @@ +import { type AztecKVStore } from '@aztec/kv-store'; +import { createStore } from '@aztec/kv-store/lmdb'; + +import { type BlobSinkConfig } from './config.js'; +import { BlobSinkServer } from './server.js'; + +// If data store settings are provided, the store is created and returned. +// Otherwise, undefined is returned and an in-memory store will be used. +async function getDataStoreConfig(config?: BlobSinkConfig): Promise<AztecKVStore | undefined> { + if (!config?.dataStoreConfig) { + return undefined; + } + return await createStore('blob-sink', config.dataStoreConfig); +} + +// TODO: telemetry client config too + +/** + * Creates a blob sink service from the provided config. + */ +export async function createBlobSinkServer(config?: BlobSinkConfig): Promise<BlobSinkServer> { + const store = await getDataStoreConfig(config); + + return new BlobSinkServer(config, store); +} diff --git a/yarn-project/blob-sink/src/index.ts b/yarn-project/blob-sink/src/index.ts new file mode 100644 index 00000000000..25844130c2f --- /dev/null +++ b/yarn-project/blob-sink/src/index.ts @@ -0,0 +1,3 @@ +export * from './server.js'; +export * from './config.js'; +export * from './factory.js'; diff --git a/yarn-project/blob-sink/src/metrics.ts b/yarn-project/blob-sink/src/metrics.ts new file mode 100644 index 00000000000..fae9efc7ef6 --- /dev/null +++ b/yarn-project/blob-sink/src/metrics.ts @@ -0,0 +1,27 @@ +import { type Histogram, Metrics, type TelemetryClient, type UpDownCounter } from '@aztec/telemetry-client'; + +import { type BlobWithIndex } from './types/blob_with_index.js'; + +export class BlobSinkMetrics { + /** The number of blobs in the blob store */ + private objectsInBlobStore: UpDownCounter; + + /** Tracks blob size */ + private blobSize: Histogram; + + constructor(telemetry: TelemetryClient) { + const name = 'BlobSink'; + this.objectsInBlobStore = telemetry.getMeter(name).createUpDownCounter(Metrics.BLOB_SINK_OBJECTS_IN_BLOB_STORE, { + description: 'The current number of blobs in the blob store', + }); + + this.blobSize = telemetry.getMeter(name).createHistogram(Metrics.BLOB_SINK_BLOB_SIZE, { + description: 'The size of blobs in the blob store', + }); + } + + public recordBlobReceipt(blobs: BlobWithIndex[]) { + this.objectsInBlobStore.add(blobs.length); + blobs.forEach(b => this.blobSize.record(b.blob.getSize())); + } +} diff --git a/yarn-project/blob-sink/src/server.ts b/yarn-project/blob-sink/src/server.ts new file mode 100644 index 00000000000..a3e30f1d37e --- /dev/null +++ b/yarn-project/blob-sink/src/server.ts @@ -0,0 +1,166 @@ +import { Blob } from '@aztec/foundation/blob'; +import { type Logger, createLogger } from '@aztec/foundation/log'; +import { type AztecKVStore } from '@aztec/kv-store'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; + +import express, { type Express, type Request, type Response, json } from 'express'; +import { type Server } from 'http'; +import { z } from 'zod'; + +import { type BlobStore, DiskBlobStore } from './blobstore/index.js'; +import { 
MemoryBlobStore } from './blobstore/memory_blob_store.js'; +import { type BlobSinkConfig } from './config.js'; +import { BlobSinkMetrics } from './metrics.js'; +import { BlobWithIndex } from './types/index.js'; + +// For now, the block ID is the aztec block ID where the blobs were added. +const blockIdSchema = z.coerce + .string() + .regex(/^0x[0-9a-fA-F]{0,64}$/) + .max(66); + +/** + * Example usage: + * const service = new BlobSinkServer({ port: 5052 }); + * await service.start(); + * ... later ... + * await service.stop(); + */ +export class BlobSinkServer { + private app: Express; + private server: Server | null = null; + private blobStore: BlobStore; + private readonly port: number; + private metrics: BlobSinkMetrics; + private log: Logger = createLogger('aztec:blob-sink'); + + constructor(config?: BlobSinkConfig, store?: AztecKVStore, telemetry: TelemetryClient = new NoopTelemetryClient()) { + this.port = config?.port ?? 5052; // 5052 is beacon chain default http port + this.app = express(); + + // Setup middleware + this.app.use(json({ limit: '1mb' })); // Increase the limit to allow for a blob to be sent + + this.metrics = new BlobSinkMetrics(telemetry); + + this.blobStore = store === undefined ? new MemoryBlobStore() : new DiskBlobStore(store); + + // Setup routes + this.setupRoutes(); + } + + private setupRoutes() { + this.app.get('/eth/v1/beacon/blob_sidecars/:block_id', this.handleBlobSidecar.bind(this)); + this.app.post('/blob_sidecar', this.handlePostBlobSidecar.bind(this)); + } + + private async handleBlobSidecar(req: Request, res: Response) { + // eslint-disable-next-line camelcase + const { block_id } = req.params; + + try { + // eslint-disable-next-line camelcase + const parsedBlockId = blockIdSchema.parse(block_id); + + if (!parsedBlockId) { + res.status(400).json({ + error: 'Invalid block_id parameter', + }); + return; + } + + const blobs = await this.blobStore.getBlobSidecars(parsedBlockId.toString()); + + if (!blobs) { + res.status(404).json({ error: 'Blob not found' }); + return; + } + + res.json({ + version: 'deneb', + data: blobs.map(blob => blob.toJSON()), + }); + } catch (error) { + if (error instanceof z.ZodError) { + res.status(400).json({ + error: 'Invalid block_id parameter', + details: error.errors, + }); + } else { + res.status(500).json({ + error: 'Internal server error', + }); + } + } + } + + private async handlePostBlobSidecar(req: Request, res: Response) { + // eslint-disable-next-line camelcase + const { block_id, blobs } = req.body; + + try { + // eslint-disable-next-line camelcase + const parsedBlockId = blockIdSchema.parse(block_id); + if (!parsedBlockId) { + res.status(400).json({ + error: 'Invalid block_id parameter', + }); + return; + } + + this.log.info(`Received blob sidecar for block ${parsedBlockId}`); + + // TODO: tidy up the blob parsing + const blobObjects: BlobWithIndex[] = blobs.map( + (b: { index: number; blob: { type: string; data: string } }) => + new BlobWithIndex(Blob.fromBuffer(Buffer.from(b.blob.data)), b.index), + ); + + await this.blobStore.addBlobSidecars(parsedBlockId.toString(), blobObjects); + this.metrics.recordBlobReceipt(blobObjects); + + this.log.info(`Blob sidecar stored successfully for block ${parsedBlockId}`); + + res.json({ message: 'Blob sidecar stored successfully' }); + } catch (error) { + res.status(400).json({ + error: 'Invalid blob data', + }); + } + } + + public start(): Promise<void> { + return new Promise(resolve => { + this.server = this.app.listen(this.port, () => { + this.log.info(`Server is running on 
http://localhost:${this.port}`); + resolve(); + }); + }); + } + + public stop(): Promise<void> { + this.log.info('Stopping blob sink'); + return new Promise((resolve, reject) => { + if (!this.server) { + resolve(); + this.log.info('Blob sink already stopped'); + return; + } + + this.server.close(err => { + if (err) { + reject(err); + return; + } + this.server = null; + this.log.info('Blob sink stopped'); + resolve(); + }); + }); + } + + public getApp(): Express { + return this.app; + } +} diff --git a/yarn-project/blob-sink/src/types/blob_with_index.test.ts b/yarn-project/blob-sink/src/types/blob_with_index.test.ts new file mode 100644 index 00000000000..d29c6b98b88 --- /dev/null +++ b/yarn-project/blob-sink/src/types/blob_with_index.test.ts @@ -0,0 +1,31 @@ +import { Blob } from '@aztec/foundation/blob'; +import { Fr } from '@aztec/foundation/fields'; + +import { BlobWithIndex, BlobsWithIndexes } from './blob_with_index.js'; + +describe('BlobWithIndex Serde', () => { + it('should serialize and deserialize', () => { + const blob = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobWithIndex = new BlobWithIndex(blob, 0); + const serialized = blobWithIndex.toBuffer(); + + const deserialized = BlobWithIndex.fromBuffer(serialized); + + expect(blobWithIndex).toEqual(deserialized); + }); +}); + +describe('BlobsWithIndexes Serde', () => { + it('should serialize and deserialize', () => { + const blobs = [ + new BlobWithIndex(Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]), 0), + new BlobWithIndex(Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]), 1), + ]; + const blobsWithIndexes = new BlobsWithIndexes(blobs); + + const serialized = blobsWithIndexes.toBuffer(); + const deserialized = BlobsWithIndexes.fromBuffer(serialized); + + expect(deserialized).toEqual(blobsWithIndexes); + }); +}); diff --git a/yarn-project/blob-sink/src/types/blob_with_index.ts b/yarn-project/blob-sink/src/types/blob_with_index.ts new file mode 100644 index 00000000000..b58d3b8b34c --- /dev/null +++ b/yarn-project/blob-sink/src/types/blob_with_index.ts @@ -0,0 +1,47 @@ +import { Blob } from '@aztec/foundation/blob'; +import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; + +/** A serializable array of blobs with their indexes, stored at a given block id */ +export class BlobsWithIndexes { + constructor(public blobs: BlobWithIndex[]) {} + + public toBuffer(): Buffer { + return serializeToBuffer(this.blobs.length, this.blobs); + } + + public static fromBuffer(buffer: Buffer | BufferReader): BlobsWithIndexes { + const reader = BufferReader.asReader(buffer); + return new BlobsWithIndexes(reader.readArray(reader.readNumber(), BlobWithIndex)); + } +} + +/** We store blobs alongside their index in the block */ +export class BlobWithIndex { + constructor( + /** The blob */ + public blob: Blob, + /** The index of the blob in the block */ + public index: number, + ) {} + + public toBuffer(): Buffer { + return serializeToBuffer([this.blob, this.index]); + } + + public static fromBuffer(buffer: Buffer | BufferReader): BlobWithIndex { + const reader = BufferReader.asReader(buffer); + return new BlobWithIndex(reader.readObject(Blob), reader.readNumber()); + } + + // Follows the structure the beacon node api expects + public toJSON(): { blob: string; index: number; kzg_commitment: string; kzg_proof: string } { + return { + blob: this.blob.toBuffer().toString('hex'), + index: this.index, + // eslint-disable-next-line camelcase + kzg_commitment: this.blob.commitment.toString('hex'), + 
// eslint-disable-next-line camelcase + kzg_proof: this.blob.proof.toString('hex'), + }; + } +} diff --git a/yarn-project/blob-sink/src/types/index.ts b/yarn-project/blob-sink/src/types/index.ts new file mode 100644 index 00000000000..396b8fc805e --- /dev/null +++ b/yarn-project/blob-sink/src/types/index.ts @@ -0,0 +1 @@ +export * from './blob_with_index.js'; diff --git a/yarn-project/blob-sink/tsconfig.json b/yarn-project/blob-sink/tsconfig.json new file mode 100644 index 00000000000..535eabe5863 --- /dev/null +++ b/yarn-project/blob-sink/tsconfig.json @@ -0,0 +1,23 @@ +{ + "extends": "..", + "compilerOptions": { + "outDir": "dest", + "rootDir": "src", + "tsBuildInfoFile": ".tsbuildinfo" + }, + "references": [ + { + "path": "../circuit-types" + }, + { + "path": "../foundation" + }, + { + "path": "../kv-store" + }, + { + "path": "../telemetry-client" + } + ], + "include": ["src"] +} diff --git a/yarn-project/circuits.js/src/structs/blob_public_inputs.test.ts b/yarn-project/circuits.js/src/structs/blob_public_inputs.test.ts index 9dc9c42d7be..20d3f7e145e 100644 --- a/yarn-project/circuits.js/src/structs/blob_public_inputs.test.ts +++ b/yarn-project/circuits.js/src/structs/blob_public_inputs.test.ts @@ -20,7 +20,7 @@ describe('BlobPublicInputs', () => { }); it('converts correctly from Blob class', () => { - const blob = new Blob(Array(400).fill(new Fr(3))); + const blob = Blob.fromFields(Array(400).fill(new Fr(3))); const converted = BlobPublicInputs.fromBlob(blob); expect(converted.z).toEqual(blob.challengeZ); expect(Buffer.from(converted.y.toString(16), 'hex')).toEqual(blob.evaluationY); @@ -55,7 +55,7 @@ describe('BlockBlobPublicInputs', () => { }); it('converts correctly from Blob class', () => { - const blobs = Array.from({ length: BLOBS_PER_BLOCK }, (_, i) => new Blob(Array(400).fill(new Fr(i + 1)))); + const blobs = Array.from({ length: BLOBS_PER_BLOCK }, (_, i) => Blob.fromFields(Array(400).fill(new Fr(i + 1)))); const converted = BlockBlobPublicInputs.fromBlobs(blobs); converted.inner.forEach((blobPI, i) => { expect(blobPI.z).toEqual(blobs[i].challengeZ); diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index 4a31d3e2ee1..8d4e80e014f 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -30,6 +30,7 @@ "@aztec/aztec-node": "workspace:^", "@aztec/aztec.js": "workspace:^", "@aztec/bb-prover": "workspace:^", + "@aztec/blob-sink": "workspace:^", "@aztec/bot": "workspace:^", "@aztec/circuit-types": "workspace:^", "@aztec/circuits.js": "workspace:^", diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index f03231342b9..785324e9714 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -71,6 +71,11 @@ config.l1RpcUrl = config.l1RpcUrl || 'http://127.0.0.1:8545'; const numberOfConsecutiveBlocks = 2; +const BLOB_SINK_PORT = 5052; +const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; + +// TODO(md): THE FIRST PLACE TO TEST THE BLOB SINK E2E IS HERE + describe('L1Publisher integration', () => { let publicClient: PublicClient; let walletClient: WalletClient; @@ -187,6 +192,7 @@ describe('L1Publisher integration', () => { l1ChainId: 31337, viemPollingIntervalMS: 100, ethereumSlotDuration: config.ethereumSlotDuration, + blobSinkUrl: BLOB_SINK_URL, }, new NoopTelemetryClient(), ); diff --git 
a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 50857c56baf..e6fb7713ff7 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -383,6 +383,9 @@ describe('e2e_synching', () => { l1ChainId: 31337, viemPollingIntervalMS: 100, ethereumSlotDuration: ETHEREUM_SLOT_DURATION, + + // TODO(md): update + blobSinkUrl: 'http://localhost:5052', }, new NoopTelemetryClient(), ); diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 2acb78f7d47..1e493bf9681 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -14,6 +14,7 @@ import { type Wallet, } from '@aztec/aztec.js'; import { deployInstance, registerContractClass } from '@aztec/aztec.js/deployment'; +import { type BlobSinkServer, createBlobSinkServer } from '@aztec/blob-sink'; import { type DeployL1ContractsArgs, createL1Clients, getL1ContractsConfigEnvVars, l1Artifacts } from '@aztec/ethereum'; import { EthCheatCodesWithState, startAnvil } from '@aztec/ethereum/test'; import { asyncMap } from '@aztec/foundation/async-map'; @@ -29,6 +30,7 @@ import { type Anvil } from '@viem/anvil'; import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { copySync, removeSync } from 'fs-extra/esm'; import fs from 'fs/promises'; +import getPort from 'get-port'; import { tmpdir } from 'os'; import path, { join } from 'path'; import { type Hex, getContract } from 'viem'; @@ -53,6 +55,7 @@ export type SubsystemsContext = { watcher: AnvilTestWatcher; cheatCodes: CheatCodes; dateProvider: TestDateProvider; + blobSink: BlobSinkServer; directoryToCleanup?: string; }; @@ -254,6 +257,7 @@ async function teardown(context: SubsystemsContext | undefined) { await context.bbConfig?.cleanup(); await context.anvil.stop(); await context.watcher.stop(); + await context.blobSink.stop(); if (context.directoryToCleanup) { await fs.rm(context.directoryToCleanup, { recursive: true, force: true }); } @@ -278,6 +282,8 @@ async function setupFromFresh( ): Promise<SubsystemsContext> { logger.verbose(`Initializing state...`); + const blobSinkPort = await getPort(); + // Fetch the AztecNode config. // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. const aztecNodeConfig: AztecNodeConfig & SetupOptions = { ...getConfigEnvVars(), ...opts }; @@ -290,6 +296,17 @@ } else { aztecNodeConfig.dataDirectory = statePath; } + aztecNodeConfig.blobSinkUrl = `http://127.0.0.1:${blobSinkPort}`; + + // Setup blob sink service + const blobSink = await createBlobSinkServer({ + port: blobSinkPort, + dataStoreConfig: { + dataDirectory: aztecNodeConfig.dataDirectory, + dataStoreMapSizeKB: aztecNodeConfig.dataStoreMapSizeKB, + }, + }); + await blobSink.start(); // Start anvil. We go via a wrapper script to ensure if the parent dies, anvil dies. 
logger.verbose('Starting anvil...'); @@ -406,6 +423,7 @@ async function setupFromFresh( watcher, cheatCodes, dateProvider, + blobSink, directoryToCleanup, }; } @@ -419,12 +437,26 @@ async function setupFromState(statePath: string, logger: Logger): Promise Promise; }; @@ -380,6 +385,11 @@ export async function setup( return await setupWithRemoteEnvironment(publisherHdAccount!, config, logger, numberOfAccounts); } + // Blob sink service - blobs get posted here and served from here + const blobSinkPort = await getPort(); + const blobSink = await createBlobSinkServer({ port: blobSinkPort }); + config.blobSinkUrl = `http://127.0.0.1:${blobSinkPort}`; + const deployL1ContractsValues = opts.deployL1ContractsValues ?? (await setupL1Contracts(config.l1RpcUrl, publisherHdAccount!, logger, opts, chain)); @@ -490,6 +500,7 @@ export async function setup( await anvil?.stop(); await watcher.stop(); + await blobSink?.stop(); if (directoryToCleanup) { logger.verbose(`Cleaning up data directory at ${directoryToCleanup}`); @@ -510,6 +521,7 @@ export async function setup( sequencer, watcher, dateProvider, + blobSink, teardown, }; } diff --git a/yarn-project/end-to-end/tsconfig.json b/yarn-project/end-to-end/tsconfig.json index 08932fbdb4a..a8117b5a5db 100644 --- a/yarn-project/end-to-end/tsconfig.json +++ b/yarn-project/end-to-end/tsconfig.json @@ -21,6 +21,9 @@ { "path": "../bb-prover" }, + { + "path": "../blob-sink" + }, { "path": "../bot" }, diff --git a/yarn-project/foundation/src/blob/blob.test.ts b/yarn-project/foundation/src/blob/blob.test.ts index e4a5746ec06..da4caa8fc74 100644 --- a/yarn-project/foundation/src/blob/blob.test.ts +++ b/yarn-project/foundation/src/blob/blob.test.ts @@ -78,15 +78,19 @@ describe('blob', () => { // This test ensures that the Blob class correctly matches the c-kzg lib // The values here are used to test Noir's blob evaluation in noir-projects/noir-protocol-circuits/crates/blob/src/blob.nr -> test_400 const blobItems = Array(400).fill(new Fr(3)); - const ourBlob = new Blob(blobItems); + const ourBlob = Blob.fromFields(blobItems); const blobItemsHash = poseidon2Hash(Array(400).fill(new Fr(3))); expect(blobItemsHash).toEqual(ourBlob.fieldsHash); - expect(blobToKzgCommitment(ourBlob.data)).toEqual(ourBlob.commitment); + + // We add zeros before getting commitment as we do not store the blob along with + // all of the zeros + const dataWithZeros = Buffer.concat([ourBlob.data], BYTES_PER_BLOB); + expect(blobToKzgCommitment(dataWithZeros)).toEqual(ourBlob.commitment); const z = poseidon2Hash([blobItemsHash, ...ourBlob.commitmentToFields()]); expect(z).toEqual(ourBlob.challengeZ); - const res = computeKzgProof(ourBlob.data, ourBlob.challengeZ.toBuffer()); + const res = computeKzgProof(dataWithZeros, ourBlob.challengeZ.toBuffer()); expect(res[0]).toEqual(ourBlob.proof); expect(res[1]).toEqual(ourBlob.evaluationY); @@ -112,8 +116,9 @@ describe('blob', () => { const blobItemsHash = poseidon2Hash(blobItems); const blobs = Blob.getBlobs(blobItems); blobs.forEach(ourBlob => { - // const ourBlob = new Blob(blobItems.slice(j * FIELD_ELEMENTS_PER_BLOB, (j + 1) * FIELD_ELEMENTS_PER_BLOB), blobItemsHash); + // const ourBlob = Blob.fromFields(blobItems.slice(j * FIELD_ELEMENTS_PER_BLOB, (j + 1) * FIELD_ELEMENTS_PER_BLOB), blobItemsHash); expect(blobItemsHash).toEqual(ourBlob.fieldsHash); + expect(blobToKzgCommitment(ourBlob.data)).toEqual(ourBlob.commitment); const z = poseidon2Hash([blobItemsHash, ...ourBlob.commitmentToFields()]); @@ -132,4 +137,11 @@ describe('blob', () => { 
expect(isValid).toBe(true); }); }); + + it('Should serialise and deserialise a blob', () => { + const blob = Blob.fromFields([Fr.random(), Fr.random(), Fr.random()]); + const blobBuffer = blob.toBuffer(); + const deserialisedBlob = Blob.fromBuffer(blobBuffer); + expect(blob.fieldsHash.equals(deserialisedBlob.fieldsHash)).toBe(true); + }); }); diff --git a/yarn-project/foundation/src/blob/index.ts b/yarn-project/foundation/src/blob/index.ts index 6c1651f4c56..381e7141c09 100644 --- a/yarn-project/foundation/src/blob/index.ts +++ b/yarn-project/foundation/src/blob/index.ts @@ -3,7 +3,7 @@ import type { Blob as BlobBuffer } from 'c-kzg'; import { poseidon2Hash, sha256 } from '../crypto/index.js'; import { Fr } from '../fields/index.js'; -import { serializeToBuffer } from '../serialize/index.js'; +import { BufferReader, serializeToBuffer } from '../serialize/index.js'; // Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err: /* eslint-disable import/no-named-as-default-member */ @@ -36,48 +36,48 @@ export const VERSIONED_HASH_VERSION_KZG = 0x01; * A class to create, manage, and prove EVM blobs. */ export class Blob { - /** The blob to be broadcast on L1 in bytes form. */ - public readonly data: BlobBuffer; - /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */ - public readonly fieldsHash: Fr; - /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y. */ - public readonly challengeZ: Fr; - /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */ - public readonly evaluationY: Buffer; - /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */ - public readonly commitment: Buffer; - /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */ - public readonly proof: Buffer; - constructor( - /** All fields to be broadcast in the blob. */ - fields: Fr[], - /** If we want to broadcast more fields than fit into a blob, we hash those and used it as the fieldsHash across all blobs. - * This is much simpler and cheaper in the circuit to do, but MUST BE CHECKED before injecting here. - */ - multiBlobFieldsHash?: Fr, - ) { + constructor( + /** The blob to be broadcast on L1 in bytes form. */ + public readonly data: BlobBuffer, + /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */ + public readonly fieldsHash: Fr, + /** Challenge point z (= H(H(tx_effects), kzgCommitment). Used such that p(z) = y. */ + public readonly challengeZ: Fr, + /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */ + public readonly evaluationY: Buffer, + /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */ + public readonly commitment: Buffer, + /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */ + public readonly proof: Buffer, + ) {} + + static fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Blob { if (fields.length > FIELD_ELEMENTS_PER_BLOB) { throw new Error( `Attempted to overfill blob with ${fields.length} elements. 
The maximum is ${FIELD_ELEMENTS_PER_BLOB}`, ); } - this.data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB); + // TODO: do not store zeros + const dataWithoutZeros = serializeToBuffer(fields); + const data = Buffer.concat([dataWithoutZeros], BYTES_PER_BLOB); + // This matches the output of SpongeBlob.squeeze() in the blob circuit - this.fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields); - this.commitment = Buffer.from(blobToKzgCommitment(this.data)); - this.challengeZ = poseidon2Hash([this.fieldsHash, ...this.commitmentToFields()]); - const res = computeKzgProof(this.data, this.challengeZ.toBuffer()); - if (!verifyKzgProof(this.commitment, this.challengeZ.toBuffer(), res[1], res[0])) { + const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields); + const commitment = Buffer.from(blobToKzgCommitment(data)); + const challengeZ = poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]); + const res = computeKzgProof(data, challengeZ.toBuffer()); + if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) { throw new Error(`KZG proof did not verify.`); } - this.proof = Buffer.from(res[0]); - this.evaluationY = Buffer.from(res[1]); + const proof = Buffer.from(res[0]); + const evaluationY = Buffer.from(res[1]); + + return new Blob(dataWithoutZeros, fieldsHash, challengeZ, evaluationY, commitment, proof); } // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48] commitmentToFields(): [Fr, Fr] { - return [new Fr(this.commitment.subarray(0, 31)), new Fr(this.commitment.subarray(31, 48))]; + return commitmentToFields(this.commitment); } // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers @@ -93,6 +93,49 @@ return hash; } + toBuffer(): Buffer { + return Buffer.from( + serializeToBuffer( + this.data.length, + this.data, + this.fieldsHash, + this.challengeZ, + this.evaluationY.length, + this.evaluationY, + this.commitment.length, + this.commitment, + this.proof.length, + this.proof, + ), + ); + } + + static fromBuffer(buf: Buffer | BufferReader): Blob { + const reader = BufferReader.asReader(buf); + return new Blob( + reader.readUint8Array(), + reader.readObject(Fr), + reader.readObject(Fr), + reader.readBuffer(), + reader.readBuffer(), + reader.readBuffer(), + ); + } + + /** + * Pad the blob data to its full size before posting + */ + get fullData(): BlobBuffer { + return Buffer.concat([this.data], BYTES_PER_BLOB); + } + + /** + * Get the size of the blob in bytes + */ + getSize() { + return this.data.length; + } + // Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile: // * input[:32] - versioned_hash // * input[32:64] - z @@ -145,8 +188,13 @@ const res = []; for (let i = 0; i < numBlobs; i++) { const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? 
fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB; - res.push(new Blob(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash)); + res.push(Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash)); } return res; } } + +// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48] +function commitmentToFields(commitment: Buffer): [Fr, Fr] { + return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))]; +} diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 4828878d721..4aced06c365 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -100,6 +100,7 @@ export type EnvVar = | 'P2P_UDP_ANNOUNCE_ADDR' | 'P2P_UDP_LISTEN_ADDR' | 'PEER_ID_PRIVATE_KEY' + | 'PROVER_BLOB_SINK_URL' | 'PROOF_VERIFIER_L1_START_BLOCK' | 'PROOF_VERIFIER_POLL_INTERVAL_MS' | 'PROVER_AGENT_ENABLED' @@ -136,6 +137,7 @@ export type EnvVar = | 'REGISTRY_CONTRACT_ADDRESS' | 'ROLLUP_CONTRACT_ADDRESS' | 'SEQ_ALLOWED_SETUP_FN' + | 'SEQ_BLOB_SINK_URL' | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES' | 'SEQ_MAX_TX_PER_BLOCK' | 'SEQ_MIN_TX_PER_BLOCK' diff --git a/yarn-project/foundation/src/serialize/buffer_reader.ts b/yarn-project/foundation/src/serialize/buffer_reader.ts index 7abe3f59336..84b2ea86277 100644 --- a/yarn-project/foundation/src/serialize/buffer_reader.ts +++ b/yarn-project/foundation/src/serialize/buffer_reader.ts @@ -307,6 +307,20 @@ return this.readBytes(size); } + /** + * Reads a byte array from the current position of the reader and advances the index. + * The method first reads the size (number) of bytes to be read, and then returns + * a Uint8Array of that size containing the bytes. Useful for reading variable-length + * binary data encoded as (size, data) format. + * + * @returns A Uint8Array containing the read bytes. + */ + public readUint8Array(): Uint8Array { + const size = this.readNumber(); + this.#rangeCheck(size); + return this.readBytes(size); + } + /** * Reads and constructs a map object from the current buffer using the provided deserializer. * The method reads the number of entries in the map, followed by iterating through each key-value pair. 
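To make the `(size, data)` framing concrete, here is a minimal round-trip sketch; it assumes only APIs visible in this diff (`serializeToBuffer`, `BufferReader.asReader`, and the new `readUint8Array`), and the payload bytes are arbitrary example values:

```ts
import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';

// Frame a variable-length payload as (size, data), the same layout Blob.toBuffer() writes above.
const payload = Buffer.from([0xde, 0xad, 0xbe, 0xef]);
const framed = serializeToBuffer(payload.length, payload);

// Read it back: readUint8Array() consumes the length prefix, range-checks it,
// and returns exactly that many bytes.
const reader = BufferReader.asReader(framed);
const recovered = reader.readUint8Array();
console.assert(Buffer.from(recovered).equals(payload));
```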
diff --git a/yarn-project/foundation/src/serialize/serialize.ts b/yarn-project/foundation/src/serialize/serialize.ts index 6698a7081e2..fc2638ac3e7 100644 --- a/yarn-project/foundation/src/serialize/serialize.ts +++ b/yarn-project/foundation/src/serialize/serialize.ts @@ -109,6 +109,7 @@ export function deserializeField(buf: Buffer, offset = 0) { export type Bufferable = | boolean | Buffer + | Uint8Array | number | bigint | string diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts index dfc5df7f105..cb3abd077e3 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts @@ -4,7 +4,7 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { AztecKVTxPool } from './aztec_kv_tx_pool.js'; import { describeTxPool } from './tx_pool_test_suite.js'; -describe('In-Memory TX pool', () => { +describe('KV TX pool', () => { let txPool: AztecKVTxPool; beforeEach(() => { txPool = new AztecKVTxPool(openTmpStore(), new NoopTelemetryClient()); diff --git a/yarn-project/package.json b/yarn-project/package.json index 7d31e95a5c8..8205824f371 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -12,7 +12,7 @@ "format": "yarn prettier --cache -w .", "test": "FORCE_COLOR=true yarn workspaces foreach --exclude @aztec/aztec3-packages --exclude @aztec/end-to-end --exclude @aztec/prover-client -p -v run test && yarn workspaces foreach --include @aztec/end-to-end -p -v run test:unit", "build": "FORCE_COLOR=true yarn workspaces foreach --parallel --topological-dev --verbose --exclude @aztec/aztec3-packages --exclude @aztec/docs run build", - "build:fast": "cd foundation && yarn build && cd ../l1-artifacts && yarn build && cd ../circuits.js && yarn build && cd .. && yarn generate && tsc -b", + "build:fast": "cd foundation && yarn build && cd ../l1-artifacts && yarn build && cd ../circuits.js && yarn build && cd .. && yarn generate && echo 'Running tsc...' && tsc -b && echo 'tsc done.'", "build:dev": "./watch.sh", "generate": "FORCE_COLOR=true yarn workspaces foreach --parallel --topological-dev --verbose run generate", "clean": "yarn workspaces foreach -p -v run clean" @@ -26,6 +26,7 @@ "aztec-node", "validator-client", "bb-prover", + "blob-sink", "bot", "builder", "pxe", diff --git a/yarn-project/sequencer-client/package.json b/yarn-project/sequencer-client/package.json index d373ef337b5..a1198d571b3 100644 --- a/yarn-project/sequencer-client/package.json +++ b/yarn-project/sequencer-client/package.json @@ -63,6 +63,7 @@ "@types/node": "^18.7.23", "concurrently": "^7.6.0", "eslint": "^8.37.0", + "express": "^4.21.1", "jest": "^29.5.0", "jest-mock-extended": "^3.0.3", "levelup": "^5.1.1", diff --git a/yarn-project/sequencer-client/src/publisher/config.ts b/yarn-project/sequencer-client/src/publisher/config.ts index 367f2aa6677..d77efa57ca2 100644 --- a/yarn-project/sequencer-client/src/publisher/config.ts +++ b/yarn-project/sequencer-client/src/publisher/config.ts @@ -24,6 +24,11 @@ export type PublisherConfig = L1TxUtilsConfig & { * The interval to wait between publish retries. */ l1PublishRetryIntervalMS: number; + + /** + * The URL of the blob sink. 
+ */ + blobSinkUrl?: string; }; export const getTxSenderConfigMappings: ( @@ -72,6 +77,11 @@ description: 'The interval to wait between publish retries.', }, ...l1TxUtilsConfigMappings, + blobSinkUrl: { + env: `${scope}_BLOB_SINK_URL`, + description: 'The URL of the blob sink.', + parseEnv: (val?: string) => val, + }, }); export function getPublisherConfigFromEnv(scope: 'PROVER' | 'SEQ'): PublisherConfig { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 64ac88119d5..839dbcc4c25 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -13,6 +13,9 @@ import { sleep } from '@aztec/foundation/sleep'; import { RollupAbi } from '@aztec/l1-artifacts'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { jest } from '@jest/globals'; +import express, { json } from 'express'; +import { type Server } from 'http'; import { type MockProxy, mock } from 'jest-mock-extended'; import { type GetTransactionReceiptReturnType, @@ -68,6 +71,9 @@ class MockRollupContract { } } +const BLOB_SINK_PORT = 5052; +const BLOB_SINK_URL = `http://localhost:${BLOB_SINK_PORT}`; + describe('L1Publisher', () => { let rollupContractRead: MockProxy; let rollupContractWrite: MockProxy; @@ -85,11 +91,16 @@ let blockHash: Buffer; let body: Buffer; + let mockBlobSinkServer: Server | undefined = undefined; + + // An L1 publisher with some private methods exposed let publisher: L1Publisher; const GAS_GUESS = 300_000n; beforeEach(() => { + mockBlobSinkServer = undefined; + l2Block = L2Block.random(42); header = l2Block.header.toBuffer(); @@ -112,6 +123,7 @@ publicClient = mock(); l1TxUtils = mock(); const config = { + blobSinkUrl: BLOB_SINK_URL, l1RpcUrl: `http://127.0.0.1:8545`, l1ChainId: 1, publisherPrivateKey: `0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80`, @@ -138,18 +150,61 @@ (l1TxUtils as any).estimateGas.mockResolvedValue(GAS_GUESS); }); + const closeServer = (server: Server): Promise<void> => { + return new Promise((resolve, reject) => { + server.close(err => { + if (err) { + reject(err); + return; + } + resolve(); + }); + }); + }; + + afterEach(async () => { + if (mockBlobSinkServer) { + await closeServer(mockBlobSinkServer); + mockBlobSinkServer = undefined; + } + }); + + // Run a mock blob sink in the background, and test that the correct data is sent to it + const expectBlobsAreSentToBlobSink = (_blockId: string, blobs: Blob[]) => { + const sendToBlobSinkSpy = jest.spyOn(publisher as any, 'sendBlobsToBlobSink'); + + const app = express(); + app.use(json({ limit: '10mb' })); + + app.post('/blob_sidecar', (req, res) => { + const blobsBuffers = req.body.blobs.map((b: { index: number; blob: { type: string; data: string } }) => + Blob.fromBuffer(Buffer.from(b.blob.data)), + ); + + expect(blobsBuffers).toEqual(blobs); + res.status(200).send(); + }); + + mockBlobSinkServer = app.listen(BLOB_SINK_PORT); + + return sendToBlobSinkSpy; + }; + it('publishes and propose l2 block to l1', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash); - const result = await publisher.proposeL2Block(l2Block); - - expect(result).toEqual(true); - 
const kzg = Blob.getViemKzgInstance(); const blobs = Blob.getBlobs(l2Block.body.toBlobFields()); + // Check the blobs were forwarded to the blob sink service + const sendToBlobSinkSpy = expectBlobsAreSentToBlobSink(blockHash.toString('hex'), blobs); + + const result = await publisher.proposeL2Block(l2Block); + + expect(result).toEqual(true); + const blobInput = Blob.getEthBlobEvaluationInputs(blobs); const args = [ @@ -173,8 +228,13 @@ describe('L1Publisher', () => { data: encodeFunctionData({ abi: rollupContract.abi, functionName: 'propose', args }), }, { fixedGas: GAS_GUESS + L1Publisher.PROPOSE_GAS_GUESS }, - { blobs: blobs.map(b => b.data), kzg, maxFeePerBlobGas: 10000000000n }, + { blobs: blobs.map(b => b.fullData), kzg, maxFeePerBlobGas: 10000000000n }, ); + + expect(sendToBlobSinkSpy).toHaveBeenCalledTimes(1); + // If this does not return true, then the mocked server will have errored, and + // the expects that run there will have failed + expect(sendToBlobSinkSpy).toHaveReturnedWith(Promise.resolve(true)); }); it('does not retry if sending a propose tx fails', async () => { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index a89969ede84..3e71a2b2d19 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -102,6 +102,8 @@ export type MinimalTransactionReceipt = { logs: any[]; /** Block number in which this tx was mined. */ blockNumber: bigint; + /** The block hash in which this tx was mined */ + blockHash: `0x${string}`; }; /** Arguments to the process method of the rollup contract */ @@ -170,6 +172,8 @@ export class L1Publisher { protected account: PrivateKeyAccount; protected ethereumSlotDuration: bigint; + private blobSinkUrl: string | undefined; + // @note - with blobs, the below estimate seems too large. // Total used for full block from int_l1_pub e2e test: 1m (of which 86k is 1x blob) // Total used for emptier block from above test: 429k (of which 84k is 1x blob) @@ -184,6 +188,7 @@ export class L1Publisher { ) { this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 60_000; this.ethereumSlotDuration = BigInt(config.ethereumSlotDuration); + this.blobSinkUrl = config.blobSinkUrl; this.metrics = new L1PublisherMetrics(client, 'L1Publisher'); const { l1RpcUrl: rpcUrl, l1ChainId: chainId, publisherPrivateKey, l1Contracts } = config; @@ -536,15 +541,18 @@ export class L1Publisher { const consensusPayload = new ConsensusPayload(block.header, block.archive.root, txHashes ?? []); const digest = getHashedSignaturePayload(consensusPayload, SignatureDomainSeperator.blockAttestation); + + const blobs = Blob.getBlobs(block.body.toBlobFields()); const proposeTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), blockHash: block.header.hash().toBuffer(), body: block.body.toBuffer(), - blobs: Blob.getBlobs(block.body.toBlobFields()), + blobs, attestations, txHashes: txHashes ?? 
[], }; + + // Publish body and propose block (if not already published) if (this.interrupted) { this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx); @@ -587,6 +595,21 @@ }; this.log.verbose(`Published L2 block to L1 rollup contract`, { ...stats, ...ctx }); this.metrics.recordProcessBlockTx(timer.ms(), stats); + + // Send the blobs to the blob sink + this.sendBlobsToBlobSink(receipt.blockHash, blobs).then( + (success: boolean) => { + if (success) { + this.log.info('Successfully sent blobs to blob sink'); + } else { + this.log.error('Failed to send blobs to blob sink'); + } + }, + _err => { + this.log.error('Failed to send blobs to blob sink'); + }, + ); + return true; } @@ -992,7 +1015,9 @@ fixedGas: gas, }, { - blobs: encodedData.blobs.map(b => b.data), + // TODO(md): remove full data field, just use snappy compression when + // sending to the blob sink server + blobs: encodedData.blobs.map(b => b.fullData), kzg, maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS }, @@ -1031,7 +1056,7 @@ }, { fixedGas: gas }, { - blobs: encodedData.blobs.map(b => b.data), + blobs: encodedData.blobs.map(b => b.fullData), kzg, maxFeePerBlobGas: 10000000000n, //This is 10 gwei, taken from DEFAULT_MAX_FEE_PER_GAS }, @@ -1073,6 +1098,7 @@ gasPrice: receipt.effectiveGasPrice, logs: receipt.logs, blockNumber: receipt.blockNumber, + blockHash: receipt.blockHash, }; } @@ -1088,9 +1114,51 @@ protected async sleepOrInterrupted() { await this.interruptibleSleep.sleep(this.sleepTimeMs); } + + /** + * Send blobs to the blob sink + * + * If a blob sink url is configured, then we send blobs to the blob sink + * - for now we use the blockHash as the identifier for the blobs; + * In the future this will move to be the beacon block id - which takes a bit more work + * to calculate and will need to be mocked in e2e tests + */ + protected async sendBlobsToBlobSink(blockHash: string, blobs: Blob[]): Promise<boolean> { + // TODO(md): for now we are assuming the indexes of the blobs will be 0, 1, 2 + // When in reality they will not, but for testing purposes this is fine + if (!this.blobSinkUrl) { + this.log.verbose('No blob sink url configured'); + return false; + } + + this.log.verbose(`Sending ${blobs.length} blobs to blob sink`); + try { + const res = await fetch(`${this.blobSinkUrl}/blob_sidecar`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + // eslint-disable-next-line camelcase + block_id: blockHash, + blobs: blobs.map((b, i) => ({ blob: b.toBuffer(), index: i })), + }), + }); + + if (res.ok) { + return true; + } + + this.log.error('Failed to send blobs to blob sink', res.status); + return false; + } catch (err) { + this.log.error(`Error sending blobs to blob sink`, err); + return false; + } + } } -/** +/* * Returns cost of calldata usage in Ethereum. * @param data - Calldata. * @returns 4 for each zero byte, 16 for each nonzero. diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 84fe768da50..d4889c3b2d9 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -6,6 +6,9 @@ * @see {@link https://opentelemetry.io/docs/specs/semconv/general/metrics/ | OpenTelemetry Metrics} for naming conventions. 
*/ +export const BLOB_SINK_OBJECTS_IN_BLOB_STORE = 'aztec.blob_sink.objects_in_blob_store'; +export const BLOB_SINK_BLOB_SIZE = 'aztec.blob_sink.blob_size'; + /** How long it takes to simulate a circuit */ export const CIRCUIT_SIMULATION_DURATION = 'aztec.circuit.simulation.duration'; export const CIRCUIT_SIMULATION_INPUT_SIZE = 'aztec.circuit.simulation.input_size'; diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index e511df70a0e..3f714d5946e 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -334,6 +334,32 @@ __metadata: languageName: node linkType: soft +"@aztec/blob-sink@workspace:^, @aztec/blob-sink@workspace:blob-sink": + version: 0.0.0-use.local + resolution: "@aztec/blob-sink@workspace:blob-sink" + dependencies: + "@aztec/circuit-types": "workspace:^" + "@aztec/foundation": "workspace:^" + "@aztec/kv-store": "workspace:*" + "@aztec/telemetry-client": "workspace:*" + "@jest/globals": ^29.5.0 + "@types/jest": ^29.5.0 + "@types/memdown": ^3.0.0 + "@types/node": ^18.7.23 + "@types/source-map-support": ^0.5.10 + "@types/supertest": ^6.0.2 + express: ^4.21.1 + jest: ^29.5.0 + jest-mock-extended: ^3.0.3 + source-map-support: ^0.5.21 + supertest: ^7.0.0 + ts-node: ^10.9.1 + tslib: ^2.4.0 + typescript: ^5.0.4 + zod: ^3.23.8 + languageName: unknown + linkType: soft + "@aztec/bot@workspace:^, @aztec/bot@workspace:bot": version: 0.0.0-use.local resolution: "@aztec/bot@workspace:bot" @@ -531,6 +557,7 @@ __metadata: "@aztec/aztec-node": "workspace:^" "@aztec/aztec.js": "workspace:^" "@aztec/bb-prover": "workspace:^" + "@aztec/blob-sink": "workspace:^" "@aztec/bot": "workspace:^" "@aztec/circuit-types": "workspace:^" "@aztec/circuits.js": "workspace:^" @@ -794,7 +821,7 @@ __metadata: languageName: unknown linkType: soft -"@aztec/kv-store@workspace:^, @aztec/kv-store@workspace:kv-store": +"@aztec/kv-store@workspace:*, @aztec/kv-store@workspace:^, @aztec/kv-store@workspace:kv-store": version: 0.0.0-use.local resolution: "@aztec/kv-store@workspace:kv-store" dependencies: @@ -1187,6 +1214,7 @@ __metadata: "@types/node": ^18.7.23 concurrently: ^7.6.0 eslint: ^8.37.0 + express: ^4.21.1 jest: ^29.5.0 jest-mock-extended: ^3.0.3 levelup: ^5.1.1 @@ -1239,7 +1267,7 @@ __metadata: languageName: unknown linkType: soft -"@aztec/telemetry-client@workspace:^, @aztec/telemetry-client@workspace:telemetry-client": +"@aztec/telemetry-client@workspace:*, @aztec/telemetry-client@workspace:^, @aztec/telemetry-client@workspace:telemetry-client": version: 0.0.0-use.local resolution: "@aztec/telemetry-client@workspace:telemetry-client" dependencies: @@ -5673,6 +5701,18 @@ __metadata: languageName: node linkType: hard +"@types/superagent@npm:^8.1.0": + version: 8.1.9 + resolution: "@types/superagent@npm:8.1.9" + dependencies: + "@types/cookiejar": ^2.1.5 + "@types/methods": ^1.1.4 + "@types/node": "*" + form-data: ^4.0.0 + checksum: 530d8c2e87706315c82c8c9696500c40621de3353bc54ea9b104947f3530243abf54d0a49a6ae219d4947606a102ceb94bedfc43b9cc49f74069a18cbb3be8e2 + languageName: node + linkType: hard + "@types/supertest@npm:^2.0.12": version: 2.0.16 resolution: "@types/supertest@npm:2.0.16" @@ -5682,6 +5722,16 @@ __metadata: languageName: node linkType: hard +"@types/supertest@npm:^6.0.2": + version: 6.0.2 + resolution: "@types/supertest@npm:6.0.2" + dependencies: + "@types/methods": ^1.1.4 + "@types/superagent": ^8.1.0 + checksum: 1eafa472665757a6fd984439d11f388ae0480c6d243a6884066c474c4e0357de5373316488da503b1690c3163e075ca8c64c0c4853b3bb7deb09e05d1b64e556 + languageName: node + linkType: hard 
+ "@types/wrap-ansi@npm:^3.0.0": version: 3.0.0 resolution: "@types/wrap-ansi@npm:3.0.0" @@ -10659,7 +10709,7 @@ __metadata: languageName: node linkType: hard -"express@npm:^4.19.2": +"express@npm:^4.19.2, express@npm:^4.21.1": version: 4.21.1 resolution: "express@npm:4.21.1" dependencies: @@ -11032,6 +11082,17 @@ __metadata: languageName: node linkType: hard +"formidable@npm:^3.5.1": + version: 3.5.2 + resolution: "formidable@npm:3.5.2" + dependencies: + dezalgo: ^1.0.4 + hexoid: ^2.0.0 + once: ^1.4.0 + checksum: 7c7972e8a15d45e6d2315a54d77f0900e5c610aff9b5730de326e2b34630604e1eff6c9d666e5504fba4c8818ccaed682d76a4fdb718b160c6afa2c250bf6a76 + languageName: node + linkType: hard + "forwarded@npm:0.2.0": version: 0.2.0 resolution: "forwarded@npm:0.2.0" @@ -11630,6 +11691,13 @@ __metadata: languageName: node linkType: hard +"hexoid@npm:^2.0.0": + version: 2.0.0 + resolution: "hexoid@npm:2.0.0" + checksum: 69a92b2bcd7c81c16557de017c59511643e3cb1f0d6e9e9b705859b798bfd059088e4d3cc85e9fe0a9e431007430f15393303c3e74320b5c4c28cb64fc7d8bb4 + languageName: node + linkType: hard + "hmac-drbg@npm:^1.0.1": version: 1.0.1 resolution: "hmac-drbg@npm:1.0.1" @@ -18260,6 +18328,23 @@ __metadata: languageName: node linkType: hard +"superagent@npm:^9.0.1": + version: 9.0.2 + resolution: "superagent@npm:9.0.2" + dependencies: + component-emitter: ^1.3.0 + cookiejar: ^2.1.4 + debug: ^4.3.4 + fast-safe-stringify: ^2.1.1 + form-data: ^4.0.0 + formidable: ^3.5.1 + methods: ^1.1.2 + mime: 2.6.0 + qs: ^6.11.0 + checksum: f471461b21f034d844fd0aca332128d61e3afb75c2ee5950f3339f2a3b5ca8b23e2861224f19ad9b43f21c9184d28b7d9384af5a4fde64fdef479efdb15036db + languageName: node + linkType: hard + "supertest@npm:^6.3.3": version: 6.3.4 resolution: "supertest@npm:6.3.4" @@ -18270,6 +18355,16 @@ __metadata: languageName: node linkType: hard +"supertest@npm:^7.0.0": + version: 7.0.0 + resolution: "supertest@npm:7.0.0" + dependencies: + methods: ^1.1.2 + superagent: ^9.0.1 + checksum: 974743aa511ec0f387135dfca05e378f6202366c81f0850dfbcc2c3d6fc690e856dda27e175c70db38510e21d87f331c0f62e1a942afea4c447953c647c26c8b + languageName: node + linkType: hard + "supports-color@npm:^2.0.0": version: 2.0.0 resolution: "supports-color@npm:2.0.0"