Showing 7 changed files with 326 additions and 248 deletions.
@@ -0,0 +1,200 @@
// Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err:
import cKzg from 'c-kzg';
import type { Blob as BlobBuffer } from 'c-kzg';

import { poseidon2Hash, sha256 } from '../crypto/index.js';
import { Fr } from '../fields/index.js';
import { BufferReader, serializeToBuffer } from '../serialize/index.js';
import { deserializeEncodedBlobFields } from './encoding.js';

/* eslint-disable import/no-named-as-default-member */
const { BYTES_PER_BLOB, FIELD_ELEMENTS_PER_BLOB, blobToKzgCommitment, computeKzgProof, verifyKzgProof } = cKzg;

// The prefix to the EVM blobHash, defined here: https://eips.ethereum.org/EIPS/eip-4844#specification
export const VERSIONED_HASH_VERSION_KZG = 0x01;

/**
 * A class to create, manage, and prove EVM blobs.
 */
export class Blob {
  constructor(
    /** The blob to be broadcast on L1 in bytes form. */
    public readonly data: BlobBuffer,
    /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
    public readonly fieldsHash: Fr,
    /** Challenge point z (= H(H(tx_effects), kzgCommitment)). Used such that p(z) = y. */
    public readonly challengeZ: Fr,
    /** Evaluation y = p(z), where p() is the blob polynomial. A BLS12 field element, represented as a BigNum in Noir and a bigint in TypeScript. */
    public readonly evaluationY: Buffer,
    /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
    public readonly commitment: Buffer,
    /** KZG opening proof for y = p(z). The commitment to the quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */
    public readonly proof: Buffer,
  ) {}

  static fromEncodedBlobBuffer(blob: BlobBuffer, multiBlobFieldsHash?: Fr): Blob {
    const fields: Fr[] = deserializeEncodedBlobFields(blob);
    return Blob.fromFields(fields, multiBlobFieldsHash);
  }

  static fromFields(fields: Fr[], multiBlobFieldsHash?: Fr): Blob {
    if (fields.length > FIELD_ELEMENTS_PER_BLOB) {
      throw new Error(
        `Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`,
      );
    }

    const data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB);

    // This matches the output of SpongeBlob.squeeze() in the blob circuit
    const fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields);
    const commitment = Buffer.from(blobToKzgCommitment(data));
    const challengeZ = poseidon2Hash([fieldsHash, ...commitmentToFields(commitment)]);
    const res = computeKzgProof(data, challengeZ.toBuffer());
    if (!verifyKzgProof(commitment, challengeZ.toBuffer(), res[1], res[0])) {
      throw new Error(`KZG proof did not verify.`);
    }
    const proof = Buffer.from(res[0]);
    const evaluationY = Buffer.from(res[1]);

    return new Blob(data, fieldsHash, challengeZ, evaluationY, commitment, proof);
  }

  // TODO: add unit test
  static fromJson(json: { blob: string; kzg_commitment: string; kzg_proof: string }): Blob {
    const blobBuffer = Buffer.from(json.blob.slice(2), 'hex');

    const blob = Blob.fromEncodedBlobBuffer(blobBuffer);

    if (blob.commitment.toString('hex') !== json.kzg_commitment.slice(2)) {
      throw new Error('KZG commitment does not match');
    }

    return blob;
  }

  // TODO: think if naming should change for encoded / non encoded blob payloads
  toFields(): Fr[] {
    return deserializeEncodedBlobFields(this.data);
  }

  toEncodedFields(): Fr[] {
    return deserializeEncodedBlobFields(this.data);
  }

  // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
  commitmentToFields(): [Fr, Fr] {
    return commitmentToFields(this.commitment);
  }

  // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
  getEthVersionedBlobHash(): Buffer {
    const hash = sha256(this.commitment);
    hash[0] = VERSIONED_HASH_VERSION_KZG;
    return hash;
  }

  static getEthVersionedBlobHash(commitment: Buffer): Buffer {
    const hash = sha256(commitment);
    hash[0] = VERSIONED_HASH_VERSION_KZG;
    return hash;
  }

  toBuffer(): Buffer {
    return Buffer.from(
      serializeToBuffer(
        this.data.length,
        this.data,
        this.fieldsHash,
        this.challengeZ,
        this.evaluationY.length,
        this.evaluationY,
        this.commitment.length,
        this.commitment,
        this.proof.length,
        this.proof,
      ),
    );
  }

  static fromBuffer(buf: Buffer | BufferReader): Blob {
    const reader = BufferReader.asReader(buf);
    return new Blob(
      reader.readUint8Array(),
      reader.readObject(Fr),
      reader.readObject(Fr),
      reader.readBuffer(),
      reader.readBuffer(),
      reader.readBuffer(),
    );
  }

  /**
   * Get the size of the blob in bytes
   */
  getSize() {
    return this.data.length;
  }

  // Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile:
  //   * input[:32]     - versioned_hash
  //   * input[32:64]   - z
  //   * input[64:96]   - y
  //   * input[96:144]  - commitment C
  //   * input[144:192] - proof (a commitment to the quotient polynomial q(X))
  // See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
  getEthBlobEvaluationInputs(): `0x${string}` {
    const buf = Buffer.concat([
      this.getEthVersionedBlobHash(),
      this.challengeZ.toBuffer(),
      this.evaluationY,
      this.commitment,
      this.proof,
    ]);
    return `0x${buf.toString('hex')}`;
  }

  static getEthBlobEvaluationInputs(blobs: Blob[]): `0x${string}` {
    let buf = Buffer.alloc(0);
    blobs.forEach(blob => {
      buf = Buffer.concat([
        buf,
        blob.getEthVersionedBlobHash(),
        blob.challengeZ.toBuffer(),
        blob.evaluationY,
        blob.commitment,
        blob.proof,
      ]);
    });
    // For multiple blobs, we prefix the number of blobs:
    const lenBuf = Buffer.alloc(1);
    lenBuf.writeUint8(blobs.length);
    buf = Buffer.concat([lenBuf, buf]);
    return `0x${buf.toString('hex')}`;
  }

  static getViemKzgInstance() {
    return {
      blobToKzgCommitment: cKzg.blobToKzgCommitment,
      computeBlobKzgProof: cKzg.computeBlobKzgProof,
    };
  }

  // Returns as many blobs as we require to broadcast the given fields.
  // Assumes we share the fields hash between all blobs.
  static getBlobs(fields: Fr[]): Blob[] {
    const numBlobs = Math.max(Math.ceil(fields.length / FIELD_ELEMENTS_PER_BLOB), 1);
    const multiBlobFieldsHash = poseidon2Hash(fields);
    const res = [];
    for (let i = 0; i < numBlobs; i++) {
      const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB;
      res.push(Blob.fromFields(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
    }
    return res;
  }
}

// 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
function commitmentToFields(commitment: Buffer): [Fr, Fr] {
  return [new Fr(commitment.subarray(0, 31)), new Fr(commitment.subarray(31, 48))];
}
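For reference, here is a minimal usage sketch of the class above. It is hypothetical and untested: it assumes the file above is named blob.ts, that `Fr` can be constructed from a bigint, and that the c-kzg trusted setup has already been loaded elsewhere (e.g. via `cKzg.loadTrustedSetup`).

import { Fr } from '../fields/index.js';
import { Blob } from './blob.js'; // hypothetical path for the file above

// Split an arbitrary list of fields across as many blobs as required.
// getBlobs() shares a single poseidon2 fields hash between all blobs.
const fields = [new Fr(1n), new Fr(2n), new Fr(3n)];
const blobs = Blob.getBlobs(fields);

// Versioned hash per EIP-4844's kzg_to_versioned_hash: 0x01 ++ sha256(C)[1:].
const versionedHash = blobs[0].getEthVersionedBlobHash();

// Concatenated inputs for the point evaluation precompile on L1;
// the static variant prefixes the blob count when batching several blobs.
const inputs = Blob.getEthBlobEvaluationInputs(blobs);
console.log(versionedHash.toString('hex'), inputs);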
@@ -0,0 +1,71 @@
import { Fr } from '@aztec/foundation/fields';
import { BufferReader, FieldReader } from '@aztec/foundation/serialize';

import type { Blob as BlobBuffer } from 'c-kzg';

// These are helper constants used to decode tx effects from blob encoded fields
export const TX_START_PREFIX = 8392562855083340404n;
export const TX_START_PREFIX_BYTES_LENGTH = TX_START_PREFIX.toString(16).length / 2;
// 7 bytes for: | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode |
export const TX_EFFECT_PREFIX_BYTE_LENGTH = TX_START_PREFIX_BYTES_LENGTH + 7;

/**
 * Deserializes a blob buffer into an array of field elements.
 *
 * Blobs are converted into BN254 fields to perform a poseidon2 hash on them (the fieldsHash).
 * This method is sparse, meaning it does not include the trailing zeros at the end of the blob.
 *
 * However, we cannot simply trim the zeros from the end of the blob, as some logs may include zeros
 * within them. If a log ends in a run of zeros, such as the one below:
 *   length 7: [a, b, c, d, e, 0, 0]
 * trimming the trailing zeros would produce the wrong hash.
 *
 * Each transaction's logs start with a TX start prefix, which includes a marker string followed
 * by the length (in field elements) of the transaction's logs.
 *
 * This function finds the end of the last transaction's logs and returns the array up to that point.
 * We search for a series of TX prefixes, advancing the cursor in the field reader past each
 * transaction's logs, until we hit a field that is not a TX prefix; this indicates that we have
 * reached the end of the last transaction's logs.
 *
 * +------------------+------------------+------------------+------------------+
 * | TX1 Start Prefix | TX1 Log Fields   | TX2 Start Prefix | Padded zeros     |
 * | [3 a,b,c]        | [3, a, b, c]     | [4 d,e,f,g]      | [0, 0, 0, .., 0] |
 * +------------------+------------------+------------------+------------------+
 *                                                          ^
 *                                                          |
 *                             Function reads until here ---+
 *
 * @param blob - The blob buffer to deserialize.
 * @returns An array of field elements.
 */
export function deserializeEncodedBlobFields(blob: BlobBuffer): Fr[] {
  // Convert blob buffer to array of field elements
  const reader = BufferReader.asReader(blob);
  const array = reader.readArray(blob.length >> 5, Fr); // >> 5 = / 32 (bytes per field)
  const fieldReader = FieldReader.asReader(array);

  // Read fields until we hit zeros at the end
  while (!fieldReader.isFinished()) {
    const currentField = fieldReader.peekField();

    // Stop when we hit a zero field
    if (currentField.isZero()) {
      break;
    }

    // Skip the remaining fields in this transaction
    const len = getLengthFromFirstField(currentField);
    fieldReader.skip(len);
  }

  // Return array up to last non-zero field
  return array.slice(0, fieldReader.cursor);
}

export function getLengthFromFirstField(firstField: Fr): number {
  const buf = firstField.toBuffer().subarray(-TX_EFFECT_PREFIX_BYTE_LENGTH);
  return new Fr(buf.subarray(TX_START_PREFIX_BYTES_LENGTH + 1, TX_START_PREFIX_BYTES_LENGTH + 3)).toNumber();
}
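And a small sketch of the byte layout that getLengthFromFirstField decodes, following the `| 0 | txlen[0] | txlen[1] | ... |` comment above. This is illustrative only: the revert-code bytes are left as zeros, the import path for the file above is assumed, and `Fr` is assumed to round-trip a 32-byte big-endian buffer.

import { Fr } from '@aztec/foundation/fields';

import {
  TX_EFFECT_PREFIX_BYTE_LENGTH,
  TX_START_PREFIX,
  TX_START_PREFIX_BYTES_LENGTH,
  getLengthFromFirstField,
} from './encoding.js'; // hypothetical path for the file above

// First-field layout, packed into the low-order bytes of a 32-byte field:
// | TX_START_PREFIX (8 bytes) | 0 | txlen[0] | txlen[1] | 0 | REVERT_CODE_PREFIX | 0 | revertCode |
const txLen = 42; // illustrative log length, in field elements
const suffix = Buffer.alloc(TX_EFFECT_PREFIX_BYTE_LENGTH);
suffix.set(Buffer.from(TX_START_PREFIX.toString(16), 'hex'), 0); // the 8-byte marker string
suffix.writeUInt16BE(txLen, TX_START_PREFIX_BYTES_LENGTH + 1); // txlen[0], txlen[1]

// Left-pad to 32 bytes so the prefix occupies the field's low-order bytes.
const firstField = new Fr(Buffer.concat([Buffer.alloc(32 - TX_EFFECT_PREFIX_BYTE_LENGTH), suffix]));
console.log(getLengthFromFirstField(firstField)); // 42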