Skip to content

Commit

Permalink
feat: unencrypted log filtering (#2600)
Browse files Browse the repository at this point in the history
Fixes #1498
Fixes #1500
  • Loading branch information
benesjan authored Oct 12, 2023
1 parent dd7df14 commit 7ae554a
Show file tree
Hide file tree
Showing 43 changed files with 1,008 additions and 156 deletions.
9 changes: 8 additions & 1 deletion docs/docs/dev_docs/contracts/syntax/events.md
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ Once emitted, unencrypted events are stored in AztecNode and can be queried by a
<TabItem value="cli" label="Aztec CLI">

```bash
aztec-cli get-logs --from 5 --limit 1
aztec-cli get-logs --fromBlock 5
```

</TabItem>
Expand All @@ -122,6 +122,13 @@ aztec-cli get-logs --from 5 --limit 1
</TabItem>
</Tabs>

The get-logs functionality provides a variety of filtering options.
To display them, run:

```bash
aztec-cli get-logs --help
```

## Costs

All event data is pushed to Ethereum as calldata by the sequencer and for this reason the cost of emitting an event is non-trivial.
Expand Down
2 changes: 1 addition & 1 deletion yarn-project/acir-simulator/src/acvm/oracle/oracle.ts
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ export class Oracle {
const logPayload = Buffer.concat(message.map(charBuffer => convertACVMFieldToBuffer(charBuffer).subarray(-1)));
const log = new UnencryptedL2Log(
AztecAddress.fromString(contractAddress),
FunctionSelector.fromField(fromACVMField(eventSelector)),
FunctionSelector.fromField(fromACVMField(eventSelector)), // TODO https://github.com/AztecProtocol/aztec-packages/issues/2632
logPayload,
);

Expand Down
2 changes: 1 addition & 1 deletion yarn-project/archiver/src/archiver/archiver.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ describe('Archiver', () => {

beforeEach(() => {
publicClient = mock<PublicClient<HttpTransport, Chain>>();
archiverStore = new MemoryArchiverStore();
archiverStore = new MemoryArchiverStore(1000);
});

it('can start, sync and stop and handle l1 to l2 messages and logs', async () => {
Expand Down
15 changes: 13 additions & 2 deletions yarn-project/archiver/src/archiver/archiver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import {
ContractDataSource,
EncodedContractFunction,
ExtendedContractData,
GetUnencryptedLogsResponse,
INITIAL_L2_BLOCK_NUM,
L1ToL2Message,
L1ToL2MessageSource,
Expand All @@ -18,6 +19,7 @@ import {
L2BlockSource,
L2LogsSource,
L2Tx,
LogFilter,
LogType,
TxHash,
} from '@aztec/types';
Expand Down Expand Up @@ -100,7 +102,7 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource
transport: http(chain.rpcUrl),
pollingInterval: config.viemPollingIntervalMS,
});
const archiverStore = new MemoryArchiverStore();
const archiverStore = new MemoryArchiverStore(config.maxLogs ?? 1000);
const archiver = new Archiver(
publicClient,
config.l1Contracts.rollupAddress,
Expand Down Expand Up @@ -165,7 +167,7 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource
* This is a problem for example when setting the last block number marker for L1 to L2 messages -
* this.lastProcessedBlockNumber = currentBlockNumber;
* It's possible that we actually received messages in block currentBlockNumber + 1 meaning the next time
* we do this sync we get the same message again. Addtionally, the call to get cancelled L1 to L2 messages
* we do this sync we get the same message again. Additionally, the call to get cancelled L1 to L2 messages
* could read from a block not present when retrieving pending messages. If a message was added and cancelled
* in the same eth block then we could try and cancel a non-existent pending message.
*
Expand Down Expand Up @@ -389,6 +391,15 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource
return this.store.getLogs(from, limit, logType);
}

/**
 * Gets unencrypted logs based on the provided filter.
 * Pure delegation to the underlying archiver store; no caching or transformation
 * happens at this layer.
 * @param filter - The filter to apply to the logs (block range, tx hash, contract
 * address, selector, or paging cursor — see LogFilter).
 * @returns The requested logs, plus a flag indicating whether the store's
 * maximum-log limit was hit (presumably truncating the result — confirm in store impl).
 */
getUnencryptedLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
  return this.store.getUnencryptedLogs(filter);
}

/**
* Gets the number of the latest L2 block processed by the block source implementation.
* @returns The number of the latest L2 block processed by the block source implementation.
Expand Down
238 changes: 234 additions & 4 deletions yarn-project/archiver/src/archiver/archiver_store.test.ts
Original file line number Diff line number Diff line change
@@ -1,20 +1,31 @@
import { INITIAL_L2_BLOCK_NUM, L2Block, L2BlockL2Logs, LogType } from '@aztec/types';
import {
INITIAL_L2_BLOCK_NUM,
L2Block,
L2BlockContext,
L2BlockL2Logs,
LogId,
LogType,
TxHash,
UnencryptedL2Log,
} from '@aztec/types';

import { randomBytes } from 'crypto';

import { ArchiverDataStore, MemoryArchiverStore } from './archiver_store.js';

describe('Archiver Memory Store', () => {
let archiverStore: ArchiverDataStore;

beforeEach(() => {
archiverStore = new MemoryArchiverStore();
archiverStore = new MemoryArchiverStore(1000);
});

it('can store and retrieve blocks', async () => {
const blocks = Array(10)
.fill(0)
.map((_, index) => L2Block.random(index));
await archiverStore.addL2Blocks(blocks);
// Offset indices by INTIAL_L2_BLOCK_NUM to ensure we are correctly aligned
// Offset indices by INITIAL_L2_BLOCK_NUM to ensure we are correctly aligned
for (const [from, limit] of [
[0 + INITIAL_L2_BLOCK_NUM, 10],
[3 + INITIAL_L2_BLOCK_NUM, 3],
Expand All @@ -34,7 +45,7 @@ describe('Archiver Memory Store', () => {
.fill(0)
.map(_ => L2BlockL2Logs.random(6, 3, 2));
await archiverStore.addLogs(logs, logType);
// Offset indices by INTIAL_L2_BLOCK_NUM to ensure we are correctly aligned
// Offset indices by INITIAL_L2_BLOCK_NUM to ensure we are correctly aligned
for (const [from, limit] of [
[0 + INITIAL_L2_BLOCK_NUM, 10],
[3 + INITIAL_L2_BLOCK_NUM, 3],
Expand Down Expand Up @@ -71,4 +82,223 @@ describe('Archiver Memory Store', () => {
);
},
);

describe('getUnencryptedLogs config', () => {
  it('does not return more than "maxLogs" logs', async () => {
    // Cap the store at a small log count so an unfiltered query is guaranteed to exceed it.
    const logCap = 5;
    archiverStore = new MemoryArchiverStore(logCap);
    const seededBlocks = Array.from({ length: 10 }, (_, i: number) => L2Block.random(i + 1, 4, 2, 3, 2, 2));

    await archiverStore.addL2Blocks(seededBlocks);
    const unencryptedLogs = seededBlocks.map(b => b.newUnencryptedLogs!);
    await archiverStore.addLogs(unencryptedLogs, LogType.UNENCRYPTED);

    // An unfiltered query must report the cap was hit and truncate to exactly `logCap` entries.
    const response = await archiverStore.getUnencryptedLogs({});

    expect(response.maxLogsHit).toBeTruthy();
    expect(response.logs.length).toEqual(logCap);
  });
});

describe('getUnencryptedLogs filtering', () => {
const txsPerBlock = 4;
const numPublicFunctionCalls = 3;
const numUnencryptedLogs = 4;
const numBlocks = 10;
let blocks: L2Block[];

beforeEach(async () => {
  // Seed the store with `numBlocks` random blocks whose unencrypted logs the tests filter over.
  blocks = Array.from({ length: numBlocks }, (_, i: number) =>
    L2Block.random(i + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
  );

  await archiverStore.addL2Blocks(blocks);
  const logsPerBlock = blocks.map(b => b.newUnencryptedLogs!);
  await archiverStore.addLogs(logsPerBlock, LogType.UNENCRYPTED);
});

it('"txHash" filter param is respected', async () => {
  // Pick a random tx out of the seeded blocks as the filter target.
  const blockIdx = Math.floor(Math.random() * numBlocks);
  const txIdx = Math.floor(Math.random() * txsPerBlock);
  const targetTxHash = new L2BlockContext(blocks[blockIdx]).getTxHash(txIdx);

  const response = await archiverStore.getUnencryptedLogs({ txHash: targetTxHash });
  const { logs } = response;

  expect(response.maxLogsHit).toBeFalsy();

  // Every public function call in the tx emits `numUnencryptedLogs` logs.
  expect(logs.length).toEqual(numPublicFunctionCalls * numUnencryptedLogs);

  // All returned logs must point at exactly the chosen block and tx.
  const targetBlockNumber = blockIdx + INITIAL_L2_BLOCK_NUM;
  for (const log of logs) {
    expect(log.id.blockNumber).toEqual(targetBlockNumber);
    expect(log.id.txIndex).toEqual(txIdx);
  }
});

it('"fromBlock" and "toBlock" filter params are respected', async () => {
  // Query a sub-range of the seeded chain; "toBlock" is exclusive.
  const fromBlock = 3;
  const toBlock = 7;

  const response = await archiverStore.getUnencryptedLogs({ fromBlock, toBlock });
  const { logs } = response;

  expect(response.maxLogsHit).toBeFalsy();

  // Each block in [fromBlock, toBlock) contributes txsPerBlock * calls * logs entries.
  const logsPerBlock = txsPerBlock * numPublicFunctionCalls * numUnencryptedLogs;
  expect(logs.length).toEqual(logsPerBlock * (toBlock - fromBlock));

  for (const { id } of logs) {
    expect(id.blockNumber).toBeGreaterThanOrEqual(fromBlock);
    expect(id.blockNumber).toBeLessThan(toBlock);
  }
});

it('"afterLog" filter param is respected', async () => {
// Get a random log as reference
const targetBlockIndex = Math.floor(Math.random() * numBlocks);
const targetTxIndex = Math.floor(Math.random() * txsPerBlock);
const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs);

const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex);

const response = await archiverStore.getUnencryptedLogs({ afterLog });
const logs = response.logs;

expect(response.maxLogsHit).toBeFalsy();

for (const log of logs) {
const logId = log.id;
expect(logId.blockNumber).toBeGreaterThanOrEqual(afterLog.blockNumber);
if (logId.blockNumber === afterLog.blockNumber) {
expect(logId.txIndex).toBeGreaterThanOrEqual(afterLog.txIndex);
if (logId.txIndex === afterLog.txIndex) {
expect(logId.logIndex).toBeGreaterThan(afterLog.logIndex);
}
}
}
});

it('"contractAddress" filter param is respected', async () => {
  // Deserialize a random seeded log and use its contract address as the filter target.
  const blockIdx = Math.floor(Math.random() * numBlocks);
  const txIdx = Math.floor(Math.random() * txsPerBlock);
  const fnIdx = Math.floor(Math.random() * numPublicFunctionCalls);
  const logIdx = Math.floor(Math.random() * numUnencryptedLogs);
  const serialized = blocks[blockIdx].newUnencryptedLogs!.txLogs[txIdx].functionLogs[fnIdx].logs[logIdx];
  const targetContractAddress = UnencryptedL2Log.fromBuffer(serialized).contractAddress;

  const response = await archiverStore.getUnencryptedLogs({ contractAddress: targetContractAddress });

  expect(response.maxLogsHit).toBeFalsy();

  // Every returned log must originate from the chosen contract.
  for (const extendedLog of response.logs) {
    expect(extendedLog.log.contractAddress.equals(targetContractAddress)).toBeTruthy();
  }
});

it('"selector" filter param is respected', async () => {
  // Deserialize a random seeded log and use its event selector as the filter target.
  const blockIdx = Math.floor(Math.random() * numBlocks);
  const txIdx = Math.floor(Math.random() * txsPerBlock);
  const fnIdx = Math.floor(Math.random() * numPublicFunctionCalls);
  const logIdx = Math.floor(Math.random() * numUnencryptedLogs);
  const serialized = blocks[blockIdx].newUnencryptedLogs!.txLogs[txIdx].functionLogs[fnIdx].logs[logIdx];
  const targetSelector = UnencryptedL2Log.fromBuffer(serialized).selector;

  const response = await archiverStore.getUnencryptedLogs({ selector: targetSelector });

  expect(response.maxLogsHit).toBeFalsy();

  // Every returned log must carry the chosen selector.
  for (const extendedLog of response.logs) {
    expect(extendedLog.log.selector.equals(targetSelector)).toBeTruthy();
  }
});

it('"txHash" filter param is ignored when "afterLog" is set', async () => {
  // Get random txHash — almost certainly matches no seeded tx, so a respected
  // txHash filter would return zero logs. With a paging cursor ("afterLog")
  // present, the txHash filter must be ignored entirely, so logs come back.
  const txHash = new TxHash(randomBytes(TxHash.SIZE));
  const afterLog = new LogId(1, 0, 0);

  const response = await archiverStore.getUnencryptedLogs({ txHash, afterLog });
  expect(response.logs.length).toBeGreaterThan(1);
});

it('intersecting works', async () => {
  // Range entirely below the chain's first block -> no logs.
  let logs = (await archiverStore.getUnencryptedLogs({ fromBlock: -10, toBlock: -5 })).logs;
  expect(logs.length).toBe(0);

  // "fromBlock" gets correctly trimmed to range and "toBlock" is exclusive
  logs = (await archiverStore.getUnencryptedLogs({ fromBlock: -10, toBlock: 5 })).logs;
  let blockNumbers = new Set(logs.map(log => log.id.blockNumber));
  expect(blockNumbers).toEqual(new Set([1, 2, 3, 4]));

  // "toBlock" should be exclusive
  logs = (await archiverStore.getUnencryptedLogs({ fromBlock: 1, toBlock: 1 })).logs;
  expect(logs.length).toBe(0);

  // Inverted range (from > to) yields nothing rather than throwing.
  logs = (await archiverStore.getUnencryptedLogs({ fromBlock: 10, toBlock: 5 })).logs;
  expect(logs.length).toBe(0);

  // both "fromBlock" and "toBlock" get correctly capped to range and logs from all blocks are returned
  logs = (await archiverStore.getUnencryptedLogs({ fromBlock: -100, toBlock: +100 })).logs;
  blockNumbers = new Set(logs.map(log => log.id.blockNumber));
  expect(blockNumbers.size).toBe(numBlocks);

  // intersecting with "afterLog" works: cursor at block 4 narrows [2, 5) down to block 4 only.
  logs = (await archiverStore.getUnencryptedLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(4, 0, 0) })).logs;
  blockNumbers = new Set(logs.map(log => log.id.blockNumber));
  expect(blockNumbers).toEqual(new Set([4]));

  // Cursor at/after the exclusive upper bound leaves an empty intersection.
  logs = (await archiverStore.getUnencryptedLogs({ toBlock: 5, afterLog: new LogId(5, 1, 0) })).logs;
  expect(logs.length).toBe(0);

  // Cursor entirely past the requested range also leaves an empty intersection.
  logs = (await archiverStore.getUnencryptedLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(100, 0, 0) }))
    .logs;
  expect(logs.length).toBe(0);
});

it('"txIndex" and "logIndex" are respected when "afterLog.blockNumber" is equal to "fromBlock"', async () => {
// Get a random log as reference
const targetBlockIndex = Math.floor(Math.random() * numBlocks);
const targetTxIndex = Math.floor(Math.random() * txsPerBlock);
const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs);

const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex);

const response = await archiverStore.getUnencryptedLogs({ afterLog, fromBlock: afterLog.blockNumber });
const logs = response.logs;

expect(response.maxLogsHit).toBeFalsy();

for (const log of logs) {
const logId = log.id;
expect(logId.blockNumber).toBeGreaterThanOrEqual(afterLog.blockNumber);
if (logId.blockNumber === afterLog.blockNumber) {
expect(logId.txIndex).toBeGreaterThanOrEqual(afterLog.txIndex);
if (logId.txIndex === afterLog.txIndex) {
expect(logId.logIndex).toBeGreaterThan(afterLog.logIndex);
}
}
}
});
});
});
Loading

0 comments on commit 7ae554a

Please sign in to comment.