Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 30 additions & 3 deletions yarn-project/end-to-end/src/e2e_p2p/add_rollup.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import { EthAddress } from '@aztec/aztec.js/addresses';
import { waitForProven } from '@aztec/aztec.js/contracts';
import { generateClaimSecret } from '@aztec/aztec.js/ethereum';
import { Fr } from '@aztec/aztec.js/fields';
import { waitForL1ToL2MessageReady } from '@aztec/aztec.js/messaging';
import { RollupCheatCodes } from '@aztec/aztec/testing';
import { FeeAssetHandlerContract, RegistryContract, RollupContract } from '@aztec/ethereum/contracts';
import { deployRollupForUpgrade } from '@aztec/ethereum/deploy-aztec-l1-contracts';
Expand All @@ -13,6 +14,7 @@ import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses'
import { L1TxUtils, createL1TxUtils } from '@aztec/ethereum/l1-tx-utils';
import type { ExtendedViemWalletClient } from '@aztec/ethereum/types';
import { CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types';
import { retryUntil } from '@aztec/foundation/retry';
import { sleep } from '@aztec/foundation/sleep';
import {
GovernanceAbi,
Expand Down Expand Up @@ -42,7 +44,6 @@ import { shouldCollectMetrics } from '../fixtures/fixtures.js';
import { sendL1ToL2Message } from '../fixtures/l1_to_l2_messaging.js';
import { ATTESTER_PRIVATE_KEYS_START_INDEX, createNodes, createProverNode } from '../fixtures/setup_p2p_test.js';
import { setupSharedBlobStorage } from '../fixtures/utils.js';
import { waitForL1ToL2MessageSeen } from '../shared/wait_for_l1_to_l2_message.js';
import { TestWallet } from '../test-wallet/test_wallet.js';
import { P2PNetworkTest, SHORTENED_BLOCK_TIME_CONFIG_NO_PRUNES } from './p2p_network.js';

Expand All @@ -53,7 +54,7 @@ const BOOT_NODE_UDP_PORT = 4500;
const DATA_DIR = fs.mkdtempSync(path.join(os.tmpdir(), 'add-rollup-old-'));
const DATA_DIR_NEW = fs.mkdtempSync(path.join(os.tmpdir(), 'add-rollup-new-'));

jest.setTimeout(1000 * 60 * 10);
jest.setTimeout(1000 * 60 * 20);

/**
* This test emulates the addition of a new rollup to the registry and tests that cross-chain messages work.
Expand All @@ -80,6 +81,14 @@ describe('e2e_p2p_add_rollup', () => {
...SHORTENED_BLOCK_TIME_CONFIG_NO_PRUNES,
listenAddress: '127.0.0.1',
governanceProposerRoundSize: 10,
enableProposerPipelining: true,
// Allow validators to build empty checkpoints under pipelining so the chain keeps
// advancing while we wait for L1->L2 messages to land in the next checkpoint's inbox tree.
minTxsPerBlock: 0,
// Pipelining starts cycle for checkpoint N+1 during slot N, but the inbox tree for
// checkpoint N is only sealed when checkpoint N is published. inboxLag: 2 sources
// L1->L2 messages from checkpoint N-1 (already sealed), avoiding L1ToL2MessagesNotReadyError.
inboxLag: 2,
},
startProverNode: false, // Start one later using p2p.
});
Expand Down Expand Up @@ -307,7 +316,10 @@ describe('e2e_p2p_add_rollup', () => {
});

const makeMessageConsumable = async (msgHash: Fr) => {
await waitForL1ToL2MessageSeen(node, msgHash, { timeoutSeconds: 10 });
// Wait until the message is ready to be consumed (the rollup has reached the message's checkpoint).
// Using waitForL1ToL2MessageReady rather than isL1ToL2MessageSynced because with `inboxLag > 0`
// a synced message is not yet present in the latest checkpoint's inbox tree.
await waitForL1ToL2MessageReady(node, msgHash, { timeoutSeconds: 120 });

const { receipt } = await testContract.methods
.create_l2_to_l1_message_arbitrary_recipient_private(contentOutFromRollup, ethRecipient)
Expand Down Expand Up @@ -578,6 +590,21 @@ describe('e2e_p2p_add_rollup', () => {
// The new rollup should have no checkpoints
expect(await newRollup.getCheckpointNumber()).toBe(CheckpointNumber(0));

// Wait for the new rollup to publish its first checkpoint AND for `nodes[0]` to have synced
// it locally, before the second bridging step. The bridge wallet uses
// `syncChainTip: 'checkpointed'`, which falls back to the genesis block when no checkpoint
// exists. After warping ~500 epochs forward, txs anchored at genesis would expire before
// being included. We poll the node's local view (not just the L1 rollup contract) so the PXE
// and the assertion observe the same chain state.
t.logger.info(`Waiting for new rollup to publish its first checkpoint`);
await retryUntil(
async () => Number(await nodes[0].getCheckpointNumber('checkpointed')) > 0,
'newRollup first checkpoint synced by node',
300,
2,
);
t.logger.info(`New rollup published its first checkpoint`);

// Bridge into and out of the new rollup to ensure that it works.
await bridging(
nodes[0],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -290,6 +290,14 @@ export class LightweightCheckpointBuilder {
totalManaUsed,
});

this.logger.debug(`Completed checkpoint ${this.checkpointNumber}`, {
checkpointNumber: this.checkpointNumber,
headerHash: header.hash().toString(),
checkpointOutHash: checkpointOutHash.toString(),
numPreviousCheckpointOutHashes: this.previousCheckpointOutHashes.length,
...header.toInspect(),
});

return new Checkpoint(newArchive, header, blocks, this.checkpointNumber, this.feeAssetPriceModifier);
}

Expand Down
16 changes: 14 additions & 2 deletions yarn-project/prover-client/src/orchestrator/orchestrator.ts
Original file line number Diff line number Diff line change
Expand Up @@ -896,7 +896,11 @@ export class ProvingOrchestrator implements EpochProver {
},
),
async result => {
this.logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
this.logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`, {
blockNumber: provingState.blockNumber,
checkpointIndex: provingState.parentCheckpoint.index,
...result.inputs.toInspect(),
});

const leafLocation = provingState.setBlockRootRollupProof(result);
const checkpointProvingState = provingState.parentCheckpoint;
Expand Down Expand Up @@ -1015,6 +1019,11 @@ export class ProvingOrchestrator implements EpochProver {
signal => this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber),
),
async result => {
this.logger.debug(`Completed block merge rollup proof for checkpoint ${provingState.index}`, {
checkpointIndex: provingState.index,
mergeLocation: location,
...result.inputs.toInspect(),
});
provingState.setBlockMergeRollupProof(location, result);
await this.checkAndEnqueueNextBlockMergeRollup(provingState, location);
},
Expand Down Expand Up @@ -1067,7 +1076,10 @@ export class ProvingOrchestrator implements EpochProver {
return;
}

this.logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}.`);
this.logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}`, {
checkpointIndex: provingState.index,
...result.inputs.toInspect(),
});

const leafLocation = provingState.setCheckpointRootRollupProof(result);
const epochProvingState = provingState.parentEpoch;
Expand Down
7 changes: 4 additions & 3 deletions yarn-project/prover-node/src/job/epoch-proving-job.ts
Original file line number Diff line number Diff line change
Expand Up @@ -191,11 +191,12 @@ export class EpochProvingJob implements Traceable {
const previousHeader = previousBlockHeaders[checkpointIndex];
const l1ToL2Messages = this.getL1ToL2Messages(checkpoint);

this.log.verbose(`Starting processing checkpoint ${checkpoint.number}`, {
this.log.debug(`Starting processing checkpoint ${checkpoint.number}`, {
number: checkpoint.number,
checkpointHash: checkpoint.hash().toString(),
lastArchive: checkpoint.header.lastArchiveRoot,
previousHeader: previousHeader.hash(),
headerHash: checkpoint.header.hash().toString(),
numL1ToL2Messages: l1ToL2Messages.length,
previousBlockNumber: previousHeader.globalVariables.blockNumber,
uuid: this.uuid,
});

Expand Down
109 changes: 105 additions & 4 deletions yarn-project/prover-node/src/prover-node-publisher.ts
Original file line number Diff line number Diff line change
Expand Up @@ -203,10 +203,14 @@ export class ProverNodePublisher {
const argsPublicInputs = [...publicInputs.toFields()];

if (!areArraysEqual(rollupPublicInputs, argsPublicInputs, (a, b) => a.equals(b))) {
const fmt = (inputs: Fr[] | readonly string[]) => inputs.map(x => x.toString()).join(', ');
throw new Error(
`Root rollup public inputs mismatch:\nRollup: ${fmt(rollupPublicInputs)}\nComputed:${fmt(argsPublicInputs)}`,
);
throw await reportPublicInputsMismatch({
rollupPublicInputs,
argsPublicInputs,
fromCheckpoint,
toCheckpoint,
rollupContract: this.rollupContract,
log: this.log,
});
}
}

Expand Down Expand Up @@ -372,3 +376,100 @@ export class ProverNodePublisher {
};
}
}

/**
 * Decodes a `Root rollup public inputs mismatch`: diffs the L1 rollup's public inputs against the
 * prover's recomputed ones field-by-field, fetches the on-chain CheckpointLog for any
 * mismatching `checkpointHeaderHashes[i]`, emits a structured error log, and returns a thrown-ready
 * Error with a human-readable summary.
 *
 * Layout of `RootRollupPublicInputs.toFields()`:
 * [0] previousArchiveRoot
 * [1] endArchiveRoot
 * [2] outHash
 * [3 .. 3+N-1] checkpointHeaderHashes[i] for i in 0..N-1 (N = MAX_CHECKPOINTS_PER_EPOCH)
 * [3+N .. 3+3N-1] fees[i] = (recipient, value) for i in 0..N-1
 * [3+3N .. 3+3N+4] EpochConstantData (chainId, version, vkTreeRoot, protocolContractsHash, proverId)
 * [3+3N+5 ..] blobPublicInputs (FinalBlobAccumulator)
 */
async function reportPublicInputsMismatch(input: {
  rollupPublicInputs: readonly Fr[];
  argsPublicInputs: readonly Fr[];
  fromCheckpoint: CheckpointNumber;
  toCheckpoint: CheckpointNumber;
  rollupContract: RollupContract;
  log: Logger;
}): Promise<Error> {
  const { rollupPublicInputs, argsPublicInputs, fromCheckpoint, toCheckpoint, rollupContract, log } = input;
  const N = MAX_CHECKPOINTS_PER_EPOCH;
  const constantsStart = 3 + 3 * N;
  const blobStart = constantsStart + 5;
  const constantLabels = ['chainId', 'version', 'vkTreeRoot', 'protocolContractsHash', 'proverId'];

  type Diff = { index: number; label: string; rollup: Fr; computed: Fr; checkpointIndex?: number };
  const diffs: Diff[] = [];
  // Walk the longer of the two arrays so trailing extra fields on either side are reported too;
  // a missing entry is compared as zero, matching field-element padding semantics.
  const len = Math.max(rollupPublicInputs.length, argsPublicInputs.length);
  for (let i = 0; i < len; i++) {
    const a = rollupPublicInputs[i] ?? Fr.ZERO;
    const b = argsPublicInputs[i] ?? Fr.ZERO;
    if (a.equals(b)) {
      continue;
    }
    // Map the flat field index back to its named position per the layout documented above.
    let label: string;
    let checkpointIndex: number | undefined;
    if (i === 0) {
      label = 'previousArchiveRoot';
    } else if (i === 1) {
      label = 'endArchiveRoot';
    } else if (i === 2) {
      label = 'outHash';
    } else if (i < 3 + N) {
      checkpointIndex = i - 3;
      label = `checkpointHeaderHashes[${checkpointIndex}]`;
    } else if (i < constantsStart) {
      const feePairIndex = i - (3 + N);
      const feeIndex = Math.floor(feePairIndex / 2);
      const sub = feePairIndex % 2 === 0 ? 'recipient' : 'value';
      label = `fees[${feeIndex}].${sub}`;
    } else if (i < blobStart) {
      label = `constants.${constantLabels[i - constantsStart]}`;
    } else {
      label = `blobPublicInputs[${i - blobStart}]`;
    }
    diffs.push({ index: i, label, rollup: a, computed: b, checkpointIndex });
  }

  // For each mismatching checkpointHeaderHash, fetch the L1 CheckpointLog so the operator can
  // see what was published on-chain alongside the prover's recomputed hash.
  // A type predicate is used here because a bare `.filter(...)` would not narrow
  // `checkpointIndex` for the `.map` below (avoiding the non-null `!` assertion).
  const checkpointDiffs = diffs.filter(
    (d): d is Diff & { checkpointIndex: number } => d.checkpointIndex !== undefined,
  );
  const onChainCheckpoints = await Promise.all(
    checkpointDiffs.map(async d => {
      const checkpointNumber = CheckpointNumber(fromCheckpoint + d.checkpointIndex);
      try {
        const cp = await rollupContract.getCheckpoint(checkpointNumber);
        return { checkpointIndex: d.checkpointIndex, checkpointNumber, headerHash: cp.headerHash.toString() };
      } catch (err) {
        // Best-effort: a failed L1 read must not mask the original mismatch. Narrow the unknown
        // catch value instead of asserting `as Error`, which would break on non-Error throws.
        const message = err instanceof Error ? err.message : String(err);
        return { checkpointIndex: d.checkpointIndex, checkpointNumber, error: message };
      }
    }),
  );

  log.error(`Root rollup public inputs mismatch`, undefined, {
    fromCheckpoint,
    toCheckpoint,
    numDiffs: diffs.length,
    diffs: diffs.map(d => ({
      index: d.index,
      label: d.label,
      rollup: d.rollup.toString(),
      computed: d.computed.toString(),
    })),
    onChainCheckpoints,
  });

  const fmt = (inputs: readonly Fr[]) => inputs.map(x => x.toString()).join(', ');
  const summary = diffs.map(d => `[${d.index} ${d.label}] L1=${d.rollup} prover=${d.computed}`).join('\n');
  return new Error(
    `Root rollup public inputs mismatch (${diffs.length} fields differ):\n${summary}\n` +
      // Note the space after "Computed:" — previously missing, making the dump inconsistent
      // with the "Rollup: " line.
      `Rollup: ${fmt(rollupPublicInputs)}\nComputed: ${fmt(argsPublicInputs)}`,
  );
}
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,13 @@ import {
MaliciousCommitteeAttestationsAndSigners,
type ValidateCheckpointResult,
} from '@aztec/stdlib/block';
import { type Checkpoint, type ProposedCheckpointData, validateCheckpoint } from '@aztec/stdlib/checkpoint';
import {
computeQuorum,
getEpochAtSlot,
getSlotStartBuildTimestamp,
getTimestampForSlot,
} from '@aztec/stdlib/epoch-helpers';
type Checkpoint,
type ProposedCheckpointData,
getPreviousCheckpointOutHashes,
validateCheckpoint,
} from '@aztec/stdlib/checkpoint';
import { computeQuorum, getSlotStartBuildTimestamp, getTimestampForSlot } from '@aztec/stdlib/epoch-helpers';
import { Gas } from '@aztec/stdlib/gas';
import {
type BlockBuilderOptions,
Expand Down Expand Up @@ -416,37 +416,6 @@ export class CheckpointProposalJob implements Traceable {
}
}

/**
* Returns the out hashes of all checkpoints in `targetEpoch` that precede the one being built.
* Under pipelining, the parent checkpoint may not be on L1 yet at build time, so the on-chain
* archiver is missing it; in that case we splice in the parent's `checkpointOutHash` from the
* proposed-checkpoint payload (when it is in the same epoch) so the resulting `epochOutHash`
* matches what other validators and L1 will compute once the parent lands.
*/
private async collectPreviousCheckpointOutHashes(): Promise<Fr[]> {
const parentCheckpointNumber = CheckpointNumber(this.checkpointNumber - 1);
const checkpointed = (await this.l2BlockSource.getCheckpointsData({ epoch: this.targetEpoch }))
.filter(c => c.checkpointNumber < this.checkpointNumber)
.map(c => ({ checkpointNumber: c.checkpointNumber, checkpointOutHash: c.checkpointOutHash }));

const shouldSpliceParent =
this.epochCache.isProposerPipeliningEnabled() &&
this.proposedCheckpointData !== undefined &&
this.proposedCheckpointData.checkpointNumber === parentCheckpointNumber &&
getEpochAtSlot(this.proposedCheckpointData.header.slotNumber, this.epochCache.getL1Constants()) ===
this.targetEpoch &&
!checkpointed.some(c => c.checkpointNumber === parentCheckpointNumber);

if (shouldSpliceParent) {
checkpointed.push({
checkpointNumber: parentCheckpointNumber,
checkpointOutHash: this.proposedCheckpointData!.checkpointOutHash,
});
}

return checkpointed.sort((a, b) => a.checkpointNumber - b.checkpointNumber).map(c => c.checkpointOutHash);
}

/**
* Waits for the parent checkpoint to land on L1 before submitting a pipelined checkpoint.
* Polls until the archiver has synced L1 past the parent's slot, then verifies:
Expand Down Expand Up @@ -620,11 +589,19 @@ export class CheckpointProposalJob implements Traceable {
const inHash = computeInHashFromL1ToL2Messages(l1ToL2Messages);

// Collect the out hashes of all the checkpoints before this one in the same epoch.
// Under pipelining, the parent checkpoint may not be on L1 yet at build time, so
// `getCheckpointsData` would miss it. Splice in the parent's checkpointOutHash from the
// proposed-checkpoint payload so the resulting `epochOutHash` matches what the validators
// (and L1) compute once the parent lands on L1.
const previousCheckpointOutHashes = await this.collectPreviousCheckpointOutHashes();
// Under pipelining the parent checkpoint may not be on L1 yet at build time, so the helper
// splices in the parent's checkpointOutHash from the locally-known proposed checkpoint so
// the resulting `epochOutHash` matches what validators (and L1) compute once the parent
// lands on L1.
const previousCheckpointOutHashes = await getPreviousCheckpointOutHashes({
blockSource: this.l2BlockSource,
epoch: this.targetEpoch,
checkpointNumber: this.checkpointNumber,
l1Constants: this.epochCache.getL1Constants(),
pipeliningEnabled: this.epochCache.isProposerPipeliningEnabled(),
proposedCheckpointData: this.proposedCheckpointData,
log: this.log,
});

// Anchor the modifier to the predicted parent fee header: L1 will apply it against
// that, not against the latest published checkpoint (which lags by one under pipelining).
Expand Down
1 change: 1 addition & 0 deletions yarn-project/stdlib/src/checkpoint/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,6 @@ export * from './checkpoint.js';
export * from './checkpoint_data.js';
export * from './checkpoint_info.js';
export * from './digest.js';
export * from './previous_checkpoint_out_hashes.js';
export * from './published_checkpoint.js';
export * from './validate.js';
Loading
Loading