Skip to content

Commit 33dcab2

Browse files
authored
test(e2e): equivocation recovery under proposer pipelining (#22831)
Adds a new e2e test that checks that the network can recover from a checkpoint posted to L1 that differs from the one broadcasted via p2p. Fixes A-870
1 parent b6be617 commit 33dcab2

7 files changed

Lines changed: 317 additions & 7 deletions

File tree

Lines changed: 279 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,279 @@
1+
import type { AztecNodeService } from '@aztec/aztec-node';
2+
import { EthAddress } from '@aztec/aztec.js/addresses';
3+
import { Fr } from '@aztec/aztec.js/fields';
4+
import type { Logger } from '@aztec/aztec.js/log';
5+
import { asyncMap } from '@aztec/foundation/async-map';
6+
import { CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types';
7+
import { times } from '@aztec/foundation/collection';
8+
import { SecretValue } from '@aztec/foundation/config';
9+
import { retryUntil } from '@aztec/foundation/retry';
10+
import { bufferToHex } from '@aztec/foundation/string';
11+
import { getTimestampForSlot } from '@aztec/stdlib/epoch-helpers';
12+
import { tryStop } from '@aztec/stdlib/interfaces/server';
13+
14+
import { jest } from '@jest/globals';
15+
import { privateKeyToAccount } from 'viem/accounts';
16+
17+
import { getPrivateKeyFromIndex } from '../fixtures/utils.js';
18+
import { EpochsTestContext } from './epochs_test.js';
19+
20+
jest.setTimeout(1000 * 60 * 15);
21+
22+
const NODE_COUNT = 4;
23+
24+
/**
 * E2E test for the equivocation recovery scenario under proposer pipelining.
 *
 * Two conflicting checkpoint proposals are gossiped during the same slot:
 * - Node A (holds all 4 validator keys) publishes the "real" checkpoint to L1
 *   but never broadcasts via gossipsub (`skipBroadcastProposals + skipIncomingProposals`).
 * - The "X" node (B or C, whichever holds the slot proposer's key) broadcasts an
 *   alternative checkpoint that reaches B/C/D via gossipsub but never lands on L1
 *   (`skipPublishingCheckpointsPercent: 100`).
 *
 * The test verifies that L1 sync overrides the gossip-only proposal on all observer
 * nodes (B, C, D) once A's L1-confirmed checkpoint propagates via the archiver.
 */
describe('e2e_epochs/epochs_equivocation', () => {
  let logger: Logger;
  let test: EpochsTestContext;
  let nodes: AztecNodeService[];

  afterEach(async () => {
    // Clear any jest spies/mocks and tear down the whole test network.
    // `test?` guards against setup having failed before assignment.
    jest.restoreAllMocks();
    await test?.teardown();
  });

  it('L1-confirmed checkpoint overrides gossip-only equivocating proposal', async () => {
    // Build 4 validators (V1..V4) using getPrivateKeyFromIndex(i+3), same convention as other epoch tests.
    const validators = times(NODE_COUNT, i => {
      const privateKey = bufferToHex(getPrivateKeyFromIndex(i + 3)!);
      const attester = EthAddress.fromString(privateKeyToAccount(privateKey).address);
      // Attester doubles as withdrawer; the BN254 secret is random since BLS keys are not exercised here.
      return { attester, withdrawer: attester, privateKey, bn254SecretKey: new SecretValue(Fr.random().toBigInt()) };
    });

    // Timing calculation for 3 blocks per checkpoint with 8s sub-slots:
    // - initializationOffset = 0.5s (test mode, ethereumSlotDuration < 8)
    // - 3 blocks x 8s = 24s
    // - checkpointFinalization = 0.5s (assemble) + 0 (p2p in test) + 2s (L1 publish) = 2.5s
    // - finalBlockDuration = 8s (re-execution)
    // - Total: 0.5 + 24 + 8 + 2.5 = 35s => use 36s
    test = await EpochsTestContext.setup({
      numberOfAccounts: 0,
      initialValidators: validators,
      enableProposerPipelining: true,
      inboxLag: 2,
      mockGossipSubNetwork: true,
      disableAnvilTestWatcher: true,
      startProverNode: false,
      aztecEpochDuration: 4,
      // Effectively disable proof-submission pruning so the pending chain survives the test.
      aztecProofSubmissionEpochs: 1024,
      enforceTimeTable: true,
      ethereumSlotDuration: 6,
      aztecSlotDuration: 36,
      blockDurationMs: 8000,
      attestationPropagationTime: 0.5,
      l1PublishingTime: 2,
      aztecTargetCommitteeSize: 4,
      // No sequencer on the initial node: all block production comes from A/B/C below.
      skipInitialSequencer: true,
    });

    logger = test.logger;

    // We set different coinbase addresses so different nodes produce different blocks,
    // letting assertions identify which node authored an observed block.
    const coinbaseA = EthAddress.fromNumber(0xa);
    const coinbaseB = EthAddress.fromNumber(0xb);
    const coinbaseC = EthAddress.fromNumber(0xc);

    // The private keys held by each node:
    // A: all 4 keys → self-attests with all validators, reaches quorum without inbound attestations
    // B: V1 + V2
    // C: V3 + V4
    // D: no validator keys (RPC-only observer)
    const keysA = validators.map(v => v.privateKey as `0x${string}`);
    const keysB = [validators[0].privateKey, validators[1].privateKey] as `0x${string}`[];
    const keysC = [validators[2].privateKey, validators[3].privateKey] as `0x${string}`[];

    // All sequencers start with dontStartSequencer so we can warp the clock first.
    // A drops all p2p proposal traffic (both directions); B and C never publish to L1.
    nodes = await asyncMap(
      [
        {
          keys: keysA,
          coinbase: coinbaseA,
          extraOpts: { skipIncomingProposals: true, skipBroadcastProposals: true },
        },
        {
          keys: keysB,
          coinbase: coinbaseB,
          extraOpts: { skipPublishingCheckpointsPercent: 100 },
        },
        {
          keys: keysC,
          coinbase: coinbaseC,
          extraOpts: { skipPublishingCheckpointsPercent: 100 },
        },
      ],
      ({ keys, coinbase, extraOpts }) =>
        test.createValidatorNode(keys, {
          dontStartSequencer: true,
          coinbase,
          buildCheckpointIfEmpty: true,
          minTxsPerBlock: 0,
          ...extraOpts,
        }),
    );

    // Node D: non-validator observer node
    const nodeD = await test.createNonValidatorNode({ buildCheckpointIfEmpty: true, minTxsPerBlock: 0 });
    nodes.push(nodeD);
    const [nodeB, nodeC] = nodes.slice(1);

    logger.warn('All nodes created', { nodes: nodes.length });

    // Determine the next proposer slot by scanning upcoming slots.
    // Since A holds all 4 keys and B/C each hold 2, the slot proposer is always held by A
    // and exactly one of B or C. We identify which one (X) and use its coinbase in assertions.
    const { slot: currentSlot } = test.epochCache.getEpochAndSlotNow();
    // Pick a target slot 2 ahead so there's room for the pipelining build window to engage.
    // With pipelining, the sequencer builds slot (targetSlot+1) while the clock is at targetSlot,
    // so the proposer we care about is for targetSlot+1 (the submission slot).
    const targetSlot = SlotNumber(currentSlot + 2);
    const submissionSlot = SlotNumber(targetSlot + 1);

    const attesterAddresses = validators.map(v => EthAddress.fromString(privateKeyToAccount(v.privateKey).address));
    logger.warn('Validator attester addresses', {
      V1: attesterAddresses[0],
      V2: attesterAddresses[1],
      V3: attesterAddresses[2],
      V4: attesterAddresses[3],
    });
    logger.warn('Validator-to-node assignment', { A: 'V1,V2,V3,V4', B: 'V1,V2', C: 'V3,V4', D: 'none' });

    const proposerAttester = await test.epochCache.getProposerAttesterAddressInSlot(submissionSlot);
    if (!proposerAttester) {
      throw new Error(`No proposer found for slot ${submissionSlot}`);
    }
    logger.warn(`Expected proposer for submission slot`, { submissionSlot, proposerAttester });

    // Warp to one L1 slot before the target L2 slot so pipelining's build window engages.
    const slotStartTimestamp = getTimestampForSlot(targetSlot, test.constants);
    const warpTo = slotStartTimestamp - BigInt(test.L1_BLOCK_TIME_IN_S);
    logger.warn(`Warping to L1 timestamp ${warpTo} (one L1 slot before L2 slot ${targetSlot})`);
    await test.context.cheatCodes.eth.warp(Number(warpTo), { resetBlockInterval: true });

    // Start all sequencers now that the clock is warped. D has no sequencer, hence slice(0, 3).
    const sequencers = nodes.slice(0, 3).map(n => n.getSequencer()!);
    const { failEvents } = test.watchSequencerEvents(sequencers, i => ({ validator: ['A', 'B', 'C'][i] }));
    await Promise.all(sequencers.map(s => s.start()));
    logger.warn('All sequencers started');

    // Wait until each of B, C, D sees a proposed block for submissionSlot with coinbase B or C.
    // This confirms the gossip-only equivocating proposal from B or C has propagated.
    // REFACTOR: This is candidate for a "wait until all nodes see a block with these properties" helper in the test context.
    const gossipTimeout = test.L2_SLOT_DURATION_IN_S * 4;
    await Promise.all(
      [nodeB, nodeC, nodeD].map(async (node, idx) => {
        const nodeName = ['B', 'C', 'D'][idx];
        let observedCoinbase: EthAddress | undefined;
        await retryUntil(
          async () => {
            const block = await node.getBlock('proposed');
            if (!block) {
              return false;
            }
            const slot = block.header.globalVariables.slotNumber;
            const cb = block.header.globalVariables.coinbase;
            // NOTE(review): strict `===` here vs `>=` in the override check below — assumes
            // slotNumber is a branded primitive number so `===` compares values; confirm.
            if (slot === submissionSlot && (cb.equals(coinbaseB) || cb.equals(coinbaseC))) {
              observedCoinbase = cb;
              return true;
            }
            return false;
          },
          `${nodeName} sees gossip-only proposed block for slot ${submissionSlot}`,
          gossipTimeout,
          0.5,
        );
        logger.warn(`Node ${nodeName} observed gossip-only coinbase for slot ${submissionSlot}`, { observedCoinbase });
      }),
    );

    // Now wait until each of B, C, D has a checkpointed block for submissionSlot with coinbaseA.
    // This confirms A's L1-confirmed checkpoint has overridden the gossip-only proposal.
    // REFACTOR: This is candidate for a "wait until all nodes see a block with these properties" helper in the test context.
    const overrideTimeout = test.L2_SLOT_DURATION_IN_S * 4;
    logger.warn(`Waiting for L1-sync override on B, C, D (timeout=${overrideTimeout}s)`);
    await Promise.all(
      [nodeB, nodeC, nodeD].map(async (node, idx) => {
        const nodeName = ['B', 'C', 'D'][idx];
        await retryUntil(
          async () => {
            const block = await node.getBlock('checkpointed');
            if (!block) {
              return false;
            }
            const slot = block.header.globalVariables.slotNumber;
            const cb = block.header.globalVariables.coinbase;
            return slot >= submissionSlot && cb.equals(coinbaseA);
          },
          `${nodeName} checkpointed block for slot ${submissionSlot} with coinbaseA`,
          overrideTimeout,
          0.5,
        );
      }),
    );

    // Assert no spurious failures on B, C.
    // Node A (index 2) generates lots of proposer-rollup-check-failed noise because it has
    // skipIncomingProposals set and can't build a valid checkpoint for slot 2.
    // Nodes B (index 3) and C (index 4) generate checkpoint-publish-failed at the submission slot
    // because skipPublishingCheckpointsPercent: 100 causes their publish to be intentionally skipped.
    // NOTE(review): the indices in the comment above (A=2, B=3, C=4) do not match the
    // `['A', 'B', 'C'][i]` labeling passed to watchSequencerEvents (i = 0..2); confirm how
    // sequencerIndex is numbered — the `!== 2` filter may be excluding C rather than A.
    const observerFailEvents = failEvents.filter(
      e =>
        e.sequencerIndex !== 2 && // node A
        !(e.type === 'proposer-rollup-check-failed' && e.reason === 'Rollup contract check failed') &&
        !(e.type === 'checkpoint-publish-failed' && e.slot === submissionSlot), // expected skip-publish from B/C
    );
    if (observerFailEvents.length > 0) {
      logger.error('Unexpected fail events on observer sequencers', observerFailEvents);
    }
    expect(observerFailEvents).toEqual([]);

    // Then heal. Stop A, re-enable checkpoint publishing on B and C, expect chain to advance.
    logger.warn('Stopping node A and re-enabling publishing on B and C');
    await tryStop(nodes[0], logger);

    const baseline = test.monitor.checkpointNumber;
    logger.warn(`Checkpoint baseline after equivocation: ${baseline}`);

    await nodes[1].setConfig({ skipPublishingCheckpointsPercent: 0 });
    await nodes[2].setConfig({ skipPublishingCheckpointsPercent: 0 });

    // Two more checkpoints past the baseline is taken as proof the chain is advancing again.
    const healTarget = CheckpointNumber(baseline + 2);
    const healTimeout = test.L2_SLOT_DURATION_IN_S * 8;
    logger.warn(`Waiting for checkpoint ${healTarget} (timeout=${healTimeout}s)`);
    await test.waitUntilCheckpointNumber(healTarget, healTimeout);

    expect(test.monitor.checkpointNumber).toBeGreaterThanOrEqual(healTarget);
    logger.warn(`Network healed: checkpoint ${test.monitor.checkpointNumber}`);

    // Finally check every remaining node has synced past the heal target.
    // REFACTOR: This is candidate for a "wait until all nodes sync to a chain tip with these properties" helper in the test context.
    await Promise.all(
      [nodeB, nodeC, nodeD].map((node, idx) =>
        retryUntil(
          async () => {
            const tips = await node.getL2Tips();
            return tips.checkpointed.checkpoint.number >= healTarget;
          },
          `${'BCD'[idx]} synced to checkpoint ${healTarget}`,
          healTimeout,
          0.5,
        ),
      ),
    );

    // TODO(A-980): assert the equivocating proposer of the first slot is eventually slashed
    // for the DUPLICATE_PROPOSAL offense. Slasher is currently disabled in the harness
    // (slasherEnabled: false) and enabling it requires plumbing offense submission and
    // waiting for the slasher's offense window.
  });
});

yarn-project/p2p/src/config.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -217,6 +217,9 @@ export interface P2PConfig
217217

218218
/** Minimum percentage fee increase required to replace an existing tx via RPC (0 = no bump). */
219219
priceBumpPercentage: bigint;
220+
221+
/** Drop incoming block and checkpoint proposals at the libp2p dispatch layer (for testing only) */
222+
skipIncomingProposals?: boolean;
220223
}
221224

222225
export const DEFAULT_P2P_PORT = 40400;
@@ -519,6 +522,10 @@ export const p2pConfigMappings: ConfigMappingsType<P2PConfig> = {
519522
'Broadcast block proposals even when a conflicting proposal for the same slot already exists in the pool (for testing purposes only).',
520523
...booleanConfigHelper(false),
521524
},
525+
skipIncomingProposals: {
526+
description: 'Drop incoming block and checkpoint proposals at the libp2p dispatch layer (for testing only)',
527+
...booleanConfigHelper(false),
528+
},
522529
minTxPoolAgeMs: {
523530
env: 'P2P_MIN_TX_POOL_AGE_MS',
524531
description: 'Minimum age (ms) a transaction must have been in the pool before it is eligible for block building.',

yarn-project/p2p/src/services/libp2p/libp2p_service.ts

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import type { EpochCacheInterface } from '@aztec/epoch-cache';
22
import { BlockNumber, type SlotNumber } from '@aztec/foundation/branded-types';
3-
import { maxBy } from '@aztec/foundation/collection';
3+
import { maxBy, merge } from '@aztec/foundation/collection';
44
import { type Logger, createLibp2pComponentLogger, createLogger } from '@aztec/foundation/log';
55
import { RunningPromise } from '@aztec/foundation/running-promise';
66
import { Timer } from '@aztec/foundation/timer';
@@ -271,8 +271,9 @@ export class LibP2PService extends WithTracer implements P2PService {
271271
};
272272
}
273273

274-
public updateConfig(config: Partial<P2PReqRespConfig>) {
274+
/**
 * Applies a runtime config update: forwards it to the reqresp sub-service and merges
 * it into this service's own config so the gossip dispatch path (which reads
 * `skipIncomingProposals`) observes the change without a restart.
 */
public updateConfig(config: Partial<P2PReqRespConfig & Pick<P2PConfig, 'skipIncomingProposals'>>) {
  this.reqresp.updateConfig(config);
  this.config = merge(this.config, config);
}
277278

278279
/**
@@ -849,6 +850,15 @@ export class LibP2PService extends WithTracer implements P2PService {
849850

850851
// Process the message, optionally within a linked span for trace propagation
851852
const processMessage = async () => {
853+
if (
854+
this.config.skipIncomingProposals &&
855+
(msg.topic === this.topicStrings[TopicType.block_proposal] ||
856+
msg.topic === this.topicStrings[TopicType.checkpoint_proposal])
857+
) {
858+
this.logger.warn(`Ignoring incoming proposal (skipIncomingProposals is set)`, { topic: msg.topic });
859+
this.node.services.pubsub.reportMessageValidationResult(msgId, source.toString(), TopicValidatorResult.Ignore);
860+
return;
861+
}
852862
if (msg.topic === this.topicStrings[TopicType.tx]) {
853863
await this.handleGossipedTx(p2pMessage.payload, msgId, source);
854864
} else if (msg.topic === this.topicStrings[TopicType.checkpoint_attestation]) {

yarn-project/sequencer-client/src/config.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -229,6 +229,10 @@ export const sequencerConfigMappings: ConfigMappingsType<SequencerConfig> = {
229229
description: 'Percent probability (0 - 100) of sequencer skipping checkpoint publishing (testing only)',
230230
...numberConfigHelper(DefaultSequencerConfig.skipPublishingCheckpointsPercent),
231231
},
232+
skipBroadcastProposals: {
233+
description: 'Skip broadcasting checkpoint and block proposals via gossipsub when proposer (for testing only)',
234+
...booleanConfigHelper(false),
235+
},
232236
...pickConfigMappings(p2pConfigMappings, ['txPublicSetupAllowListExtend']),
233237
};
234238

yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -640,8 +640,10 @@ export class CheckpointProposalJob implements Traceable {
640640
);
641641

642642
const blockProposedAt = this.dateProvider.now();
643-
await this.p2pClient.broadcastCheckpointProposal(proposal);
644-
this.checkpointMetrics.noteCheckpointBroadcast(this.dateProvider.now());
643+
if (!this.config.skipBroadcastProposals) {
644+
await this.p2pClient.broadcastCheckpointProposal(proposal);
645+
this.checkpointMetrics.noteCheckpointBroadcast(this.dateProvider.now());
646+
}
645647

646648
// Return immediately after broadcast — attestation collection happens in the background
647649
return { checkpoint, proposal, blockProposedAt };
@@ -762,7 +764,9 @@ export class CheckpointProposalJob implements Traceable {
762764
}
763765

764766
// Once we have a signed proposal and the archiver agreed with our proposed block, then we broadcast it.
765-
proposal && (await this.p2pClient.broadcastProposal(proposal));
767+
if (proposal && !this.config.skipBroadcastProposals) {
768+
await this.p2pClient.broadcastProposal(proposal);
769+
}
766770

767771
// Wait until the next block's start time
768772
await this.waitUntilNextSubslot(timingInfo.deadline);

yarn-project/stdlib/src/interfaces/aztec-node-admin.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -81,6 +81,8 @@ export type AztecNodeAdminConfig = Omit<ValidatorClientFullConfig, 'l1Contracts'
8181
'archiverPollingIntervalMS' | 'archiverBatchSize' | 'skipValidateCheckpointAttestations'
8282
> & {
8383
maxPendingTxCount: number;
84+
// Keep in sync with P2PConfig.skipIncomingProposals (circular dep prevents Pick<P2PConfig, ...> here)
85+
skipIncomingProposals?: boolean;
8486
};
8587

8688
export const AztecNodeAdminConfigSchema = SequencerConfigSchema.merge(ProverConfigSchema)
@@ -93,7 +95,7 @@ export const AztecNodeAdminConfigSchema = SequencerConfigSchema.merge(ProverConf
9395
skipValidateCheckpointAttestations: true,
9496
}),
9597
)
96-
.merge(z.object({ maxPendingTxCount: z.number() }));
98+
.merge(z.object({ maxPendingTxCount: z.number(), skipIncomingProposals: z.boolean().optional() }));
9799

98100
export const AztecNodeAdminApiSchema: ApiSchemaFor<AztecNodeAdmin> = {
99101
getConfig: z.function().returns(AztecNodeAdminConfigSchema),

0 commit comments

Comments (0)