|
| 1 | +import type { AztecNodeService } from '@aztec/aztec-node'; |
| 2 | +import { EthAddress } from '@aztec/aztec.js/addresses'; |
| 3 | +import { Fr } from '@aztec/aztec.js/fields'; |
| 4 | +import type { Logger } from '@aztec/aztec.js/log'; |
| 5 | +import { asyncMap } from '@aztec/foundation/async-map'; |
| 6 | +import { CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; |
| 7 | +import { times } from '@aztec/foundation/collection'; |
| 8 | +import { SecretValue } from '@aztec/foundation/config'; |
| 9 | +import { retryUntil } from '@aztec/foundation/retry'; |
| 10 | +import { bufferToHex } from '@aztec/foundation/string'; |
| 11 | +import { getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; |
| 12 | +import { tryStop } from '@aztec/stdlib/interfaces/server'; |
| 13 | + |
| 14 | +import { jest } from '@jest/globals'; |
| 15 | +import { privateKeyToAccount } from 'viem/accounts'; |
| 16 | + |
| 17 | +import { getPrivateKeyFromIndex } from '../fixtures/utils.js'; |
| 18 | +import { EpochsTestContext } from './epochs_test.js'; |
| 19 | + |
| 20 | +jest.setTimeout(1000 * 60 * 15); |
| 21 | + |
| 22 | +const NODE_COUNT = 4; |
| 23 | + |
| 24 | +/** |
| 25 | + * E2E test for the equivocation recovery scenario under proposer pipelining. |
| 26 | + * |
| 27 | + * Two conflicting checkpoint proposals are gossiped during the same slot: |
| 28 | + * - Node A (holds all 4 validator keys) publishes the "real" checkpoint to L1 |
| 29 | + * but never broadcasts via gossipsub (`skipBroadcastProposals + skipIncomingProposals`). |
| 30 | + * - The "X" node (B or C, whichever holds the slot proposer's key) broadcasts an |
| 31 | + * alternative checkpoint that reaches B/C/D via gossipsub but never lands on L1 |
| 32 | + * (`skipPublishingCheckpointsPercent: 100`). |
| 33 | + * |
| 34 | + * The test verifies that L1 sync overrides the gossip-only proposal on all observer |
| 35 | + * nodes (B, C, D) once A's L1-confirmed checkpoint propagates via the archiver. |
| 36 | + */ |
| 37 | +describe('e2e_epochs/epochs_equivocation', () => { |
| 38 | + let logger: Logger; |
| 39 | + let test: EpochsTestContext; |
| 40 | + let nodes: AztecNodeService[]; |
| 41 | + |
| 42 | + afterEach(async () => { |
| 43 | + jest.restoreAllMocks(); |
| 44 | + await test?.teardown(); |
| 45 | + }); |
| 46 | + |
| 47 | + it('L1-confirmed checkpoint overrides gossip-only equivocating proposal', async () => { |
| 48 | + // Build 4 validators (V1..V4) using getPrivateKeyFromIndex(i+3), same convention as other epoch tests. |
| 49 | + const validators = times(NODE_COUNT, i => { |
| 50 | + const privateKey = bufferToHex(getPrivateKeyFromIndex(i + 3)!); |
| 51 | + const attester = EthAddress.fromString(privateKeyToAccount(privateKey).address); |
| 52 | + return { attester, withdrawer: attester, privateKey, bn254SecretKey: new SecretValue(Fr.random().toBigInt()) }; |
| 53 | + }); |
| 54 | + |
| 55 | + // Timing calculation for 3 blocks per checkpoint with 8s sub-slots: |
| 56 | + // - initializationOffset = 0.5s (test mode, ethereumSlotDuration < 8) |
| 57 | + // - 3 blocks x 8s = 24s |
| 58 | + // - checkpointFinalization = 0.5s (assemble) + 0 (p2p in test) + 2s (L1 publish) = 2.5s |
| 59 | + // - finalBlockDuration = 8s (re-execution) |
| 60 | + // - Total: 0.5 + 24 + 8 + 2.5 = 35s => use 36s |
| 61 | + test = await EpochsTestContext.setup({ |
| 62 | + numberOfAccounts: 0, |
| 63 | + initialValidators: validators, |
| 64 | + enableProposerPipelining: true, |
| 65 | + inboxLag: 2, |
| 66 | + mockGossipSubNetwork: true, |
| 67 | + disableAnvilTestWatcher: true, |
| 68 | + startProverNode: false, |
| 69 | + aztecEpochDuration: 4, |
| 70 | + aztecProofSubmissionEpochs: 1024, |
| 71 | + enforceTimeTable: true, |
| 72 | + ethereumSlotDuration: 6, |
| 73 | + aztecSlotDuration: 36, |
| 74 | + blockDurationMs: 8000, |
| 75 | + attestationPropagationTime: 0.5, |
| 76 | + l1PublishingTime: 2, |
| 77 | + aztecTargetCommitteeSize: 4, |
| 78 | + skipInitialSequencer: true, |
| 79 | + }); |
| 80 | + |
| 81 | + logger = test.logger; |
| 82 | + |
| 83 | + // We set different coinbase addresses so different nodes produce different blocks |
| 84 | + const coinbaseA = EthAddress.fromNumber(0xa); |
| 85 | + const coinbaseB = EthAddress.fromNumber(0xb); |
| 86 | + const coinbaseC = EthAddress.fromNumber(0xc); |
| 87 | + |
| 88 | + // The private keys held by each node: |
| 89 | + // A: all 4 keys → self-attests with all validators, reaches quorum without inbound attestations |
| 90 | + // B: V1 + V2 |
| 91 | + // C: V3 + V4 |
| 92 | + // D: no validator keys (RPC-only observer) |
| 93 | + const keysA = validators.map(v => v.privateKey as `0x${string}`); |
| 94 | + const keysB = [validators[0].privateKey, validators[1].privateKey] as `0x${string}`[]; |
| 95 | + const keysC = [validators[2].privateKey, validators[3].privateKey] as `0x${string}`[]; |
| 96 | + |
| 97 | + // All sequencers start with dontStartSequencer so we can warp the clock first. |
| 98 | + nodes = await asyncMap( |
| 99 | + [ |
| 100 | + { |
| 101 | + keys: keysA, |
| 102 | + coinbase: coinbaseA, |
| 103 | + extraOpts: { skipIncomingProposals: true, skipBroadcastProposals: true }, |
| 104 | + }, |
| 105 | + { |
| 106 | + keys: keysB, |
| 107 | + coinbase: coinbaseB, |
| 108 | + extraOpts: { skipPublishingCheckpointsPercent: 100 }, |
| 109 | + }, |
| 110 | + { |
| 111 | + keys: keysC, |
| 112 | + coinbase: coinbaseC, |
| 113 | + extraOpts: { skipPublishingCheckpointsPercent: 100 }, |
| 114 | + }, |
| 115 | + ], |
| 116 | + ({ keys, coinbase, extraOpts }) => |
| 117 | + test.createValidatorNode(keys, { |
| 118 | + dontStartSequencer: true, |
| 119 | + coinbase, |
| 120 | + buildCheckpointIfEmpty: true, |
| 121 | + minTxsPerBlock: 0, |
| 122 | + ...extraOpts, |
| 123 | + }), |
| 124 | + ); |
| 125 | + |
| 126 | + // Node D: non-validator observer node |
| 127 | + const nodeD = await test.createNonValidatorNode({ buildCheckpointIfEmpty: true, minTxsPerBlock: 0 }); |
| 128 | + nodes.push(nodeD); |
| 129 | + const [nodeB, nodeC] = nodes.slice(1); |
| 130 | + |
| 131 | + logger.warn('All nodes created', { nodes: nodes.length }); |
| 132 | + |
| 133 | + // Determine the next proposer slot by scanning upcoming slots. |
| 134 | + // Since A holds all 4 keys and B/C each hold 2, the slot proposer is always held by A |
| 135 | + // and exactly one of B or C. We identify which one (X) and use its coinbase in assertions. |
| 136 | + const { slot: currentSlot } = test.epochCache.getEpochAndSlotNow(); |
| 137 | + // Pick a target slot 2 ahead so there's room for the pipelining build window to engage. |
| 138 | + // With pipelining, the sequencer builds slot (targetSlot+1) while the clock is at targetSlot, |
| 139 | + // so the proposer we care about is for targetSlot+1 (the submission slot). |
| 140 | + const targetSlot = SlotNumber(currentSlot + 2); |
| 141 | + const submissionSlot = SlotNumber(targetSlot + 1); |
| 142 | + |
| 143 | + const attesterAddresses = validators.map(v => EthAddress.fromString(privateKeyToAccount(v.privateKey).address)); |
| 144 | + logger.warn('Validator attester addresses', { |
| 145 | + V1: attesterAddresses[0], |
| 146 | + V2: attesterAddresses[1], |
| 147 | + V3: attesterAddresses[2], |
| 148 | + V4: attesterAddresses[3], |
| 149 | + }); |
| 150 | + logger.warn('Validator-to-node assignment', { A: 'V1,V2,V3,V4', B: 'V1,V2', C: 'V3,V4', D: 'none' }); |
| 151 | + |
| 152 | + const proposerAttester = await test.epochCache.getProposerAttesterAddressInSlot(submissionSlot); |
| 153 | + if (!proposerAttester) { |
| 154 | + throw new Error(`No proposer found for slot ${submissionSlot}`); |
| 155 | + } |
| 156 | + logger.warn(`Expected proposer for submission slot`, { submissionSlot, proposerAttester }); |
| 157 | + |
| 158 | + // Warp to one L1 slot before the target L2 slot so pipelining's build window engages. |
| 159 | + const slotStartTimestamp = getTimestampForSlot(targetSlot, test.constants); |
| 160 | + const warpTo = slotStartTimestamp - BigInt(test.L1_BLOCK_TIME_IN_S); |
| 161 | + logger.warn(`Warping to L1 timestamp ${warpTo} (one L1 slot before L2 slot ${targetSlot})`); |
| 162 | + await test.context.cheatCodes.eth.warp(Number(warpTo), { resetBlockInterval: true }); |
| 163 | + |
| 164 | + // Start all sequencers now that the clock is warped. |
| 165 | + const sequencers = nodes.slice(0, 3).map(n => n.getSequencer()!); |
| 166 | + const { failEvents } = test.watchSequencerEvents(sequencers, i => ({ validator: ['A', 'B', 'C'][i] })); |
| 167 | + await Promise.all(sequencers.map(s => s.start())); |
| 168 | + logger.warn('All sequencers started'); |
| 169 | + |
| 170 | + // Wait until each of B, C, D sees a proposed block for submissionSlot with coinbase B or C. |
| 171 | + // This confirms the gossip-only equivocating proposal from B or C has propagated. |
| 172 | + // REFACTOR: This is candidate for a "wait until all nodes see a block with these properties" helper in the test context. |
| 173 | + const gossipTimeout = test.L2_SLOT_DURATION_IN_S * 4; |
| 174 | + await Promise.all( |
| 175 | + [nodeB, nodeC, nodeD].map(async (node, idx) => { |
| 176 | + const nodeName = ['B', 'C', 'D'][idx]; |
| 177 | + let observedCoinbase: EthAddress | undefined; |
| 178 | + await retryUntil( |
| 179 | + async () => { |
| 180 | + const block = await node.getBlock('proposed'); |
| 181 | + if (!block) { |
| 182 | + return false; |
| 183 | + } |
| 184 | + const slot = block.header.globalVariables.slotNumber; |
| 185 | + const cb = block.header.globalVariables.coinbase; |
| 186 | + if (slot === submissionSlot && (cb.equals(coinbaseB) || cb.equals(coinbaseC))) { |
| 187 | + observedCoinbase = cb; |
| 188 | + return true; |
| 189 | + } |
| 190 | + return false; |
| 191 | + }, |
| 192 | + `${nodeName} sees gossip-only proposed block for slot ${submissionSlot}`, |
| 193 | + gossipTimeout, |
| 194 | + 0.5, |
| 195 | + ); |
| 196 | + logger.warn(`Node ${nodeName} observed gossip-only coinbase for slot ${submissionSlot}`, { observedCoinbase }); |
| 197 | + }), |
| 198 | + ); |
| 199 | + |
| 200 | + // Now wait until each of B, C, D has a checkpointed block for submissionSlot with coinbaseA. |
| 201 | + // This confirms A's L1-confirmed checkpoint has overridden the gossip-only proposal. |
| 202 | + // REFACTOR: This is candidate for a "wait until all nodes see a block with these properties" helper in the test context. |
| 203 | + const overrideTimeout = test.L2_SLOT_DURATION_IN_S * 4; |
| 204 | + logger.warn(`Waiting for L1-sync override on B, C, D (timeout=${overrideTimeout}s)`); |
| 205 | + await Promise.all( |
| 206 | + [nodeB, nodeC, nodeD].map(async (node, idx) => { |
| 207 | + const nodeName = ['B', 'C', 'D'][idx]; |
| 208 | + await retryUntil( |
| 209 | + async () => { |
| 210 | + const block = await node.getBlock('checkpointed'); |
| 211 | + if (!block) { |
| 212 | + return false; |
| 213 | + } |
| 214 | + const slot = block.header.globalVariables.slotNumber; |
| 215 | + const cb = block.header.globalVariables.coinbase; |
| 216 | + return slot >= submissionSlot && cb.equals(coinbaseA); |
| 217 | + }, |
| 218 | + `${nodeName} checkpointed block for slot ${submissionSlot} with coinbaseA`, |
| 219 | + overrideTimeout, |
| 220 | + 0.5, |
| 221 | + ); |
| 222 | + }), |
| 223 | + ); |
| 224 | + |
| 225 | + // Assert no spurious failures on B, C. |
| 226 | + // Node A (index 2) generates lots of proposer-rollup-check-failed noise because it has |
| 227 | + // skipIncomingProposals set and can't build a valid checkpoint for slot 2. |
| 228 | + // Nodes B (index 3) and C (index 4) generate checkpoint-publish-failed at the submission slot |
| 229 | + // because skipPublishingCheckpointsPercent: 100 causes their publish to be intentionally skipped. |
| 230 | + const observerFailEvents = failEvents.filter( |
| 231 | + e => |
| 232 | + e.sequencerIndex !== 2 && // node A |
| 233 | + !(e.type === 'proposer-rollup-check-failed' && e.reason === 'Rollup contract check failed') && |
| 234 | + !(e.type === 'checkpoint-publish-failed' && e.slot === submissionSlot), // expected skip-publish from B/C |
| 235 | + ); |
| 236 | + if (observerFailEvents.length > 0) { |
| 237 | + logger.error('Unexpected fail events on observer sequencers', observerFailEvents); |
| 238 | + } |
| 239 | + expect(observerFailEvents).toEqual([]); |
| 240 | + |
| 241 | + // Then heal. Stop A, re-enable checkpoint publishing on B and C, expect chain to advance. |
| 242 | + logger.warn('Stopping node A and re-enabling publishing on B and C'); |
| 243 | + await tryStop(nodes[0], logger); |
| 244 | + |
| 245 | + const baseline = test.monitor.checkpointNumber; |
| 246 | + logger.warn(`Checkpoint baseline after equivocation: ${baseline}`); |
| 247 | + |
| 248 | + await nodes[1].setConfig({ skipPublishingCheckpointsPercent: 0 }); |
| 249 | + await nodes[2].setConfig({ skipPublishingCheckpointsPercent: 0 }); |
| 250 | + |
| 251 | + const healTarget = CheckpointNumber(baseline + 2); |
| 252 | + const healTimeout = test.L2_SLOT_DURATION_IN_S * 8; |
| 253 | + logger.warn(`Waiting for checkpoint ${healTarget} (timeout=${healTimeout}s)`); |
| 254 | + await test.waitUntilCheckpointNumber(healTarget, healTimeout); |
| 255 | + |
| 256 | + expect(test.monitor.checkpointNumber).toBeGreaterThanOrEqual(healTarget); |
| 257 | + logger.warn(`Network healed: checkpoint ${test.monitor.checkpointNumber}`); |
| 258 | + |
| 259 | + // REFACTOR: This is candidate for a "wait until all nodes sync to a chain tip with these properties" helper in the test context. |
| 260 | + await Promise.all( |
| 261 | + [nodeB, nodeC, nodeD].map((node, idx) => |
| 262 | + retryUntil( |
| 263 | + async () => { |
| 264 | + const tips = await node.getL2Tips(); |
| 265 | + return tips.checkpointed.checkpoint.number >= healTarget; |
| 266 | + }, |
| 267 | + `${'BCD'[idx]} synced to checkpoint ${healTarget}`, |
| 268 | + healTimeout, |
| 269 | + 0.5, |
| 270 | + ), |
| 271 | + ), |
| 272 | + ); |
| 273 | + |
| 274 | + // TODO(A-980): assert the equivocating proposer of the first slot is eventually slashed |
| 275 | + // for the DUPLICATE_PROPOSAL offense. Slasher is currently disabled in the harness |
| 276 | + // (slasherEnabled: false) and enabling it requires plumbing offense submission and |
| 277 | + // waiting for the slasher's offense window. |
| 278 | + }); |
| 279 | +}); |
0 commit comments