diff --git a/.github/workflows/metrics-deploy.yml b/.github/workflows/metrics-deploy.yml index ae99d66aa728..9ccc2ad2a555 100644 --- a/.github/workflows/metrics-deploy.yml +++ b/.github/workflows/metrics-deploy.yml @@ -129,10 +129,6 @@ jobs: echo "Terraform state bucket already exists" fi - - name: Import Dashboard - working-directory: ./spartan/metrics - run: ./copy-dashboard.sh - - name: Setup Terraform uses: hashicorp/setup-terraform@633666f66e0061ca3b725c73b2ec20cd13a8fdd1 with: diff --git a/bootstrap.sh b/bootstrap.sh index b9bc7bacff0e..e8ad47a71f29 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -785,7 +785,7 @@ case "$cmd" in ;; "ci-network-bench-10tps") # Args: [docker_image] - # Deploys bench-10tps and runs the 38-min sustained 10 TPS benchmark. + # Deploys bench-10tps and runs the 10-min sustained 10 TPS benchmark. # Cleanup is done separately via ci-network-teardown. export CI=1 env_file="${1:?env_file is required}" diff --git a/ci.sh b/ci.sh index ee31f6309d4a..3c977add597e 100755 --- a/ci.sh +++ b/ci.sh @@ -268,7 +268,7 @@ case "$cmd" in ;; network-bench-10tps) # Args: [docker_image] - # Deploys the bench-10tps network and runs the 38-min 10 TPS benchmark. + # Deploys the bench-10tps network and runs the 10-min 10 TPS benchmark. 
export CI_DASHBOARD="network" export JOB_ID="x-${2:?namespace is required}-network-bench-10tps" CPUS=16 export INSTANCE_POSTFIX="n-bench-10tps" diff --git a/docs/docs-developers/docs/resources/migration_notes.md b/docs/docs-developers/docs/resources/migration_notes.md index 21177ac51407..ef7febbaeef5 100644 --- a/docs/docs-developers/docs/resources/migration_notes.md +++ b/docs/docs-developers/docs/resources/migration_notes.md @@ -36,6 +36,58 @@ The wallet SDK now supplies the default sender-for-tags from the transaction's ` The save/restore idiom previously used in account-contract constructors (`get` → `set(self.address)` → work → `set(prev)`) is also no longer needed and has been removed: the override never leaks out of the constructor, so there is nothing to restore. +### [Aztec Node] Unified `getBlock` / `getCheckpoint` RPC API + +The Aztec Node JSON-RPC surface for fetching blocks and checkpoints has been consolidated. The unified `getBlock` and `getCheckpoint` methods return uniform `BlockResponse` / `CheckpointResponse` shapes. The extra fields a caller cares about (tx bodies, L1 publish info, committee attestations, nested blocks) are now controlled by an `options` argument rather than by picking the right method. `getBlocks` and `getCheckpoints` retain their names but now return the new response shapes. 
+ +**Removed methods:** + +| Removed | Replacement | +|---|---| +| `getBlockByHash(hash)` | `getBlock(hash)` or `getBlock({ hash })` | +| `getBlockByArchive(archive)` | `getBlock({ archive })` | +| `getBlockHeaderByArchive(archive)` | `getBlock({ archive }).then(r => r?.header)` | +| `getProvenBlockNumber()` | `getBlockNumber('proven')` | +| `getCheckpointedBlockNumber()` | `getBlockNumber('checkpointed')` | + +**Deprecated but still present** (scheduled for removal once internal consumers of the archiver shape are rewired): `getL2Tips` (use `getChainTips`), `getBlockHeader` (use `getBlock(param).then(r => r?.header)`), `getCheckpointedBlocks` (use `getBlocks(from, limit, { includeL1PublishInfo: true, includeAttestations: true })`), `getCheckpointsDataForEpoch` (use `getCheckpoints(from, limit)` over the epoch's checkpoint range). Do not adopt these in new code. + +**New response shapes:** `BlockResponse` always carries `header`, `archive`, `hash`, `number`, `checkpointNumber`, and `indexWithinCheckpoint`. `body`, `l1` (an `L1PublishInfo` discriminated union), and `attestations` are present only when the matching include option is set. `CheckpointResponse` mirrors this for checkpoints, with `blocks` gated on `includeBlocks`, and always carries `feeAssetPriceModifier` as a base field. The response types are generic over the options object, so passing a literal `{ includeTransactions: true }` narrows the return type and `response.body` becomes non-optional. + +**Nested blocks on `getCheckpoint`:** only `includeTransactions` is forwarded to the blocks embedded by `includeBlocks: true`. `includeL1PublishInfo` and `includeAttestations` on a checkpoint request attach L1 / attestation data to the checkpoint itself, not to its nested blocks. + +**Return type changes for `getBlocks` / `getCheckpoints`:** the return type is now `BlockResponse[]` / `CheckpointResponse[]` instead of `L2Block[]` / `PublishedCheckpoint[]`. 
Callers that previously consumed fields of `L2Block` (e.g. `.body`) must now opt in via `{ includeTransactions: true }`; callers that consumed `PublishedCheckpoint.checkpoint.blocks` must opt in via `{ includeBlocks: true }`. + +**Migration for wallet/SDK consumers (`@aztec/aztec.js`, `@aztec/wallet-sdk`):** + +```diff +- const block = await node.getBlockByHash(hash); ++ const block = await node.getBlock(hash, { includeTransactions: true }); + +- const archiveBlock = await node.getBlockByArchive(archive); ++ const archiveBlock = await node.getBlock({ archive }, { includeTransactions: true }); + +- const provenNumber = await node.getProvenBlockNumber(); ++ const provenNumber = await node.getBlockNumber('proven'); + +- const checkpointedNumber = await node.getCheckpointedBlockNumber(); ++ const checkpointedNumber = await node.getBlockNumber('checkpointed'); + +- const tips = await node.getL2Tips(); ++ const tips = await node.getChainTips(); +``` + +`getBlockHeader`, `getCheckpointedBlocks`, `getCheckpointsDataForEpoch`, and `getL2Tips` continue to work in this release but are deprecated; migrate to the replacements above. + +**Chain-tip selectors:** `getBlockNumber` and `getCheckpointNumber` now accept an optional `ChainTip` argument (`'proposed' | 'checkpointed' | 'proven' | 'finalized'`). Note the semantic difference: on the block side `'proposed'` means the latest proposed block (chain head), whereas on the checkpoint side `'proposed'` resolves to the latest L1-confirmed checkpoint. Pre-L1-confirmation checkpoints are not exposed over RPC. + +**Block parameter variants:** `BlockParameter` now also accepts a block hash, an archive root, and chain-tip names. The existing `number | 'latest'` forms continue to work — `'latest'` is an alias for `'proposed'`. + +**Impact**: Source changes are required anywhere the removed methods are called. 
Type changes are required anywhere `L2Block` / `BlockHeader` / `CheckpointedL2Block` were consumed from the RPC — those call sites now receive `BlockResponse` / `CheckpointResponse` and must request the fields they need via `options`. Production nodes will reject JSON-RPC calls to the removed method names. + +### [Aztec Node] `feeAssetPriceModifier` now correctly populated on confirmed checkpoints + +Confirmed checkpoints previously reported `feeAssetPriceModifier = 0n` regardless of the value observed on L1, because the archiver dropped the field on checkpoint confirmation. The field is now persisted and returned correctly on `CheckpointResponse`. Any wallet or indexer logic that special-cased `0n` as a sentinel for "no modifier" will need to be updated; it is now a valid value in its own right. ### [CLI] `aztec-up` no longer exposes transitive npm bins on PATH diff --git a/docs/examples/ts/aave_bridge/index.ts b/docs/examples/ts/aave_bridge/index.ts index 760d77cf58dc..9fe9353f2768 100644 --- a/docs/examples/ts/aave_bridge/index.ts +++ b/docs/examples/ts/aave_bridge/index.ts @@ -258,13 +258,13 @@ if (!exitReceipt.blockNumber) { } const exitBlockNumber = exitReceipt.blockNumber; console.log("Waiting for block to be proven..."); -let provenBlockNumber = await node.getProvenBlockNumber(); +let provenBlockNumber = await node.getBlockNumber('proven'); while (provenBlockNumber < exitBlockNumber) { console.log( ` Waiting... 
(proven: ${provenBlockNumber}, needed: ${exitBlockNumber})`, ); await new Promise((resolve) => setTimeout(resolve, 10000)); - provenBlockNumber = await node.getProvenBlockNumber(); + provenBlockNumber = await node.getBlockNumber('proven'); } console.log("Block proven!\n"); diff --git a/docs/examples/ts/example_swap/index.ts b/docs/examples/ts/example_swap/index.ts index f8e30c7c9a1b..8838512e7b4f 100644 --- a/docs/examples/ts/example_swap/index.ts +++ b/docs/examples/ts/example_swap/index.ts @@ -384,13 +384,13 @@ console.log("✓ WETH transferred to bridge for swap\n"); // docs:start:wait_for_proof console.log("Waiting for block to be proven...\n"); -let provenBlockNumber = await node.getProvenBlockNumber(); +let provenBlockNumber = await node.getBlockNumber('proven'); while (provenBlockNumber < swapReceipt.blockNumber!) { console.log( ` Waiting... (proven: ${provenBlockNumber}, needed: ${swapReceipt.blockNumber})`, ); await new Promise((resolve) => setTimeout(resolve, 10000)); - provenBlockNumber = await node.getProvenBlockNumber(); + provenBlockNumber = await node.getBlockNumber('proven'); } console.log("Block proven!\n"); diff --git a/docs/examples/ts/token_bridge/index.ts b/docs/examples/ts/token_bridge/index.ts index 728f285164bc..997c3cadad62 100644 --- a/docs/examples/ts/token_bridge/index.ts +++ b/docs/examples/ts/token_bridge/index.ts @@ -289,7 +289,7 @@ const msgLeaf = computeL2ToL1MessageHash({ console.log("Waiting for block to be proven..."); console.log(` Exit block number: ${exitReceipt.blockNumber}`); -let provenBlockNumber = await node.getProvenBlockNumber(); +let provenBlockNumber = await node.getBlockNumber('proven'); console.log(` Current proven block: ${provenBlockNumber}`); while (provenBlockNumber < exitReceipt.blockNumber!) { @@ -297,7 +297,7 @@ while (provenBlockNumber < exitReceipt.blockNumber!) { ` Waiting... 
(proven: ${provenBlockNumber}, needed: ${exitReceipt.blockNumber})`, ); await new Promise((resolve) => setTimeout(resolve, 10000)); // Wait 10 seconds - provenBlockNumber = await node.getProvenBlockNumber(); + provenBlockNumber = await node.getBlockNumber('proven'); } console.log("Block proven!\n"); diff --git a/l1-contracts/gas_benchmark.md b/l1-contracts/gas_benchmark.md index dc616c784d6b..6f68e4ff4503 100644 --- a/l1-contracts/gas_benchmark.md +++ b/l1-contracts/gas_benchmark.md @@ -14,21 +14,22 @@ | Function | Avg Gas | Max Gas | Calldata Size | Calldata Gas | |----------------------|---------|---------|---------------|--------------| -| propose | 195,201 | 221,411 | 932 | 14,912 | -| submitEpochRootProof | 698,976 | 744,776 | 2,820 | 45,120 | -| setupEpoch | 31,965 | 113,616 | - | - | +| propose | 195,988 | 222,201 | 932 | 14,912 | +| submitEpochRootProof | 697,655 | 743,529 | 2,820 | 45,120 | +| setupEpoch | 31,998 | 113,793 | - | - | -**Avg Gas Cost per Second**: 3,331.7 gas/second +**Avg Gas Cost per Second**: 3,341.5 gas/second *Epoch duration*: 0h 38m 24s ## Validators | Function | Avg Gas | Max Gas | Calldata Size | Calldata Gas | |----------------------|---------|---------|---------------|--------------| -| propose | 322,945 | 350,085 | 4,452 | 71,232 | -| submitEpochRootProof | 897,150 | 942,954 | 5,316 | 85,056 | -| aggregate3 | 371,401 | 384,831 | - | - | -| setupEpoch | 46,426 | 547,449 | - | - | +| propose | 324,449 | 351,604 | 4,452 | 71,232 | +| submitEpochRootProof | 896,101 | 941,944 | 5,316 | 85,056 | +| aggregate3 | 373,118 | 386,457 | - | - | +| setupEpoch | 46,459 | 547,626 | - | - | -**Avg Gas Cost per Second**: 5,284.3 gas/second +**Avg Gas Cost per Second**: 5,304.3 gas/second *Epoch duration*: 0h 38m 24s + diff --git a/l1-contracts/gas_benchmark_results.json b/l1-contracts/gas_benchmark_results.json index 48c9c1e86218..45c6a506461d 100644 --- a/l1-contracts/gas_benchmark_results.json +++ b/l1-contracts/gas_benchmark_results.json @@ 
-2,26 +2,26 @@ "no_validators": { "propose": { "calls": 150, - "min": 181595, - "mean": 195201, - "median": 190979, - "max": 221411, + "min": 182373, + "mean": 195988, + "median": 191762, + "max": 222201, "calldata_size": 932, "calldata_gas": 14912 }, "setupEpoch": { "calls": 150, - "min": 29236, - "mean": 31965, - "median": 29236, - "max": 113616 + "min": 29264, + "mean": 31998, + "median": 29264, + "max": 113793 }, "submitEpochRootProof": { "calls": 4, - "min": 677928, - "mean": 698976, - "median": 686601, - "max": 744776, + "min": 676491, + "mean": 697655, + "median": 685300, + "max": 743529, "calldata_size": 2820, "calldata_gas": 45120 } @@ -29,35 +29,35 @@ "validators": { "propose": { "calls": 150, - "min": 300568, - "mean": 322945, - "median": 322454, - "max": 350085, + "min": 302105, + "mean": 324449, + "median": 323910, + "max": 351604, "calldata_size": 4452, "calldata_gas": 71232 }, "setupEpoch": { "calls": 150, - "min": 29236, - "mean": 46426, - "median": 29236, - "max": 547449 + "min": 29264, + "mean": 46459, + "median": 29264, + "max": 547626 }, "submitEpochRootProof": { "calls": 4, - "min": 876112, - "mean": 897150, - "median": 884767, - "max": 942954, + "min": 874924, + "mean": 896101, + "median": 883769, + "max": 941944, "calldata_size": 5316, "calldata_gas": 85056 }, "aggregate3": { "calls": 55, - "min": 360298, - "mean": 371401, - "median": 371165, - "max": 384831 + "min": 362015, + "mean": 373118, + "median": 372828, + "max": 386457 } } -} +} \ No newline at end of file diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index a04e1e3253f9..281e4badd070 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -550,6 +550,18 @@ contract Rollup is IStaking, IValidatorSelection, IRollup, RollupCore { return STFLib.getStorage().config.feeAssetPortal; } + function getVkTreeRoot() external view override(IRollup) returns (bytes32) { + return STFLib.getStorage().config.vkTreeRoot; + } + + 
function getProtocolContractsHash() external view override(IRollup) returns (bytes32) { + return STFLib.getStorage().config.protocolContractsHash; + } + + function getEpochProofVerifier() external view override(IRollup) returns (IVerifier) { + return STFLib.getStorage().config.epochProofVerifier; + } + function getRewardDistributor() external view override(IRollup) returns (IRewardDistributor) { return RewardExtLib.getRewardDistributor(); } diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index 448b3bcfff0c..f3e67ab100a5 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -232,6 +232,10 @@ interface IRollup is IRollupCore, IHaveVersion { function getInbox() external view returns (IInbox); function getOutbox() external view returns (IOutbox); + function getVkTreeRoot() external view returns (bytes32); + function getProtocolContractsHash() external view returns (bytes32); + function getEpochProofVerifier() external view returns (IVerifier); + function getRewardConfig() external view returns (RewardConfig memory); function getCheckpointReward() external view returns (uint256); } diff --git a/l1-contracts/src/core/libraries/rollup/AttestationLib.sol b/l1-contracts/src/core/libraries/rollup/AttestationLib.sol index f9062c3b8c1b..210243d235fc 100644 --- a/l1-contracts/src/core/libraries/rollup/AttestationLib.sol +++ b/l1-contracts/src/core/libraries/rollup/AttestationLib.sol @@ -3,20 +3,12 @@ pragma solidity ^0.8.27; import {Errors} from "@aztec/core/libraries/Errors.sol"; +import {CoordinationSignatureLib} from "@aztec/core/libraries/rollup/CoordinationSignatureLib.sol"; import {Signature, SignatureLib} from "@aztec/shared/libraries/SignatureLib.sol"; uint256 constant SIGNATURE_LENGTH = 65; // v (1) + r (32) + s (32) uint256 constant ADDRESS_LENGTH = 20; -/** - * @notice The domain separator for the signatures - */ -enum SignatureDomainSeparator { - 
checkpointProposal, - checkpointAttestation, - attestationsAndSigners -} - // A committee attestation can be made up of a signature and an address. // Committee members that have attested will produce a signature, and if they have not attested, the signature will be // empty and an address provided. @@ -35,6 +27,24 @@ struct CommitteeAttestations { library AttestationLib { using SignatureLib for Signature; + function getAttestationsAndSignersDigest(CommitteeAttestations memory _attestations, address[] memory _signers) + internal + view + returns (bytes32) + { + return getAttestationsAndSignersDigest(_attestations, _signers, address(this)); + } + + function getAttestationsAndSignersDigest( + CommitteeAttestations memory _attestations, + address[] memory _signers, + address _verifyingContract + ) internal view returns (bytes32) { + return CoordinationSignatureLib.attestationsAndSignersDigest( + keccak256(abi.encode(_attestations, _signers)), _verifyingContract + ); + } + /** * @notice Checks if the given CommitteeAttestations is empty * Wll return true if either component is empty as they are needed together. @@ -217,12 +227,4 @@ library AttestationLib { return addresses; } - - function getAttestationsAndSignersDigest(CommitteeAttestations memory _attestations, address[] memory _signers) - internal - pure - returns (bytes32) - { - return keccak256(abi.encode(SignatureDomainSeparator.attestationsAndSigners, _attestations, _signers)); - } } diff --git a/l1-contracts/src/core/libraries/rollup/CoordinationSignatureLib.sol b/l1-contracts/src/core/libraries/rollup/CoordinationSignatureLib.sol new file mode 100644 index 000000000000..6b5f44e7bd49 --- /dev/null +++ b/l1-contracts/src/core/libraries/rollup/CoordinationSignatureLib.sol @@ -0,0 +1,72 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
+pragma solidity ^0.8.27; + +library CoordinationSignatureLib { + bytes32 internal constant DOMAIN_TYPEHASH = + keccak256("EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)"); + bytes32 internal constant NAME_HASH = keccak256("Aztec Rollup"); + bytes32 internal constant VERSION_HASH = keccak256("1"); + + bytes32 internal constant BLOCK_PROPOSAL_TYPEHASH = keccak256("BlockProposal(bytes32 payloadHash)"); + bytes32 internal constant CHECKPOINT_PROPOSAL_TYPEHASH = keccak256("CheckpointProposal(bytes32 payloadHash)"); + bytes32 internal constant CHECKPOINT_ATTESTATION_TYPEHASH = keccak256("CheckpointAttestation(bytes32 payloadHash)"); + bytes32 internal constant ATTESTATIONS_AND_SIGNERS_TYPEHASH = + keccak256("AttestationsAndSigners(bytes32 payloadHash)"); + + function domainSeparator() internal view returns (bytes32) { + return domainSeparator(address(this)); + } + + function domainSeparator(address _verifyingContract) internal view returns (bytes32) { + return keccak256(abi.encode(DOMAIN_TYPEHASH, NAME_HASH, VERSION_HASH, block.chainid, _verifyingContract)); + } + + function toTypedDataHash(bytes32 _structHash) internal view returns (bytes32) { + return toTypedDataHash(_structHash, address(this)); + } + + function toTypedDataHash(bytes32 _structHash, address _verifyingContract) internal view returns (bytes32) { + return keccak256(abi.encodePacked(hex"1901", domainSeparator(_verifyingContract), _structHash)); + } + + function blockProposalDigest(bytes32 _payloadHash) internal view returns (bytes32) { + return blockProposalDigest(_payloadHash, address(this)); + } + + function blockProposalDigest(bytes32 _payloadHash, address _verifyingContract) internal view returns (bytes32) { + return toTypedDataHash(keccak256(abi.encode(BLOCK_PROPOSAL_TYPEHASH, _payloadHash)), _verifyingContract); + } + + function checkpointProposalDigest(bytes32 _payloadHash) internal view returns (bytes32) { + return checkpointProposalDigest(_payloadHash, 
address(this)); + } + + function checkpointProposalDigest(bytes32 _payloadHash, address _verifyingContract) internal view returns (bytes32) { + return toTypedDataHash(keccak256(abi.encode(CHECKPOINT_PROPOSAL_TYPEHASH, _payloadHash)), _verifyingContract); + } + + function checkpointAttestationDigest(bytes32 _payloadHash) internal view returns (bytes32) { + return checkpointAttestationDigest(_payloadHash, address(this)); + } + + function checkpointAttestationDigest(bytes32 _payloadHash, address _verifyingContract) + internal + view + returns (bytes32) + { + return toTypedDataHash(keccak256(abi.encode(CHECKPOINT_ATTESTATION_TYPEHASH, _payloadHash)), _verifyingContract); + } + + function attestationsAndSignersDigest(bytes32 _payloadHash) internal view returns (bytes32) { + return attestationsAndSignersDigest(_payloadHash, address(this)); + } + + function attestationsAndSignersDigest(bytes32 _payloadHash, address _verifyingContract) + internal + view + returns (bytes32) + { + return toTypedDataHash(keccak256(abi.encode(ATTESTATIONS_AND_SIGNERS_TYPEHASH, _payloadHash)), _verifyingContract); + } +} diff --git a/l1-contracts/src/core/libraries/rollup/InvalidateLib.sol b/l1-contracts/src/core/libraries/rollup/InvalidateLib.sol index f9efbd49d186..2db0ba807f5f 100644 --- a/l1-contracts/src/core/libraries/rollup/InvalidateLib.sol +++ b/l1-contracts/src/core/libraries/rollup/InvalidateLib.sol @@ -13,7 +13,6 @@ import {ValidatorSelectionLib} from "@aztec/core/libraries/rollup/ValidatorSelec import {Timestamp, Slot, Epoch, TimeLib} from "@aztec/core/libraries/TimeLib.sol"; import {CompressedSlot, CompressedTimeMath} from "@aztec/shared/libraries/CompressedTimeMath.sol"; import {ECDSA} from "@oz/utils/cryptography/ECDSA.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; /** * @title InvalidateLib @@ -57,7 +56,6 @@ library InvalidateLib { using TimeLib for Epoch; using ChainTipsLib for CompressedChainTips; using AttestationLib for 
CommitteeAttestations; - using MessageHashUtils for bytes32; using CompressedTimeMath for CompressedSlot; /** @@ -235,7 +233,7 @@ library InvalidateLib { ); // Get the digest of the payload that was signed by the committee - bytes32 digest = checkpointLog.payloadDigest.toEthSignedMessageHash(); + bytes32 digest = checkpointLog.payloadDigest; return (digest, committeeSize); } diff --git a/l1-contracts/src/core/libraries/rollup/ProposeLib.sol b/l1-contracts/src/core/libraries/rollup/ProposeLib.sol index 747bbf36dbdf..64f57a04f44a 100644 --- a/l1-contracts/src/core/libraries/rollup/ProposeLib.sol +++ b/l1-contracts/src/core/libraries/rollup/ProposeLib.sol @@ -9,7 +9,8 @@ import {TempCheckpointLog} from "@aztec/core/libraries/compressed-data/Checkpoin import {FeeHeader} from "@aztec/core/libraries/compressed-data/fees/FeeStructs.sol"; import {ChainTipsLib, CompressedChainTips} from "@aztec/core/libraries/compressed-data/Tips.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; -import {SignatureDomainSeparator, CommitteeAttestations} from "@aztec/core/libraries/rollup/AttestationLib.sol"; +import {CommitteeAttestations} from "@aztec/core/libraries/rollup/AttestationLib.sol"; +import {CoordinationSignatureLib} from "@aztec/core/libraries/rollup/CoordinationSignatureLib.sol"; import {OracleInput, FeeLib, ManaMinFeeComponents} from "@aztec/core/libraries/rollup/FeeLib.sol"; import {ValidatorSelectionLib} from "@aztec/core/libraries/rollup/ValidatorSelectionLib.sol"; import {Timestamp, Slot, Epoch, TimeLib} from "@aztec/core/libraries/TimeLib.sol"; @@ -375,7 +376,11 @@ library ProposeLib { return FeeLib.getManaMinFeeComponentsAt(checkpointOfInterest, _timestamp, _inFeeAsset); } - function digest(ProposePayload memory _args) internal pure returns (bytes32) { - return keccak256(abi.encode(SignatureDomainSeparator.checkpointAttestation, _args)); + function digest(ProposePayload memory _args) internal view returns (bytes32) { + return digest(_args, address(this)); + 
} + + function digest(ProposePayload memory _args, address _verifyingContract) internal view returns (bytes32) { + return CoordinationSignatureLib.checkpointAttestationDigest(keccak256(abi.encode(_args)), _verifyingContract); } } diff --git a/l1-contracts/src/core/libraries/rollup/ValidatorSelectionLib.sol b/l1-contracts/src/core/libraries/rollup/ValidatorSelectionLib.sol index 41834351de54..5d1d8b0fad38 100644 --- a/l1-contracts/src/core/libraries/rollup/ValidatorSelectionLib.sol +++ b/l1-contracts/src/core/libraries/rollup/ValidatorSelectionLib.sol @@ -13,7 +13,6 @@ import {STFLib} from "@aztec/core/libraries/rollup/STFLib.sol"; import {Timestamp, Slot, Epoch, TimeLib} from "@aztec/core/libraries/TimeLib.sol"; import {SignatureLib, Signature} from "@aztec/shared/libraries/SignatureLib.sol"; import {ECDSA} from "@oz/utils/cryptography/ECDSA.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {SafeCast} from "@oz/utils/math/SafeCast.sol"; import {SlotDerivation} from "@oz/utils/SlotDerivation.sol"; import {Checkpoints} from "@oz/utils/structs/Checkpoints.sol"; @@ -94,7 +93,6 @@ import {TransientSlot} from "@oz/utils/TransientSlot.sol"; */ library ValidatorSelectionLib { using EnumerableSet for EnumerableSet.AddressSet; - using MessageHashUtils for bytes32; using SignatureLib for Signature; using TimeLib for Timestamp; using TimeLib for Epoch; @@ -273,14 +271,12 @@ library ValidatorSelectionLib { } // Check if the signature is correct - bytes32 digest = _digest.toEthSignedMessageHash(); Signature memory signature = _attestations.getSignature(proposerIndex); - SignatureLib.verify(signature, proposer, digest); + SignatureLib.verify(signature, proposer, _digest); // Check that the proposer have signed the `_attestations|_signers` data such that invalid `_attestations|_signers` // data can be attributed to the `proposer` specifically. 
- bytes32 attestationsAndSignersDigest = - _attestations.getAttestationsAndSignersDigest(_signers).toEthSignedMessageHash(); + bytes32 attestationsAndSignersDigest = _attestations.getAttestationsAndSignersDigest(_signers); SignatureLib.verify(_attestationsAndSignersSignature, proposer, attestationsAndSignersDigest); if (_updateCache) { @@ -333,8 +329,6 @@ library ValidatorSelectionLib { reconstructedCommittee: new address[](targetCommitteeSize) }); - bytes32 digest = _digest.toEthSignedMessageHash(); - bytes memory signaturesOrAddresses = _attestations.signaturesOrAddresses; uint256 dataPtr; assembly { @@ -360,7 +354,7 @@ library ValidatorSelectionLib { } ++stack.signaturesRecovered; - stack.reconstructedCommittee[i] = ECDSA.recover(digest, v, r, s); + stack.reconstructedCommittee[i] = ECDSA.recover(_digest, v, r, s); } else { address addr; assembly { diff --git a/l1-contracts/test/RollupGetters.t.sol b/l1-contracts/test/RollupGetters.t.sol index 2bd89fb1525a..dff58c1374e6 100644 --- a/l1-contracts/test/RollupGetters.t.sol +++ b/l1-contracts/test/RollupGetters.t.sol @@ -7,6 +7,7 @@ pragma solidity >=0.8.27; import {IRollupCore, CheckpointLog} from "@aztec/core/interfaces/IRollup.sol"; import {IStakingCore} from "@aztec/core/interfaces/IStaking.sol"; +import {IVerifier} from "@aztec/core/interfaces/IVerifier.sol"; import {TestConstants} from "./harnesses/TestConstants.sol"; import {Timestamp, Slot, Epoch} from "@aztec/shared/libraries/TimeMath.sol"; import {RewardConfig, Bps} from "@aztec/core/libraries/rollup/RewardLib.sol"; @@ -292,6 +293,21 @@ contract RollupShouldBeGetters is ValidatorSelectionTestBase { assertEq(writes.length, 0, "No writes should be done"); } + function test_getGenesisConfig() external setup(1, 1) { + vm.record(); + + bytes32 vkTreeRoot = rollup.getVkTreeRoot(); + bytes32 protocolContractsHash = rollup.getProtocolContractsHash(); + IVerifier epochProofVerifier = rollup.getEpochProofVerifier(); + + assertEq(vkTreeRoot, 
TestConstants.GENESIS_VK_TREE_ROOT, "invalid vkTreeRoot"); + assertEq(protocolContractsHash, TestConstants.GENESIS_PROTOCOL_CONTRACTS_HASH, "invalid protocolContractsHash"); + assertTrue(address(epochProofVerifier) != address(0), "epochProofVerifier not set"); + + (, bytes32[] memory writes) = vm.accesses(address(rollup)); + assertEq(writes.length, 0, "No writes should be done"); + } + function test_getRewardConfig() external setup(1, 1) { // By default, we will be replacing the reward distributor and booster addresses RewardConfig memory defaultConfig = TestConstants.getRewardConfig(); diff --git a/l1-contracts/test/benchmark/happy.t.sol b/l1-contracts/test/benchmark/happy.t.sol index 6dd152f38cff..80375cc6434f 100644 --- a/l1-contracts/test/benchmark/happy.t.sol +++ b/l1-contracts/test/benchmark/happy.t.sol @@ -56,7 +56,6 @@ import { FeeHeaderModel, ManaMinFeeComponentsModel } from "test/fees/FeeModelTestPoints.t.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {Timestamp, Slot, Epoch, TimeLib} from "@aztec/core/libraries/TimeLib.sol"; import {MultiAdder, CheatDepositArgs} from "@aztec/mock/MultiAdder.sol"; import {RollupBuilder} from "../builder/RollupBuilder.sol"; @@ -98,7 +97,6 @@ contract FakeCanonical is IRewardDistributor { } contract BenchmarkRollupTest is FeeModelTestPoints, DecoderBase { - using MessageHashUtils for bytes32; using stdStorage for StdStorage; using TimeLib for Slot; using TimeLib for Timestamp; @@ -286,7 +284,7 @@ contract BenchmarkRollupTest is FeeModelTestPoints, DecoderBase { ProposePayload memory proposePayload = ProposePayload({archive: proposeArgs.archive, oracleInput: proposeArgs.oracleInput, headerHash: headerHash}); - bytes32 digest = ProposeLib.digest(proposePayload); + bytes32 digest = ProposeLib.digest(proposePayload, address(rollup)); // loop through to make sure we create an attestation for the proposer for (uint256 i = 0; i < validators.length; i++) { @@ -318,7 +316,9 @@ contract 
BenchmarkRollupTest is FeeModelTestPoints, DecoderBase { if (proposer != address(0)) { attestationsAndSignersSignature = createAttestation( proposer, - AttestationLib.getAttestationsAndSignersDigest(AttestationLibHelper.packAttestations(attestations), signers) + AttestationLib.getAttestationsAndSignersDigest( + AttestationLibHelper.packAttestations(attestations), signers, address(rollup) + ) ).signature; } @@ -334,8 +334,7 @@ contract BenchmarkRollupTest is FeeModelTestPoints, DecoderBase { function createAttestation(address _signer, bytes32 _digest) internal view returns (CommitteeAttestation memory) { uint256 privateKey = attesterPrivateKeys[_signer]; - bytes32 digest = _digest.toEthSignedMessageHash(); - (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); + (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, _digest); Signature memory signature = Signature({v: v, r: r, s: s}); // Address can be zero for signed attestations diff --git a/l1-contracts/test/compression/PreHeating.t.sol b/l1-contracts/test/compression/PreHeating.t.sol index f2c2e24e8fa5..5c9ff87495d5 100644 --- a/l1-contracts/test/compression/PreHeating.t.sol +++ b/l1-contracts/test/compression/PreHeating.t.sol @@ -55,7 +55,6 @@ import { FeeHeaderModel, ManaMinFeeComponentsModel } from "test/fees/FeeModelTestPoints.t.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {Timestamp, Slot, Epoch, TimeLib} from "@aztec/core/libraries/TimeLib.sol"; import {MultiAdder, CheatDepositArgs} from "@aztec/mock/MultiAdder.sol"; import {RollupBuilder} from "../builder/RollupBuilder.sol"; @@ -99,7 +98,6 @@ contract FakeCanonical is IRewardDistributor { * are testing some edges that will break if the `roundaboutSize` is wrong! 
*/ contract PreHeatingTest is FeeModelTestPoints, DecoderBase { - using MessageHashUtils for bytes32; using TimeLib for Slot; using FeeLib for uint256; using FeeLib for ManaMinFeeComponents; @@ -343,7 +341,7 @@ contract PreHeatingTest is FeeModelTestPoints, DecoderBase { ProposePayload memory proposePayload = ProposePayload({archive: proposeArgs.archive, oracleInput: proposeArgs.oracleInput, headerHash: headerHash}); - bytes32 digest = ProposeLib.digest(proposePayload); + bytes32 digest = ProposeLib.digest(proposePayload, address(rollup)); // loop through to make sure we create an attestation for the proposer for (uint256 i = 0; i < validators.length; i++) { @@ -375,7 +373,9 @@ contract PreHeatingTest is FeeModelTestPoints, DecoderBase { if (proposer != address(0)) { attestationsAndSignersSignature = createAttestation( proposer, - AttestationLib.getAttestationsAndSignersDigest(AttestationLibHelper.packAttestations(attestations), signers) + AttestationLib.getAttestationsAndSignersDigest( + AttestationLibHelper.packAttestations(attestations), signers, address(rollup) + ) ).signature; } @@ -391,8 +391,7 @@ contract PreHeatingTest is FeeModelTestPoints, DecoderBase { function createAttestation(address _signer, bytes32 _digest) internal view returns (CommitteeAttestation memory) { uint256 privateKey = attesterPrivateKeys[_signer]; - bytes32 digest = _digest.toEthSignedMessageHash(); - (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); + (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, _digest); Signature memory signature = Signature({v: v, r: r, s: s}); // Address can be zero for signed attestations diff --git a/l1-contracts/test/escape-hatch/e2e/escapeHatchReplacement.t.sol b/l1-contracts/test/escape-hatch/e2e/escapeHatchReplacement.t.sol index e76ded6a1a5f..2005c24fa70c 100644 --- a/l1-contracts/test/escape-hatch/e2e/escapeHatchReplacement.t.sol +++ b/l1-contracts/test/escape-hatch/e2e/escapeHatchReplacement.t.sol @@ -95,7 +95,7 @@ contract 
EscapeHatchReplacementTest is EscapeHatchIntegrationBase { uint256 committeeSize = data.committee.length; data.attestations = new CommitteeAttestation[](committeeSize); address[] memory signers = new address[](committeeSize); - bytes32 digest = ProposeLib.digest(proposePayload); + bytes32 digest = ProposeLib.digest(proposePayload, address(rollup)); for (uint256 i = 0; i < committeeSize; i++) { data.attestations[i] = _createAttestation(data.committee[i], digest); @@ -118,8 +118,9 @@ contract EscapeHatchReplacementTest is EscapeHatchIntegrationBase { // Proposer signs over attestations and signers Signature memory attestationsAndSignersSignature = - _createAttestation(proposer, AttestationLib.getAttestationsAndSignersDigest(data.packedAttestations, signers)) - .signature; + _createAttestation( + proposer, AttestationLib.getAttestationsAndSignersDigest(data.packedAttestations, signers, address(rollup)) + ).signature; vm.prank(proposer); rollup.propose( diff --git a/l1-contracts/test/escape-hatch/integration/EscapeHatchIntegrationBase.sol b/l1-contracts/test/escape-hatch/integration/EscapeHatchIntegrationBase.sol index 63fff8a63fb4..f578aa23dc9b 100644 --- a/l1-contracts/test/escape-hatch/integration/EscapeHatchIntegrationBase.sol +++ b/l1-contracts/test/escape-hatch/integration/EscapeHatchIntegrationBase.sol @@ -20,7 +20,6 @@ import { import {CheckpointLog, SubmitEpochRootProofArgs, PublicInputArgs} from "@aztec/core/interfaces/IRollup.sol"; import {IValidatorSelectionCore} from "@aztec/core/interfaces/IValidatorSelection.sol"; import {Strings} from "@oz/utils/Strings.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {SafeCast} from "@oz/utils/math/SafeCast.sol"; import {AttestationLibHelper} from "@test/helper_libraries/AttestationLibHelper.sol"; @@ -30,7 +29,6 @@ import {AttestationLibHelper} from "@test/helper_libraries/AttestationLibHelper. 
* @dev Provides common setup, configuration, and helper functions for integration tests */ abstract contract EscapeHatchIntegrationBase is ValidatorSelectionTestBase { - using MessageHashUtils for bytes32; // ============ Escape Hatch Configuration ============ uint96 internal constant DEFAULT_BOND_SIZE = 100e18; uint96 internal constant DEFAULT_WITHDRAWAL_TAX = 1e18; @@ -215,7 +213,7 @@ abstract contract EscapeHatchIntegrationBase is ValidatorSelectionTestBase { uint256 committeeSize = committee.length; attestations = new CommitteeAttestation[](committeeSize); address[] memory signers = new address[](committeeSize); - bytes32 digest = ProposeLib.digest(proposePayload); + bytes32 digest = ProposeLib.digest(proposePayload, address(rollup)); for (uint256 i = 0; i < committeeSize; i++) { attestations[i] = _createAttestation(committee[i], digest); @@ -226,7 +224,9 @@ abstract contract EscapeHatchIntegrationBase is ValidatorSelectionTestBase { Signature memory attestationsAndSignersSignature = _createAttestation( proposer, - AttestationLib.getAttestationsAndSignersDigest(AttestationLibHelper.packAttestations(attestations), signers) + AttestationLib.getAttestationsAndSignersDigest( + AttestationLibHelper.packAttestations(attestations), signers, address(rollup) + ) ).signature; // Propose the checkpoint @@ -248,8 +248,7 @@ abstract contract EscapeHatchIntegrationBase is ValidatorSelectionTestBase { function _createAttestation(address _signer, bytes32 _digest) internal view returns (CommitteeAttestation memory) { uint256 privateKey = attesterPrivateKeys[_signer]; - bytes32 digest = _digest.toEthSignedMessageHash(); - (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); + (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, _digest); Signature memory signature = Signature({v: v, r: r, s: s}); return CommitteeAttestation({addr: _signer, signature: signature}); diff --git a/l1-contracts/test/escape-hatch/integration/invalidate.t.sol 
b/l1-contracts/test/escape-hatch/integration/invalidate.t.sol index 0cb06f20eb70..f4709a59eabf 100644 --- a/l1-contracts/test/escape-hatch/integration/invalidate.t.sol +++ b/l1-contracts/test/escape-hatch/integration/invalidate.t.sol @@ -153,7 +153,7 @@ contract invalidateTest is EscapeHatchIntegrationBase { uint256 committeeSize = data.committee.length; data.attestations = new CommitteeAttestation[](committeeSize); address[] memory signers = new address[](committeeSize); - bytes32 digest = ProposeLib.digest(proposePayload); + bytes32 digest = ProposeLib.digest(proposePayload, address(rollup)); for (uint256 i = 0; i < committeeSize; i++) { data.attestations[i] = _createAttestation(data.committee[i], digest); @@ -177,7 +177,9 @@ contract invalidateTest is EscapeHatchIntegrationBase { Signature memory attestationsAndSignersSignature = _createAttestation( proposer, - AttestationLib.getAttestationsAndSignersDigest(AttestationLibHelper.packAttestations(data.attestations), signers) + AttestationLib.getAttestationsAndSignersDigest( + AttestationLibHelper.packAttestations(data.attestations), signers, address(rollup) + ) ).signature; // Propose the checkpoint diff --git a/l1-contracts/test/validator-selection/ValidatorSelection.t.sol b/l1-contracts/test/validator-selection/ValidatorSelection.t.sol index fe7240e16920..17112c7d3b21 100644 --- a/l1-contracts/test/validator-selection/ValidatorSelection.t.sol +++ b/l1-contracts/test/validator-selection/ValidatorSelection.t.sol @@ -16,7 +16,6 @@ import {Errors} from "@aztec/core/libraries/Errors.sol"; import {Timestamp, Epoch} from "@aztec/core/libraries/TimeLib.sol"; import {IPayload} from "@aztec/core/slashing/Slasher.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {SafeCast} from "@oz/utils/math/SafeCast.sol"; import {ProposedHeaderLib} from "@aztec/core/libraries/rollup/ProposedHeaderLib.sol"; @@ -121,7 +120,6 @@ library TestFlagsLib { * The tests in this file is testing the 
sequencer selection */ contract ValidatorSelectionTest is ValidatorSelectionTestBase { - using MessageHashUtils for bytes32; using TestFlagsLib for TestFlags; bytes4 NO_REVERT = bytes4(0); @@ -565,7 +563,7 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { ree.attestations = new CommitteeAttestation[](ree.attestationsCount); ree.signers = new address[](_signatureCount); - bytes32 digest = ProposeLib.digest(ree.proposePayload); + bytes32 digest = ProposeLib.digest(ree.proposePayload, address(rollup)); { uint256 signersIndex = 0; @@ -612,7 +610,7 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { ree.attestationsAndSignersSignature = _createAttestation( ree.proposer, AttestationLib.getAttestationsAndSignersDigest( - AttestationLibHelper.packAttestations(ree.attestations), ree.signers + AttestationLibHelper.packAttestations(ree.attestations), ree.signers, address(rollup) ) ).signature; } @@ -665,7 +663,7 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { ree.attestationsAndSignersSignature = _createAttestation( ree.proposer, AttestationLib.getAttestationsAndSignersDigest( - AttestationLibHelper.packAttestations(ree.attestations), ree.signers + AttestationLibHelper.packAttestations(ree.attestations), ree.signers, address(rollup) ) ).signature; } else if (ree.proposer != address(0) && _flags.invalidAttestationAndSignersSignature) { @@ -680,7 +678,7 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { ree.attestationsAndSignersSignature = _createAttestation( invalidSigner, AttestationLib.getAttestationsAndSignersDigest( - AttestationLibHelper.packAttestations(ree.attestations), ree.signers + AttestationLibHelper.packAttestations(ree.attestations), ree.signers, address(rollup) ) ).signature; } @@ -767,8 +765,7 @@ contract ValidatorSelectionTest is ValidatorSelectionTestBase { function _createAttestation(address _signer, bytes32 _digest) internal view returns (CommitteeAttestation memory) { uint256 privateKey = 
attesterPrivateKeys[_signer]; - bytes32 digest = _digest.toEthSignedMessageHash(); - (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); + (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, _digest); Signature memory signature = Signature({v: v, r: r, s: s}); return CommitteeAttestation({addr: _signer, signature: signature}); diff --git a/l1-contracts/test/validator-selection/ValidatorSelectionBase.sol b/l1-contracts/test/validator-selection/ValidatorSelectionBase.sol index 16dbae9d37b6..95f7cfb445b2 100644 --- a/l1-contracts/test/validator-selection/ValidatorSelectionBase.sol +++ b/l1-contracts/test/validator-selection/ValidatorSelectionBase.sol @@ -13,7 +13,6 @@ import {Registry} from "@aztec/governance/Registry.sol"; import {Rollup} from "@aztec/core/Rollup.sol"; import {MerkleTestUtil} from "../merkle/TestUtil.sol"; import {TestERC20} from "@aztec/mock/TestERC20.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {TestConstants} from "../harnesses/TestConstants.sol"; import {Epoch, Timestamp} from "@aztec/core/libraries/TimeLib.sol"; @@ -37,7 +36,6 @@ import {Math} from "@oz/utils/math/Math.sol"; * The tests in this file is testing the sequencer selection */ contract ValidatorSelectionTestBase is DecoderBase { - using MessageHashUtils for bytes32; using stdStorage for StdStorage; struct ProposeTestData { diff --git a/l1-contracts/test/validator-selection/tmnt207.t.sol b/l1-contracts/test/validator-selection/tmnt207.t.sol index 92dc377c1569..233bbb6581d0 100644 --- a/l1-contracts/test/validator-selection/tmnt207.t.sol +++ b/l1-contracts/test/validator-selection/tmnt207.t.sol @@ -36,7 +36,6 @@ import {CheatDepositArgs} from "@aztec/mock/MultiAdder.sol"; import {BN254Lib, G1Point, G2Point} from "@aztec/shared/libraries/BN254Lib.sol"; import {StakingQueueConfig} from "@aztec/core/libraries/compressed-data/StakingQueueConfig.sol"; import {ProposedHeaderLib} from 
"@aztec/core/libraries/rollup/ProposedHeaderLib.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import { IRollup, IRollupCore, @@ -56,7 +55,6 @@ struct Checkpoint { } contract Tmnt207Test is RollupBase { - using MessageHashUtils for bytes32; using ProposeLib for ProposeArgs; using TimeLib for Timestamp; using TimeLib for Slot; @@ -277,7 +275,7 @@ contract Tmnt207Test is RollupBase { ProposePayload memory proposePayload = ProposePayload({archive: proposeArgs.archive, oracleInput: proposeArgs.oracleInput, headerHash: headerHash}); - bytes32 digest = ProposeLib.digest(proposePayload); + bytes32 digest = ProposeLib.digest(proposePayload, address(rollup)); // loop through to make sure we create an attestation for the proposer for (uint256 i = 0; i < validators.length; i++) { @@ -309,7 +307,9 @@ contract Tmnt207Test is RollupBase { if (proposer != address(0)) { attestationsAndSignersSignature = createAttestation( proposer, - AttestationLib.getAttestationsAndSignersDigest(AttestationLibHelper.packAttestations(attestations), signers) + AttestationLib.getAttestationsAndSignersDigest( + AttestationLibHelper.packAttestations(attestations), signers, address(rollup) + ) ).signature; } @@ -325,8 +325,7 @@ contract Tmnt207Test is RollupBase { function createAttestation(address _signer, bytes32 _digest) internal view returns (CommitteeAttestation memory) { uint256 privateKey = attesterPrivateKeys[_signer]; - bytes32 digest = _digest.toEthSignedMessageHash(); - (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); + (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, _digest); Signature memory signature = Signature({v: v, r: r, s: s}); // Address can be zero for signed attestations diff --git a/spartan/bootstrap.sh b/spartan/bootstrap.sh index 3850c4c4133d..a27c2a895fa1 100755 --- a/spartan/bootstrap.sh +++ b/spartan/bootstrap.sh @@ -183,13 +183,11 @@ function block_capacity_bench_cmds { } function bench_10tps_cmds { - # Single 38-min 
sustained 10 TPS run on the bench-10tps network. Long enough - # for the sequencer to hit steady state beyond the initial mempool fill-up. local high_value_tps=10 local low_value_tps=0 - local test_duration=2280 # 38 min - local timeout=3600 # 1h — test plus drain/teardown buffer - echo "$(hash):TIMEOUT=${timeout} BENCH_OUTPUT=bench-out/n_tps.10tps.bench.json BENCH_SCENARIO=10tps LOW_VALUE_TPS=${low_value_tps} HIGH_VALUE_TPS=${high_value_tps} TEST_DURATION_SECONDS=${test_duration} $root/yarn-project/end-to-end/scripts/run_test.sh simple n_tps.test.ts" + local test_duration=${TEST_DURATION_SECONDS:-600} # 10 mins + local timeout=${BENCH_TIMEOUT_SECONDS:-3600} + echo "$(hash):TIMEOUT=${timeout} BENCH_RUN_ID=${BENCH_RUN_ID:-} BENCH_OUTPUT=bench-out/n_tps.10tps.bench.json BENCH_SCENARIO=10tps LOW_VALUE_TPS=${low_value_tps} HIGH_VALUE_TPS=${high_value_tps} TEST_DURATION_SECONDS=${test_duration} $root/yarn-project/end-to-end/scripts/run_test.sh simple n_tps.test.ts" } function network_bench { @@ -245,7 +243,100 @@ function bench_10tps { gcp_auth export_admin_api_key export K8S_ENRICHER=${K8S_ENRICHER:-1} + export BENCH_RUN_ID="${BENCH_RUN_ID:-$(date -u +%Y%m%d)-${COMMIT_HASH:0:10}}" bench_10tps_cmds | parallelize 1 + + local metadata="/tmp/n_tps_timing_data.json" + local run_json="bench-out/bench-10tps-${BENCH_RUN_ID}.json" + if [[ -f "$metadata" ]]; then + local started=$(jq -r .startedAt < "$metadata") + local ended=$(jq -r .endedAt < "$metadata") + echo "Scraping bench-10tps run ${BENCH_RUN_ID} (started=${started} ended=${ended})" + NAMESPACE="$NAMESPACE" ./scripts/bench_10tps/bench_scrape.ts \ + --run-id "$BENCH_RUN_ID" \ + --started "$started" \ + --ended "$ended" \ + --target-tps 10 \ + --workload sha256_hash_1024 \ + --output "$run_json" \ + --wait-for-pending-zero \ + --max-pending-wait-seconds "${BENCH_SCRAPE_MAX_PENDING_WAIT_SECONDS:-3600}" \ + || echo "[bench_10tps] scraper failed (non-fatal)" + network_bench_upload "$run_json" || echo "[network_bench] upload 
failed (non-fatal)" + else + echo "[bench_10tps] no timing metadata at ${metadata}; skipping scraper" + fi +} + +function network_bench_upload { + local run_json=$1 + if [[ "${CI:-0}" != "1" ]]; then + echo "[network_bench] CI != 1, skipping upload (run JSON at ${run_json})" + return 0 + fi + if [[ ! -f "$run_json" ]]; then + echo "[network_bench] no run JSON at ${run_json}; skipping upload" + return 0 + fi + + # Reject anything that's not the schema we've designed the index against. + local schema=$(jq -r .schemaVersion "$run_json") + if [[ "$schema" != "3" ]]; then + echo "[network_bench] run JSON has schemaVersion '$schema', expected '3'; skipping upload" + return 0 + fi + + local bucket="gs://aztec-testnet/network_bench" + local run_id=$(jq -r .run.runId "$run_json") + local target="${bucket}/${run_id}.json" + + echo "[network_bench] uploading ${run_json} to ${target}" + gcloud storage cp "$run_json" "$target" + + local entry=$(jq '{ + runId: .run.runId, + path: (.run.runId + ".json"), + startedAt: .run.startedAt, + endedAt: .run.endedAt, + targetTps: .run.targetTps, + workload: .run.workload, + testDurationSeconds: .run.testDurationSeconds, + namespace: .run.namespace, + headlineKpi: .summary.headlineKpi, + inclusionTpsMean: .summary.inclusionTpsMean, + inclusionTpsPeak: .summary.inclusionTpsPeak, + totalTxsMined: .summary.totalTxsMined, + reorgCount: .summary.reorgCount + }' "$run_json") + + local idx_local + idx_local=$(mktemp) + trap "rm -f $idx_local ${idx_local}.new" RETURN + # Distinguish "index does not exist yet" (404 -> seed empty) from real errors + # (auth/network/permission -> fail closed). Without this probe, a naive + # `cp ... 2>/dev/null || seed_empty` would silently overwrite a healthy index + # with a single-entry one whenever GCS hiccups. 
+ local desc_err + if desc_err=$(gcloud storage objects describe "${bucket}/index.json" 2>&1 >/dev/null); then + gcloud storage cp "${bucket}/index.json" "$idx_local" + elif echo "$desc_err" | grep -qiE 'not.?found|matched no objects|404'; then + echo "[network_bench] no remote index.json yet; seeding empty" + echo '{"schemaVersion":"1","runs":[]}' > "$idx_local" + else + echo "[network_bench] cannot read remote index.json:" + echo "$desc_err" | head -5 + return 1 + fi + + jq --argjson entry "$entry" --arg ts "$(date -u +%Y-%m-%dT%H:%M:%SZ)" ' + .schemaVersion = "1" + | .generatedAt = $ts + | .runs = ((.runs // []) | map(select(.runId != $entry.runId)) + [$entry] + | sort_by(.endedAt) | reverse) + ' "$idx_local" > "${idx_local}.new" + + gcloud storage cp "${idx_local}.new" "${bucket}/index.json" + echo "[network_bench] updated ${bucket}/index.json" } function ensure_eth_balances { diff --git a/spartan/environments/bench-10tps.env b/spartan/environments/bench-10tps.env index f3fd9c363818..b4782d9e35e8 100644 --- a/spartan/environments/bench-10tps.env +++ b/spartan/environments/bench-10tps.env @@ -17,6 +17,7 @@ AZTEC_SLOT_DURATION=72 AZTEC_PROOF_SUBMISSION_EPOCHS=2 AZTEC_LAG_IN_EPOCHS_FOR_VALIDATOR_SET=1 AZTEC_LAG_IN_EPOCHS_FOR_RANDAO=1 +AZTEC_INBOX_LAG=2 # 2B mana target - good for about ~800 txs at 2.5M mana each AZTEC_MANA_TARGET=2000000000 @@ -26,7 +27,7 @@ SPONSORED_FPC=true OTEL_COLLECTOR_ENDPOINT=REPLACE_WITH_GCP_SECRET VALIDATOR_REPLICAS=3 -VALIDATORS_PER_NODE=24 +VALIDATORS_PER_NODE=20 VALIDATOR_PUBLISHERS_PER_REPLICA=4 VALIDATOR_PUBLISHER_MNEMONIC_START_INDEX=5000 VALIDATOR_RESOURCE_PROFILE="prod-spot" @@ -39,6 +40,7 @@ SEQ_MAX_TX_PER_CHECKPOINT=800 P2P_MAX_PENDING_TX_COUNT=20000 SEQ_MIN_TX_PER_BLOCK=1 SEQ_BUILD_CHECKPOINT_IF_EMPTY=true +SEQ_ENABLE_PROPOSER_PIPELINING=true RPC_REPLICAS=1 RPC_RESOURCE_PROFILE="prod" @@ -64,7 +66,7 @@ AZTEC_SLASHING_OFFSET_IN_ROUNDS=1 AZTEC_LOCAL_EJECTION_THRESHOLD=90000000000000000000 DEBUG_P2P_INSTRUMENT_MESSAGES=true 
-LOG_LEVEL='info;debug:simulator:public-processor' +LOG_LEVEL='info;debug:simulator:public-processor,sequencer:state,sequencer:checkpoint-events' VALIDATOR_L1_PRIORITY_FEE_BUMP_PERCENTAGE=0 VALIDATOR_L1_PRIORITY_FEE_RETRY_BUMP_PERCENTAGE=0 diff --git a/spartan/metrics/grafana/alerts/contactpoints.yaml b/spartan/metrics/grafana/alerts/contactpoints.yaml index e12c618ab61b..af9593be2437 100644 --- a/spartan/metrics/grafana/alerts/contactpoints.yaml +++ b/spartan/metrics/grafana/alerts/contactpoints.yaml @@ -8,39 +8,39 @@ contactPoints: settings: url: $SLACK_WEBHOOK_URL text: |- - {{ "{{" }} if gt (len .Alerts) 0 {{ "}}" }} + {{ if gt (len .Alerts) 0 }} *Alerts:* - {{ "{{" }} range .Alerts {{ "}}" }} - - {{ "{{" }} with (index .Labels "k8s_namespace_name") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown-namespace{{ "{{" }} end {{ "}}" }}: {{ "{{" }} with (index .Annotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} - {{ "{{" }} else {{ "}}" }} + {{ range .Alerts }} + - {{ with (index .Labels "k8s_namespace_name") }}{{ . }}{{ else }}unknown-namespace{{ end }}: {{ with (index .Annotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} + {{ else }} *Alerts:* - - {{ "{{" }} with (index .CommonAnnotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} + - {{ with (index .CommonAnnotations "summary") }}{{ . 
}}{{ else }}(no summary){{ end }} + {{ end }} *Grafana overview:* - {{ "{{" }} .ExternalURL {{ "}}" }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ "{{" }} + {{ .ExternalURL }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}{{ "{{" }} printf "\n" {{ "}}" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }}{{ printf "\n" }} *GKE workloads:* - https://console.cloud.google.com/kubernetes/workload/overview?project={{ "{{" }} + https://console.cloud.google.com/kubernetes/workload/overview?project={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ "{{" }} + }}{{ . }}{{ else }}unknown{{ end }}{{ end }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . 
{{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}%22%5D%29%29 + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }}%22%5D%29%29 disableResolveMessage: false - orgId: 1 @@ -51,39 +51,39 @@ contactPoints: settings: url: $SLACK_WEBHOOK_NEXT_SCENARIO_URL text: |- - {{ "{{" }} if gt (len .Alerts) 0 {{ "}}" }} + {{ if gt (len .Alerts) 0 }} *Alerts:* - {{ "{{" }} range .Alerts {{ "}}" }} - - {{ "{{" }} with (index .Labels "k8s_namespace_name") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown-namespace{{ "{{" }} end {{ "}}" }}: {{ "{{" }} with (index .Annotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} - {{ "{{" }} else {{ "}}" }} + {{ range .Alerts }} + - {{ with (index .Labels "k8s_namespace_name") }}{{ . }}{{ else }}unknown-namespace{{ end }}: {{ with (index .Annotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} + {{ else }} *Alerts:* - - {{ "{{" }} with (index .CommonAnnotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} + - {{ with (index .CommonAnnotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} *Grafana overview:* - {{ "{{" }} .ExternalURL {{ "}}" }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ "{{" }} + {{ .ExternalURL }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . 
{{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}{{ "{{" }} printf "\n" {{ "}}" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }}{{ printf "\n" }} *GKE workloads:* - https://console.cloud.google.com/kubernetes/workload/overview?project={{ "{{" }} + https://console.cloud.google.com/kubernetes/workload/overview?project={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ "{{" }} + }}{{ . }}{{ else }}unknown{{ end }}{{ end }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}%22%5D%29%29 + }}{{ urlquery . 
}}{{ else }}unknown{{ end }}{{ end }}%22%5D%29%29 disableResolveMessage: false - orgId: 1 @@ -94,39 +94,39 @@ contactPoints: settings: url: $SLACK_WEBHOOK_NEXT_NET_URL text: |- - {{ "{{" }} if gt (len .Alerts) 0 {{ "}}" }} + {{ if gt (len .Alerts) 0 }} *Alerts:* - {{ "{{" }} range .Alerts {{ "}}" }} - - {{ "{{" }} with (index .Labels "k8s_namespace_name") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown-namespace{{ "{{" }} end {{ "}}" }}: {{ "{{" }} with (index .Annotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} - {{ "{{" }} else {{ "}}" }} + {{ range .Alerts }} + - {{ with (index .Labels "k8s_namespace_name") }}{{ . }}{{ else }}unknown-namespace{{ end }}: {{ with (index .Annotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} + {{ else }} *Alerts:* - - {{ "{{" }} with (index .CommonAnnotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} + - {{ with (index .CommonAnnotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} *Grafana overview:* - {{ "{{" }} .ExternalURL {{ "}}" }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ "{{" }} + {{ .ExternalURL }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}{{ "{{" }} printf "\n" {{ "}}" }} + }}{{ urlquery . 
}}{{ else }}unknown{{ end }}{{ end }}{{ printf "\n" }} *GKE workloads:* - https://console.cloud.google.com/kubernetes/workload/overview?project={{ "{{" }} + https://console.cloud.google.com/kubernetes/workload/overview?project={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ "{{" }} + }}{{ . }}{{ else }}unknown{{ end }}{{ end }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}%22%5D%29%29 + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }}%22%5D%29%29 disableResolveMessage: false - orgId: 1 @@ -137,39 +137,39 @@ contactPoints: settings: url: $SLACK_WEBHOOK_TESTNET_URL text: |- - {{ "{{" }} if gt (len .Alerts) 0 {{ "}}" }} + {{ if gt (len .Alerts) 0 }} *Alerts:* - {{ "{{" }} range .Alerts {{ "}}" }} - - {{ "{{" }} with (index .Labels "k8s_namespace_name") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown-namespace{{ "{{" }} end {{ "}}" }}: {{ "{{" }} with (index .Annotations "summary") {{ "}}" }}{{ "{{" }} . 
{{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} - {{ "{{" }} else {{ "}}" }} + {{ range .Alerts }} + - {{ with (index .Labels "k8s_namespace_name") }}{{ . }}{{ else }}unknown-namespace{{ end }}: {{ with (index .Annotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} + {{ else }} *Alerts:* - - {{ "{{" }} with (index .CommonAnnotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} + - {{ with (index .CommonAnnotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} *Grafana overview:* - {{ "{{" }} .ExternalURL {{ "}}" }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ "{{" }} + {{ .ExternalURL }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}{{ "{{" }} printf "\n" {{ "}}" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }}{{ printf "\n" }} *GKE workloads:* - https://console.cloud.google.com/kubernetes/workload/overview?project={{ "{{" }} + https://console.cloud.google.com/kubernetes/workload/overview?project={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ . 
}}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ "{{" }} + }}{{ . }}{{ else }}unknown{{ end }}{{ end }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}%22%5D%29%29 + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }}%22%5D%29%29 disableResolveMessage: false - orgId: 1 @@ -180,39 +180,39 @@ contactPoints: settings: url: $SLACK_WEBHOOK_MAINNET_URL text: |- - {{ "{{" }} if gt (len .Alerts) 0 {{ "}}" }} + {{ if gt (len .Alerts) 0 }} *Alerts:* - {{ "{{" }} range .Alerts {{ "}}" }} - - {{ "{{" }} with (index .Labels "k8s_namespace_name") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown-namespace{{ "{{" }} end {{ "}}" }}: {{ "{{" }} with (index .Annotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} - {{ "{{" }} else {{ "}}" }} + {{ range .Alerts }} + - {{ with (index .Labels "k8s_namespace_name") }}{{ . }}{{ else }}unknown-namespace{{ end }}: {{ with (index .Annotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} + {{ else }} *Alerts:* - - {{ "{{" }} with (index .CommonAnnotations "summary") {{ "}}" }}{{ "{{" }} . 
{{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} + - {{ with (index .CommonAnnotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} *Grafana overview:* - {{ "{{" }} .ExternalURL {{ "}}" }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ "{{" }} + {{ .ExternalURL }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}{{ "{{" }} printf "\n" {{ "}}" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }}{{ printf "\n" }} *GKE workloads:* - https://console.cloud.google.com/kubernetes/workload/overview?project={{ "{{" }} + https://console.cloud.google.com/kubernetes/workload/overview?project={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ "{{" }} + }}{{ . 
}}{{ else }}unknown{{ end }}{{ end }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}%22%5D%29%29 + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }}%22%5D%29%29 disableResolveMessage: false - orgId: 1 @@ -223,24 +223,24 @@ contactPoints: settings: url: $SLACK_WEBHOOK_TESTNET_URL text: |- - {{ "{{" }} if gt (len .Alerts) 0 {{ "}}" }} + {{ if gt (len .Alerts) 0 }} *Alerts:* - {{ "{{" }} range .Alerts {{ "}}" }} - - {{ "{{" }} with (index .Labels "network") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown-network{{ "{{" }} end {{ "}}" }}: {{ "{{" }} with (index .Annotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} - {{ "{{" }} else {{ "}}" }} + {{ range .Alerts }} + - {{ with (index .Labels "network") }}{{ . }}{{ else }}unknown-network{{ end }}: {{ with (index .Annotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} + {{ else }} *Alerts:* - - {{ "{{" }} with (index .CommonAnnotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} + - {{ with (index .CommonAnnotations "summary") }}{{ . 
}}{{ else }}(no summary){{ end }} + {{ end }} *Grafana overview:* - {{ "{{" }} .ExternalURL {{ "}}" }}d/bexge5lz3e51cd/p2p-bootnodes?orgId=1&var-network={{ "{{" }} + {{ .ExternalURL }}d/bexge5lz3e51cd/p2p-bootnodes?orgId=1&var-network={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "network") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "network") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }} disableResolveMessage: false - orgId: 1 @@ -251,24 +251,24 @@ contactPoints: settings: url: $SLACK_WEBHOOK_MAINNET_URL text: |- - {{ "{{" }} if gt (len .Alerts) 0 {{ "}}" }} + {{ if gt (len .Alerts) 0 }} *Alerts:* - {{ "{{" }} range .Alerts {{ "}}" }} - - {{ "{{" }} with (index .Labels "network") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown-network{{ "{{" }} end {{ "}}" }}: {{ "{{" }} with (index .Annotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} - {{ "{{" }} else {{ "}}" }} + {{ range .Alerts }} + - {{ with (index .Labels "network") }}{{ . }}{{ else }}unknown-network{{ end }}: {{ with (index .Annotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} + {{ else }} *Alerts:* - - {{ "{{" }} with (index .CommonAnnotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} + - {{ with (index .CommonAnnotations "summary") }}{{ . 
}}{{ else }}(no summary){{ end }} + {{ end }} *Grafana overview:* - {{ "{{" }} .ExternalURL {{ "}}" }}d/bexge5lz3e51cd/p2p-bootnodes?orgId=1&var-network={{ "{{" }} + {{ .ExternalURL }}d/bexge5lz3e51cd/p2p-bootnodes?orgId=1&var-network={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "network") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "network") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }} disableResolveMessage: false - orgId: 1 @@ -279,37 +279,37 @@ contactPoints: settings: url: $SLACK_WEBHOOK_DEVNET_URL text: |- - {{ "{{" }} if gt (len .Alerts) 0 {{ "}}" }} + {{ if gt (len .Alerts) 0 }} *Alerts:* - {{ "{{" }} range .Alerts {{ "}}" }} - - {{ "{{" }} with (index .Labels "k8s_namespace_name") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown-namespace{{ "{{" }} end {{ "}}" }}: {{ "{{" }} with (index .Annotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} - {{ "{{" }} else {{ "}}" }} + {{ range .Alerts }} + - {{ with (index .Labels "k8s_namespace_name") }}{{ . }}{{ else }}unknown-namespace{{ end }}: {{ with (index .Annotations "summary") }}{{ . }}{{ else }}(no summary){{ end }} + {{ end }} + {{ else }} *Alerts:* - - {{ "{{" }} with (index .CommonAnnotations "summary") {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}(no summary){{ "{{" }} end {{ "}}" }} - {{ "{{" }} end {{ "}}" }} + - {{ with (index .CommonAnnotations "summary") }}{{ . 
}}{{ else }}(no summary){{ end }} + {{ end }} *Grafana overview:* - {{ "{{" }} .ExternalURL {{ "}}" }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ "{{" }} + {{ .ExternalURL }}d/aztec-network/network-overview?orgId=1&refresh=30s&var-data_source=default&var-namespace={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}{{ "{{" }} printf "\n" {{ "}}" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }}{{ printf "\n" }} *GKE workloads:* - https://console.cloud.google.com/kubernetes/workload/overview?project={{ "{{" }} + https://console.cloud.google.com/kubernetes/workload/overview?project={{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "gcp_project") - {{ "}}" }}{{ "{{" }} . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ "{{" }} + }}{{ . }}{{ else }}unknown{{ end }}{{ end }}&supportedpurview=project&pageState=%28%22savedViews%22%3A%28%22n%22%3A%5B%22{{ if gt (len .Alerts) 0 - {{ "}}" }}{{ "{{" }} + }}{{ with (index (index .Alerts 0).Labels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . 
{{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} else {{ "}}" }}{{ "{{" }} + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ else }}{{ with (index .CommonLabels "k8s_namespace_name") - {{ "}}" }}{{ "{{" }} urlquery . {{ "}}" }}{{ "{{" }} else {{ "}}" }}unknown{{ "{{" }} end {{ "}}" }}{{ "{{" }} end {{ "}}" }}%22%5D%29%29 + }}{{ urlquery . }}{{ else }}unknown{{ end }}{{ end }}%22%5D%29%29 disableResolveMessage: false diff --git a/spartan/metrics/grafana/alerts/rules.yaml b/spartan/metrics/grafana/alerts/rules.yaml index 213204f4da5c..1b64f1274650 100644 --- a/spartan/metrics/grafana/alerts/rules.yaml +++ b/spartan/metrics/grafana/alerts/rules.yaml @@ -173,7 +173,7 @@ groups: for: 5m annotations: summary: Fisherman detected block re-execution failure - description: Fisherman node ({{ ` {{ $labels.k8s_namespace_name }} ` }}) failed to re-execute block(s). + description: Fisherman node ({{ $labels.k8s_namespace_name }}) failed to re-execute block(s). labels: <<: *common_labels isPaused: false @@ -188,7 +188,7 @@ groups: datasourceUid: spartan-metrics-prometheus model: editorMode: code - expr: sum by (k8s_namespace_name, aztec_error_type) (increase(aztec_sequencer_block_proposal_failed_count{k8s_namespace_name=~".*(fisherman|mainnet).*"}[$__rate_interval])) + expr: sum by (k8s_namespace_name, aztec_error_type) (increase(aztec_sequencer_block_proposal_failed_count{k8s_namespace_name=~".*(fisherman|mainnet).*", aztec_error_type!="insufficient_txs"}[$__rate_interval])) instant: true intervalMs: 60000 legendFormat: __auto @@ -258,7 +258,7 @@ groups: for: 5m annotations: summary: Fisherman node failed to build validation block - description: Fisherman node ({{ ` {{ $labels.k8s_namespace_name }} ` }}) failed to build block. Reason - {{ ` {{ $labels.aztec_error_type }} ` }}. + description: Fisherman node ({{ $labels.k8s_namespace_name }}) failed to build block. Reason - {{ $labels.aztec_error_type }}. 
labels: <<: *common_labels isPaused: false @@ -343,7 +343,7 @@ groups: for: 5m annotations: summary: Fisherman node failed pre-build checks - description: Fisherman node ({{ ` {{ $labels.k8s_namespace_name }} ` }}) failed pre-build checks. Check type - {{ ` {{ $labels.aztec_error_type }} ` }}. + description: Fisherman node ({{ $labels.k8s_namespace_name }}) failed pre-build checks. Check type - {{ $labels.aztec_error_type }}. labels: <<: *common_labels isPaused: false @@ -428,7 +428,7 @@ groups: for: 5m annotations: summary: Fisherman node received invalid attestations - description: Fisherman node ({{ ` {{ $labels.k8s_namespace_name }} ` }}) received invalid attestations from other validators. Reason - {{ ` {{ $labels.aztec_error_type }} ` }}. + description: Fisherman node ({{ $labels.k8s_namespace_name }}) received invalid attestations from other validators. Reason - {{ $labels.aztec_error_type }}. labels: <<: *common_labels isPaused: false @@ -512,7 +512,7 @@ groups: execErrState: Error for: 10m annotations: - summary: Pending chain in {{ ` {{ $labels.k8s_namespace_name }} ` }} hasn't advanced in 10 minutes + summary: Pending chain in {{ $labels.k8s_namespace_name }} hasn't advanced in 10 minutes labels: <<: *common_labels isPaused: false @@ -596,7 +596,7 @@ groups: execErrState: Error for: 0s annotations: - summary: Pending chain has re-orged in {{ ` {{ $labels.k8s_namespace_name }} ` }} + summary: Pending chain has re-orged in {{ $labels.k8s_namespace_name }} labels: <<: *common_labels isPaused: false @@ -680,7 +680,7 @@ groups: execErrState: Error for: 1m annotations: - summary: Attestations collection is taking over 10 seconds in {{ ` {{ $labels.k8s_namespace_name }} ` }} + summary: Attestations collection is taking over 10 seconds in {{ $labels.k8s_namespace_name }} labels: <<: *common_labels isPaused: false @@ -764,7 +764,7 @@ groups: execErrState: Error for: 5m annotations: - summary: High CPU usage in {{ ` {{ $labels.k8s_namespace_name }} ` }} by {{ ` {{ 
$labels.service_name }} ` }} + summary: High CPU usage in {{ $labels.k8s_namespace_name }} by {{ $labels.service_name }} labels: <<: *common_labels isPaused: false @@ -848,7 +848,7 @@ groups: execErrState: Error for: 5m annotations: - summary: High memory usage in {{ ` {{ $labels.k8s_namespace_name }} ` }} by {{ ` {{ $labels.service_name }} ` }} + summary: High memory usage in {{ $labels.k8s_namespace_name }} by {{ $labels.service_name }} labels: <<: *common_labels isPaused: false @@ -932,7 +932,7 @@ groups: execErrState: Error for: 1m annotations: - summary: Something is blocking the main thread in {{ ` {{ $labels.service_name }} ` }} ({{ ` {{ $labels.k8s_namespace_name }} ` }}) + summary: Something is blocking the main thread in {{ $labels.service_name }} ({{ $labels.k8s_namespace_name }}) labels: <<: *common_labels isPaused: false @@ -1016,7 +1016,7 @@ groups: execErrState: Error for: 1m annotations: - summary: Critical errors in the WorldState of {{ ` {{ $labels.service_name }} ` }} ({{ ` {{ $labels.k8s_namespace_name }} ` }}) + summary: Critical errors in the WorldState of {{ $labels.service_name }} ({{ $labels.k8s_namespace_name }}) labels: <<: *common_labels isPaused: false @@ -1182,7 +1182,7 @@ groups: execErrState: Error for: 0s annotations: - summary: There was an error taking a snapshot in {{ ` {{ $labels.k8s_namespace_name }} ` }} + summary: There was an error taking a snapshot in {{ $labels.k8s_namespace_name }} labels: <<: *common_labels isPaused: false @@ -1271,7 +1271,7 @@ groups: execErrState: Error for: 1m annotations: - summary: Prometheus scraping for job {{ ` {{ $labels.job }} ` }} takes more than 7.5s + summary: Prometheus scraping for job {{ $labels.job }} takes more than 7.5s labels: <<: *common_labels isPaused: false @@ -1336,7 +1336,7 @@ groups: __panelId__: "5" description: "" runbook_url: "" - summary: One or more bootnodes for {{ ` {{ $labels.network }} ` }} may have stopped. 
+ summary: One or more bootnodes for {{ $labels.network }} may have stopped. labels: "": "" isPaused: false @@ -1421,7 +1421,7 @@ groups: for: 5m annotations: summary: Proof failed - description: One or more proving jobs were rejected in {{ ` {{ $labels.k8s_namespace_name }} ` }}. + description: One or more proving jobs were rejected in {{ $labels.k8s_namespace_name }}. labels: <<: *common_labels isPaused: false diff --git a/spartan/metrics/values.yaml b/spartan/metrics/values.yaml index d5dce4d246fc..5919fc8c34b9 100644 --- a/spartan/metrics/values.yaml +++ b/spartan/metrics/values.yaml @@ -127,14 +127,6 @@ grafana: TESTNET_NAMESPACES_REGEX: "testnet|v[0-9]+-testnet" MAINNET_NAMESPACES_REGEX: "mainnet|v[0-9]+-mainnet|ignition" DEVNET_NAMESPACES_REGEX: ".*devnet.*" - SLACK_WEBHOOK_URL: "http://127.0.0.1" # dummy value - SLACK_WEBHOOK_STAGING_PUBLIC_URL: "http://127.0.0.1" # dummy value - SLACK_WEBHOOK_STAGING_IGNITION_URL: "http://127.0.0.1" # dummy value - SLACK_WEBHOOK_NEXT_SCENARIO_URL: "http://127.0.0.1" # dummy value - SLACK_WEBHOOK_NEXT_NET_URL: "http://127.0.0.1" # dummy value - SLACK_WEBHOOK_TESTNET_URL: "http://127.0.0.1" # dummy value - SLACK_WEBHOOK_MAINNET_URL: "http://127.0.0.1" # dummy value - SLACK_WEBHOOK_DEVNET_URL: "http://127.0.0.1" # dummy value datasources: datasources.yaml: apiVersion: 1 @@ -153,7 +145,7 @@ grafana: label: grafana_dashboard folderAnnotation: grafana_dashboard_folder provider: - foldersFromFilesStructure: false + foldersFromFilesStructure: true alerts: enabled: true label: grafana_alert diff --git a/spartan/scripts/bench_10tps/bench_output.schema.json b/spartan/scripts/bench_10tps/bench_output.schema.json new file mode 100644 index 000000000000..b767c55cba52 --- /dev/null +++ b/spartan/scripts/bench_10tps/bench_output.schema.json @@ -0,0 +1,447 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/AztecProtocol/aztec-packages/benchmark-design/benchmark-output.schema.json", + "title": 
"Aztec 10 TPS benchmark — per-run output", + "description": "One JSON per benchmark run, self-contained. Produced by bench_scrape.ts after a run finishes. Two scrape paths, run independently so one failing does not prevent the other from populating: (a) PromQL query_range for continuous time-series; (b) gcloud log scrape for per-block records and discrete events.", + "type": "object", + "additionalProperties": false, + "required": [ + "schemaVersion", + "run", + "summary", + "timeSeries", + "blocks", + "events" + ], + "properties": { + "schemaVersion": { + "type": "string", + "const": "3", + "description": "Bump when breaking the schema. Old JSONs keep their previous version so the dashboard can render them side-by-side. v3: timeSeries entries carry `series: [{labels, points}]` instead of bare `points` to support per-pod / per-label data." + }, + "run": { "$ref": "#/$defs/runMeta" }, + "summary": { + "$ref": "#/$defs/summary", + "description": "Scalar reductions — one number per run. Shown on the dashboard trend page and duplicated into index.json so the index can render headline numbers without fetching every run." + }, + "timeSeries": { + "$ref": "#/$defs/timeSeriesSection", + "description": "PromQL query_range results. Continuous-sampled metrics keyed by unixEpoch; the dashboard normalises to time-within-run via unixEpoch - run.startedAt at render time so multiple runs can overlay on the same x-axis." + }, + "blocks": { + "type": "array", + "description": "Per-block records parsed from structured logs (each block emits one `Processed N successful txs and M failed txs ...` info line). Authoritative for per-block facts — Prometheus histograms cannot recover per-block samples.", + "items": { "$ref": "#/$defs/blockRecord" } + }, + "events": { + "type": "array", + "description": "Discrete occurrences during the run (currently just chain prunes). 
Typically rendered as annotations on charts.", + "items": { "$ref": "#/$defs/event" } + }, + "sequencerStateSlots": { + "type": "array", + "description": "Per-slot sequencer state time budget reconstructed from structured state-transition logs. Optional for older runs and empty when sequencer debug logs were not enabled.", + "items": { "$ref": "#/$defs/sequencerStateSlot" } + }, + "notes": { + "type": "array", + "items": { "type": "string" }, + "description": "Free-form operator notes (e.g. 'ran with doubled prover agents'). Optional." + } + }, + + "$defs": { + "runMeta": { + "type": "object", + "additionalProperties": false, + "required": ["runId", "startedAt", "endedAt", "namespace"], + "properties": { + "runId": { + "type": "string", + "description": "ULID/UUID. Same value stamped on pods as BENCH_RUN_ID label when that instrumentation lands." + }, + "startedAt": { + "type": "string", + "format": "date-time", + "description": "Wall-clock time the load generator started sending txs. Dashboards normalise time-series x-axes against this for cross-run overlay." + }, + "endedAt": { + "type": "string", + "format": "date-time", + "description": "Wall-clock time the load generator stopped sending txs (not the test-teardown time)." + }, + "inclusionEndedAt": { + "type": "string", + "format": "date-time", + "description": "Wall-clock time the scraper considers proposer-visible load fully included. With pending-drain scraping enabled, this is when validator pending TxPool depth first reached zero; otherwise it falls back to the bounded scrape window end. RPC/full-node pending may remain non-zero when load failed to propagate to validators." + }, + "drainEndedAt": { + "type": "string", + "format": "date-time", + "description": "Wall-clock time the scraper began querying Prometheus. Typically endedAt + ~90s to let the OTel batch push (60s default) and one Prom scrape (15s) settle." 
+ }, + "namespace": { "type": "string", "examples": ["bench-10tps"] }, + "gcpProject": { + "type": "string", + "description": "GCP project containing the GKE container logs." + }, + "gcpLocation": { + "type": "string", + "description": "GKE cluster location used by Cloud Logging resource labels." + }, + "gkeCluster": { + "type": "string", + "description": "GKE cluster name used by Cloud Logging resource labels." + }, + "image": { + "type": "string", + "description": "Aztec image tag or digest the validators ran." + }, + "targetTps": { "type": "number", "minimum": 0 }, + "testDurationSeconds": { "type": "integer", "minimum": 0 }, + "workload": { "type": "string", "examples": ["sha256_hash_1024"] }, + "aztecConfig": { + "type": "object", + "additionalProperties": { "type": "string" }, + "description": "Curated subset of Aztec config env vars captured from a running validator pod. Keys include SEQ_MAX_TX_PER_BLOCK, P2P_MAX_PENDING_TX_COUNT, AZTEC_MANA_TARGET, etc. Lets the dashboard show 'pool=20k vs pool=1000' alongside compared runs." + }, + "chaosMesh": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { "type": "boolean" }, + "profile": { + "type": "string", + "examples": ["network-requirements"] + } + } + }, + "scrapeConfig": { + "type": "object", + "description": "What the scraper itself did. Kept so a future maintainer can tell whether a gappy time-series was caused by too-short drain or too-large step.", + "additionalProperties": false, + "properties": { + "drainSeconds": { + "type": "integer", + "minimum": 0, + "description": "Seconds waited between endedAt and the first Prom query." + }, + "stepSeconds": { + "type": "integer", + "minimum": 1, + "description": "PromQL range-query step." + }, + "promUrl": { "type": "string" }, + "waitForPendingZero": { + "type": "boolean", + "description": "Whether live scraping waited for validator pending TxPool depth to reach zero before querying." 
+ }, + "maxPendingWaitSeconds": { + "type": "integer", + "minimum": 0, + "description": "Maximum time the scraper was allowed to wait for validator pending TxPool depth to reach zero." + }, + "pendingAtScrape": { + "type": ["number", "null"], + "minimum": 0, + "description": "Validator pending TxPool depth observed when scraping started, or null when the pending drain gate was disabled." + }, + "pendingByRoleAtScrape": { + "type": ["object", "null"], + "description": "Pending TxPool depth by pod role at scrape start. RPC/full-node pending can remain non-zero after validators drain, which indicates load that did not propagate to proposers before expiry.", + "additionalProperties": false, + "properties": { + "rpc": { "type": ["number", "null"], "minimum": 0 }, + "validator": { "type": ["number", "null"], "minimum": 0 }, + "fullNode": { "type": ["number", "null"], "minimum": 0 } + } + }, + "pendingWaitTimedOut": { + "type": "boolean", + "description": "True if scraping began because the pending-drain timeout expired." + } + } + } + } + }, + + "summary": { + "type": "object", + "additionalProperties": false, + "required": ["headlineKpi", "inclusionTpsMean", "targetTps"], + "properties": { + "headlineKpi": { + "type": ["number", "null"], + "description": "inclusionTpsMean / targetTps. The single number on the dashboard top strip." + }, + "targetTps": { "type": "number" }, + "inclusionTpsMean": { + "type": ["number", "null"], + "description": "Exact block-log inclusion throughput over the observed inclusion window: totalTxsMined / (inclusionEndedAt - startedAt)." + }, + "inclusionTpsPeak": { + "type": ["number", "null"], + "description": "Peak sampled Prometheus rolling inclusion rate over the observed scrape window." 
+ }, + "inclusionLatencyP50Ms": { "type": ["number", "null"] }, + "inclusionLatencyP95Ms": { "type": ["number", "null"] }, + "inclusionLatencyP99Ms": { "type": ["number", "null"] }, + "blockBuildDurationP50Ms": { "type": ["number", "null"] }, + "blockBuildDurationP95Ms": { "type": ["number", "null"] }, + "publicProcessorTxDurationP50Ms": { "type": ["number", "null"] }, + "publicProcessorTxDurationP95Ms": { "type": ["number", "null"] }, + "totalTxsMined": { "type": ["integer", "null"] }, + "totalTxsFailed": { "type": ["integer", "null"] }, + "totalSilentSkipCount": { + "type": ["integer", "null"], + "description": "Sum of per-block silentlySkippedCount. > 0 means the post-process blob-field revert path fired during the run." + }, + "totalSilentSkipDurationMs": { + "type": ["integer", "null"], + "description": "Sum of per-block silentlySkippedDurationMs. Wall-clock 'wasted' on silently-skipped txs across the run." + }, + "reorgCount": { + "type": ["integer", "null"], + "description": "Count of `Chain pruned` events during the run." + }, + "deepestReorgBlocks": { + "type": ["integer", "null"], + "description": "Max (fromBlock - toBlock) across reorg events. 0 if no reorgs." + } + } + }, + + "timeSeriesSection": { + "type": "object", + "description": "Key = stable slug (used by the dashboard to look up series); value = a series. 
Slugs decouple the display name from Prometheus metric names (which may be renamed across Aztec versions).", + "additionalProperties": { "$ref": "#/$defs/timeSeries" }, + "properties": { + "inclusionTps": { "$ref": "#/$defs/timeSeries" }, + "ingressTps": { "$ref": "#/$defs/timeSeries" }, + "mempoolSizeRpc": { "$ref": "#/$defs/timeSeries" }, + "mempoolSizeValidator": { "$ref": "#/$defs/timeSeries" }, + "mempoolSizeFullNode": { "$ref": "#/$defs/timeSeries" }, + "mempoolMinedMax": { "$ref": "#/$defs/timeSeries" }, + "mempoolEvictedByReasonRate": { "$ref": "#/$defs/timeSeries" }, + "mempoolRejectedByReasonRate": { "$ref": "#/$defs/timeSeries" }, + "blockBuildDurationP95": { "$ref": "#/$defs/timeSeries" }, + "blockBuildDurationP50": { "$ref": "#/$defs/timeSeries" }, + "publicProcessorTxDurationP95": { "$ref": "#/$defs/timeSeries" }, + "publicProcessorTxDurationP50": { "$ref": "#/$defs/timeSeries" }, + "publicProcessorGasRate": { "$ref": "#/$defs/timeSeries" }, + "checkpointLastBlockToBroadcastP95": { "$ref": "#/$defs/timeSeries" }, + "checkpointBlockCountMean": { "$ref": "#/$defs/timeSeries" }, + "checkpointTxCountMean": { "$ref": "#/$defs/timeSeries" }, + "l1InclusionDelayP95": { "$ref": "#/$defs/timeSeries" }, + "gossipLatencyP95": { "$ref": "#/$defs/timeSeries" }, + "peerCountMean": { "$ref": "#/$defs/timeSeries" }, + "attestationsCollectDurationMean": { "$ref": "#/$defs/timeSeries" }, + "attestationsCollectAllowanceMean": { "$ref": "#/$defs/timeSeries" }, + "txCollectorTxsFromMempoolRate": { "$ref": "#/$defs/timeSeries" }, + "txCollectorTxsFromP2pRate": { "$ref": "#/$defs/timeSeries" }, + "txCollectorMissingRate": { "$ref": "#/$defs/timeSeries" }, + "txCollectorRequestedFractionMean": { "$ref": "#/$defs/timeSeries" }, + "txCollectorRequestDelayP95": { "$ref": "#/$defs/timeSeries" }, + "sequencerStateDurationP95": { "$ref": "#/$defs/timeSeries" } + } + }, + + "timeSeries": { + "type": "object", + "additionalProperties": false, + "required": ["metric", "source", 
"series"], + "properties": { + "metric": { + "type": "string", + "description": "Prometheus metric name this series was derived from." + }, + "unit": { + "type": "string", + "examples": ["ms", "tps", "mana/s", "count"] + }, + "source": { + "type": "string", + "const": "promql", + "description": "Future-proofing: other series sources may exist (e.g. 'log-aggregated') with different provenance semantics." + }, + "query": { + "type": "string", + "description": "Exact PromQL used. Kept for audit: if a value looks wrong, the query is the first thing to check." + }, + "stepSeconds": { "type": "integer", "minimum": 1 }, + "series": { + "type": "array", + "description": "One entry per Prometheus series returned by the query. Single-series queries (e.g. inclusionTps) emit one entry with empty labels. Multi-series queries (per-pod, per-topic, per-rejection-reason, etc.) emit one entry per label combination.", + "items": { "$ref": "#/$defs/seriesEntry" } + } + } + }, + + "seriesEntry": { + "type": "object", + "additionalProperties": false, + "required": ["labels", "points"], + "properties": { + "labels": { + "type": "object", + "additionalProperties": { "type": "string" }, + "description": "Prometheus labels that disambiguate this series. Empty {} for single-series queries. Common keys: k8s_pod_name, aztec_gossip_topic_name, rejection_reason, aztec_sequencer_state." + }, + "points": { + "type": "array", + "description": "Samples ordered by unixEpoch ascending. Missing samples (Prom didn't scrape in that window) are omitted rather than interpolated — consumers decide how to handle gaps.", + "items": { "$ref": "#/$defs/tsPoint" } + } + } + }, + + "tsPoint": { + "type": "object", + "additionalProperties": false, + "required": ["unixEpoch", "value"], + "properties": { + "unixEpoch": { + "type": "integer", + "description": "Seconds since unix epoch for this sample. Dashboards normalise to time-within-run via unixEpoch - run.startedAt at render time." 
+ }, + "value": { + "type": ["number", "null"], + "description": "Metric value. null if Prom returned NaN / no data for this step." + } + } + }, + + "blockRecord": { + "type": "object", + "additionalProperties": false, + "required": ["blockNumber", "blockNumberInTest", "minedAt"], + "properties": { + "blockNumber": { "type": "integer", "minimum": 0 }, + "blockNumberInTest": { + "type": "integer", + "description": "blockNumber - firstBlockInTest. First block mined after run.startedAt is 0." + }, + "minedAt": { "type": "string", "format": "date-time" }, + "successfulCount": { "type": "integer", "minimum": 0 }, + "failedCount": { "type": "integer", "minimum": 0 }, + "silentlySkippedCount": { + "type": "integer", + "minimum": 0, + "description": "Txs processed to completion then reverted (post-process blob-field limit). Added 2026-04-22." + }, + "silentlySkippedDurationMs": { + "type": "integer", + "minimum": 0, + "description": "Wall-clock time spent on silently-skipped txs in this block. Added 2026-04-22." + }, + "buildDurationSeconds": { "type": "number", "minimum": 0 }, + "totalPublicGas": { + "type": "object", + "additionalProperties": false, + "properties": { + "daGas": { "type": "integer" }, + "l2Gas": { "type": "integer" } + } + }, + "totalSizeInBytes": { "type": "integer", "minimum": 0 }, + "source": { + "type": "string", + "const": "log", + "description": "Source of record. Currently all block records come from the structured 'Processed N successful…' info log." + } + } + }, + + "event": { + "type": "object", + "additionalProperties": false, + "required": ["at", "type"], + "properties": { + "at": { "type": "string", "format": "date-time" }, + "type": { "type": "string", "enum": ["chainPruned", "slotSummary"] }, + "source": { "type": "string", "const": "log" }, + "fromBlock": { + "type": "integer", + "description": "For chainPruned: the pre-prune tip." + }, + "toBlock": { + "type": "integer", + "description": "For chainPruned: the post-prune tip." 
+ }, + "slotNumber": { + "type": "integer", + "description": "For slotSummary: L2 slot number." + }, + "buildSlot": { + "type": "integer", + "description": "For slotSummary: wall-clock slot in which the checkpoint was built." + }, + "checkpointNumber": { "type": "integer" }, + "sourcePod": { "type": "string" }, + "proposer": { + "type": "string", + "description": "Validator/proposer address selected for this slot." + }, + "attestorAddress": { "type": "string" }, + "publisherAddress": { "type": "string" }, + "blocksBuilt": { "type": "number", "minimum": 0 }, + "txCount": { "type": "number", "minimum": 0 }, + "totalMana": { "type": "number", "minimum": 0 }, + "blockBuildFailures": { + "type": "array", + "items": { "type": "object", "additionalProperties": true } + }, + "checkpointBuildFailure": { + "type": "object", + "additionalProperties": true + }, + "attestations": { + "type": "object", + "additionalProperties": true, + "description": "For slotSummary: committee size, required/collected counts, and missing validator addresses when known." + }, + "publish": { + "type": "object", + "additionalProperties": true, + "description": "For slotSummary: checkpoint publish status and L1 publisher action breakdown." + } + } + }, + + "sequencerStateSlot": { + "type": "object", + "additionalProperties": false, + "required": ["slotNumber", "startedAt", "endedAt", "totalMs", "states"], + "properties": { + "slotNumber": { + "type": "integer", + "description": "L2 slot number whose sequencer-state durations are represented." + }, + "startedAt": { + "type": "string", + "format": "date-time", + "description": "Timestamp of the first parsed transition for this pod-slot." + }, + "endedAt": { + "type": "string", + "format": "date-time", + "description": "Timestamp of the last parsed transition for this pod-slot." + }, + "sourcePod": { + "type": "string", + "description": "Validator pod whose transitions were selected for this slot. 
The scraper chooses the pod-slot with the strongest proposer-state signal." + }, + "totalMs": { + "type": "number", + "minimum": 0, + "description": "Sum of all state durations in this slot record." + }, + "states": { + "type": "object", + "additionalProperties": { "type": "number", "minimum": 0 }, + "description": "Map from SequencerState name to total milliseconds spent in that state during this slot." + } + } + } + } +} diff --git a/spartan/scripts/bench_10tps/bench_scrape.ts b/spartan/scripts/bench_10tps/bench_scrape.ts new file mode 100755 index 000000000000..bad1304e4cdc --- /dev/null +++ b/spartan/scripts/bench_10tps/bench_scrape.ts @@ -0,0 +1,1620 @@ +#!/usr/bin/env -S node --experimental-strip-types --no-warnings +// +// Scrape a completed bench-10tps run into a schema-conformant JSON payload. +// Contract: bench_output.schema.json (v3). Invoked by the bench_10tps function +// in spartan/bootstrap.sh after n_tps.test.ts finishes. +// +// Two independent scrape paths so one failing does not abort the other: +// 1. Prometheus (port-forward to the cluster-shared metrics-prometheus-server) +// 2. gcloud logging read (per-block and discrete-event records) +// +// Usage: +// ./bench_scrape.ts \ +// --run-id --started --ended \ +// --target-tps 10 --workload sha256_hash_1024 +// +// By default the scraper waits for pending TxPool depth to reach zero before +// finalizing the run. Use --no-wait-for-pending-zero for historical replays +// where the namespace no longer exists. + +import { spawn } from "node:child_process"; +import { randomUUID } from "node:crypto"; +import { readFile, writeFile } from "node:fs/promises"; +import { argv, env, exit, stderr } from "node:process"; +import { setTimeout as sleep } from "node:timers/promises"; + +// --- Config --- + +const NAMESPACE = env.NAMESPACE ?? "bench-10tps"; +const GCP_PROJECT = env.GCP_PROJECT ?? 
env.GOOGLE_CLOUD_PROJECT; + +if (!GCP_PROJECT) { + throw new Error("Missing GCP_PROJECT env var"); +} + +const GCP_REGION = env.GCP_REGION ?? "us-west1-a"; +const GKE_CLUSTER = env.CLUSTER ?? "aztec-gke-private"; +// Prometheus is cluster-shared in the "metrics" namespace, not per-environment. +const PROM_NS = env.PROM_NS ?? "metrics"; +const PROM_SERVICE = env.PROM_SERVICE ?? "metrics-prometheus-server"; +const PROM_PORT = Number(env.PROM_PORT ?? 9090); +const STEP_SECONDS = 15; +const DRAIN_BUFFER_SECONDS = 90; // OTel batch push 60s + one Prom scrape 15s + slack +const PENDING_POLL_SECONDS = 30; +const DEFAULT_MAX_PENDING_WAIT_SECONDS = 60 * 60; + +// --- CLI --- + +type Args = { + runId: string; + startedAt: string; + endedAt: string; + targetTps: number; + workload: string; + output: string | undefined; + waitForPendingZero: boolean; + maxPendingWaitSeconds: number; +}; + +function parseArgs(): Args { + const get = (flag: string, fallback?: string) => { + const i = argv.indexOf(flag); + if (i === -1) { + if (fallback !== undefined) { + return fallback; + } + throw new Error(`Missing required flag ${flag}`); + } + return argv[i + 1]; + }; + return { + runId: get("--run-id", env.BENCH_RUN_ID ?? randomUUID()), + startedAt: get("--started"), + endedAt: get("--ended"), + targetTps: Number(get("--target-tps", "10")), + workload: get("--workload", "sha256_hash_1024"), + output: + argv.indexOf("--output") === -1 + ? undefined + : argv[argv.indexOf("--output") + 1], + waitForPendingZero: + !argv.includes("--no-wait-for-pending-zero") && + (argv.includes("--wait-for-pending-zero") || + env.BENCH_SCRAPE_WAIT_FOR_PENDING_ZERO !== "0"), + maxPendingWaitSeconds: Number( + get( + "--max-pending-wait-seconds", + env.BENCH_SCRAPE_MAX_PENDING_WAIT_SECONDS ?? + String(DEFAULT_MAX_PENDING_WAIT_SECONDS), + ), + ), + }; +} + +function log(msg: string, extra?: unknown): void { + stderr.write( + `[scrape] ${msg}${extra === undefined ? 
"" : " " + JSON.stringify(extra)}\n`, + ); +} + +// --- Port-forward --- + +async function portForwardProm(): Promise<() => void> { + const child = spawn( + "kubectl", + ["-n", PROM_NS, "port-forward", `svc/${PROM_SERVICE}`, `${PROM_PORT}:80`], + { stdio: ["ignore", "pipe", "pipe"] }, + ); + await new Promise((resolve, reject) => { + const onData = (buf: Buffer) => { + if (buf.toString().includes("Forwarding from")) { + resolve(); + } + }; + child.stdout?.on("data", onData); + child.stderr?.on("data", onData); + child.on("exit", (code, signal) => + reject(new Error(`port-forward exited: ${code}/${signal}`)), + ); + setTimeout(() => reject(new Error("port-forward timeout")), 15_000); + }); + return () => child.kill(); +} + +// --- PromQL client --- + +type TsPoint = { + unixEpoch: number; + value: number | null; +}; + +type SeriesEntry = { + labels: Record; + points: TsPoint[]; +}; + +const parseValue = (v: string | undefined): number | null => + v === undefined || v === "NaN" ? null : Number(v); + +async function queryInstant( + promql: string, + tEpoch: number, +): Promise { + const url = + `http://localhost:${PROM_PORT}/api/v1/query` + + `?query=${encodeURIComponent(promql)}&time=${tEpoch}`; + const res = await fetch(url); + if (!res.ok) { + throw new Error(`instant query ${res.status} ${res.statusText}: ${promql}`); + } + const json = (await res.json()) as { + data?: { result?: Array<{ value?: [number, string] }> }; + }; + return parseValue(json.data?.result?.[0]?.value?.[1]); +} + +async function queryRange( + promql: string, + startEpoch: number, + endEpoch: number, + stepSeconds = STEP_SECONDS, +): Promise { + const url = + `http://localhost:${PROM_PORT}/api/v1/query_range` + + `?query=${encodeURIComponent(promql)}` + + `&start=${startEpoch}&end=${endEpoch}&step=${stepSeconds}`; + const res = await fetch(url); + if (!res.ok) { + throw new Error(`range query ${res.status} ${res.statusText}: ${promql}`); + } + const json = (await res.json()) as { + data?: { + 
result?: Array<{ + metric?: Record; + values?: Array<[number, string]>; + }>; + }; + }; + const results = json.data?.result ?? []; + return results.map(({ metric, values }) => ({ + labels: metric ?? {}, + points: (values ?? []).map(([t, v]) => ({ + unixEpoch: Math.round(t), + value: parseValue(v), + })), + })); +} + +// --- Time-series definitions --- +// Every query includes the namespace filter: Prometheus is cluster-shared across +// all Aztec deployments, so un-filtered queries would pick up data from mbps-pipe, +// staging-v4-1, nightly-block-capacity, etc. + +const NS = `{k8s_namespace_name="${NAMESPACE}"}`; +const pendingTxsQueryForRole = (role: string) => + `max(aztec_mempool_tx_count{k8s_namespace_name="${NAMESPACE}",aztec_pool_name="TxPool",aztec_status="pending",k8s_pod_name=~"${NAMESPACE}-${role}.*"})`; +const PENDING_RPC_TXS_QUERY = pendingTxsQueryForRole("rpc"); +const PENDING_VALIDATOR_TXS_QUERY = pendingTxsQueryForRole("validator"); +const PENDING_FULL_NODE_TXS_QUERY = pendingTxsQueryForRole("full-node"); +const histQuantile = (q: number, bucket: string, groupBy: string[] = []) => { + const groupKeys = ["le", ...groupBy].join(", "); + return `histogram_quantile(${q}, sum by (${groupKeys})(rate(${bucket}${NS}[1m])))`; +}; + +type TimeSeriesDef = { metric: string; unit: string; query: string }; + +type PreviousRunContext = { + image?: string; + aztecConfig?: Record; +}; + +async function loadPreviousRunContext( + output: string | undefined, +): Promise { + if (!output) { + return {}; + } + try { + const existing = JSON.parse(await readFile(output, "utf8")) as { + run?: PreviousRunContext; + }; + return existing.run ?? {}; + } catch { + return {}; + } +} + +const TIME_SERIES_DEFS: Record = { + // aztec_archiver_block_tx_count is a histogram where each observation is + // "this block contained N txs" — _sum is total txs observed, _count is total + // blocks observed. 
Every archiver (RPC + every validator + every full node) + // observes the same block, so sum() across pods over-counts by the number of + // archivers. avg() gives the true per-block rate since all archivers see the + // same canonical chain. + inclusionTps: { + metric: "aztec_archiver_block_tx_count_sum", + unit: "tps", + query: `avg(rate(aztec_archiver_block_tx_count_sum${NS}[1m]))`, + }, + // aztec_node_receive_tx_count is only incremented on the RPC node that the + // load generator hits, so sum() is fine here — there's only one non-zero + // series. (Full nodes receive via gossip, not ReceiveTx.) + ingressTps: { + metric: "aztec_node_receive_tx_count", + unit: "tps", + query: `sum(rate(aztec_node_receive_tx_count${NS}[1m]))`, + }, + // Pending mempool size sliced by pod role. Three single-series slugs make cross-run + // overlay clean: pod names are unstable (replica counts and restart suffixes + // change between runs) but role is stable. Each query filters to TxPool to + // avoid mixing in the AttestationPool counters that share the metric name, and + // to pending status so max() does not collapse pending/protected/mined/softDeleted. + // max() over a role collapses the per-pod fan-out — for an under-fill + // investigation we care about the role's deepest backlog at any moment. 
+ mempoolSizeRpc: { + metric: "aztec_mempool_tx_count", + unit: "count", + query: `max(aztec_mempool_tx_count{k8s_namespace_name="${NAMESPACE}",aztec_pool_name="TxPool",aztec_status="pending",k8s_pod_name=~"${NAMESPACE}-rpc.*"})`, + }, + mempoolSizeValidator: { + metric: "aztec_mempool_tx_count", + unit: "count", + query: `max(aztec_mempool_tx_count{k8s_namespace_name="${NAMESPACE}",aztec_pool_name="TxPool",aztec_status="pending",k8s_pod_name=~"${NAMESPACE}-validator.*"})`, + }, + mempoolSizeFullNode: { + metric: "aztec_mempool_tx_count", + unit: "count", + query: `max(aztec_mempool_tx_count{k8s_namespace_name="${NAMESPACE}",aztec_pool_name="TxPool",aztec_status="pending",k8s_pod_name=~"${NAMESPACE}-full-node.*"})`, + }, + mempoolMinedMax: { + metric: "aztec_mempool_tx_count", + unit: "count", + query: `max(aztec_mempool_tx_count{k8s_namespace_name="${NAMESPACE}",aztec_pool_name="TxPool",aztec_status="mined"})`, + }, + mempoolEvictedByReasonRate: { + metric: "aztec_mempool_tx_pool_v2_evicted_count", + unit: "tps", + query: `sum by (aztec_mempool_eviction_reason)(rate(aztec_mempool_tx_pool_v2_evicted_count${NS}[1m]))`, + }, + mempoolRejectedByReasonRate: { + metric: "aztec_mempool_tx_pool_v2_rejected_count", + unit: "tps", + // Rejections currently do not carry a reason label, unlike evictions. 
+ query: `sum(rate(aztec_mempool_tx_pool_v2_rejected_count${NS}[1m]))`, + }, + blockBuildDurationP50: { + metric: "aztec_sequencer_block_build_duration_milliseconds", + unit: "ms", + query: histQuantile( + 0.5, + "aztec_sequencer_block_build_duration_milliseconds_bucket", + ), + }, + blockBuildDurationP95: { + metric: "aztec_sequencer_block_build_duration_milliseconds", + unit: "ms", + query: histQuantile( + 0.95, + "aztec_sequencer_block_build_duration_milliseconds_bucket", + ), + }, + publicProcessorTxDurationP50: { + metric: "aztec_public_processor_tx_duration_milliseconds", + unit: "ms", + query: histQuantile( + 0.5, + "aztec_public_processor_tx_duration_milliseconds_bucket", + ), + }, + publicProcessorTxDurationP95: { + metric: "aztec_public_processor_tx_duration_milliseconds", + unit: "ms", + query: histQuantile( + 0.95, + "aztec_public_processor_tx_duration_milliseconds_bucket", + ), + }, + publicProcessorGasRate: { + metric: "aztec_public_processor_gas_rate_per_second", + unit: "mana/s", + // gas_rate is a histogram of per-block public-processor L2 mana/s. The + // total_gas metric is a gauge, so rate(total_gas) is not meaningful. + query: `sum(rate(aztec_public_processor_gas_rate_per_second_sum{k8s_namespace_name="${NAMESPACE}",aztec_gas_dimension="L2"}[1m])) / sum(rate(aztec_public_processor_gas_rate_per_second_count{k8s_namespace_name="${NAMESPACE}",aztec_gas_dimension="L2"}[1m]))`, + }, + checkpointLastBlockToBroadcastP95: { + metric: + "aztec_sequencer_checkpoint_last_block_to_broadcast_duration_milliseconds", + unit: "ms", + query: histQuantile( + 0.95, + "aztec_sequencer_checkpoint_last_block_to_broadcast_duration_milliseconds_bucket", + ), + }, + // Archiver exports this as seconds into the L2 slot when the checkpoint L1 + // tx was included, not submit→mined latency. Convert to ms so the dashboard + // can use the same duration formatting as the other build-internals panels. 
+ l1InclusionDelayP95: { + metric: "aztec_archiver_checkpoint_l1_inclusion_delay_seconds", + unit: "ms", + query: `${histQuantile(0.95, "aztec_archiver_checkpoint_l1_inclusion_delay_seconds_bucket")} * 1000`, + }, + // Multi-series by topic (tx, block_proposal, attestation, …). + gossipLatencyP95: { + metric: "aztec_p2p_gossip_message_latency_milliseconds", + unit: "ms", + query: histQuantile( + 0.95, + "aztec_p2p_gossip_message_latency_milliseconds_bucket", + ["aztec_gossip_topic_name"], + ), + }, + peerCountMean: { + metric: "aztec_peer_manager_peer_count_peers", + unit: "count", + query: `avg(aztec_peer_manager_peer_count_peers${NS})`, + }, + attestationsCollectDurationMean: { + metric: "aztec_sequencer_attestations_collect_duration_milliseconds", + unit: "ms", + // This metric is exported as a gauge, not a histogram. + query: `avg(aztec_sequencer_attestations_collect_duration_milliseconds${NS})`, + }, + attestationsCollectAllowanceMean: { + metric: "aztec_sequencer_attestations_collect_allowance_milliseconds", + unit: "ms", + // The metric is declared/exported as milliseconds, but current sequencer + // code records attestationTimeAllowed in seconds. + query: `avg(aztec_sequencer_attestations_collect_allowance_milliseconds${NS}) * 1000`, + }, + checkpointBlockCountMean: { + metric: "aztec_sequencer_checkpoint_block_count", + unit: "count", + query: `avg(aztec_sequencer_checkpoint_block_count${NS})`, + }, + checkpointTxCountMean: { + metric: "aztec_sequencer_checkpoint_tx_count", + unit: "count", + query: `avg(aztec_sequencer_checkpoint_tx_count${NS})`, + }, + // tx_collector signals: each node's view of where proposal txs came from. + // These counters are emitted by every node reconstructing/validating blocks; + // avg() keeps this as a per-node view instead of multiplying by node count. 
+ txCollectorTxsFromProposalRate: { + metric: "aztec_tx_collector_txs_from_proposal_count", + unit: "tps", + query: `avg(rate(aztec_tx_collector_txs_from_proposal_count${NS}[1m]))`, + }, + txCollectorTxsFromMempoolRate: { + metric: "aztec_tx_collector_txs_from_mempool_count", + unit: "tps", + query: `avg(rate(aztec_tx_collector_txs_from_mempool_count${NS}[1m]))`, + }, + txCollectorTxsFromP2pRate: { + metric: "aztec_tx_collector_txs_from_p2p_count", + unit: "tps", + query: `avg(rate(aztec_tx_collector_txs_from_p2p_count${NS}[1m]))`, + }, + txCollectorMissingRate: { + metric: "aztec_tx_collector_missing_txs_count", + unit: "tps", + query: `avg(rate(aztec_tx_collector_missing_txs_count${NS}[1m]))`, + }, + txCollectorRequestedFractionMean: { + metric: "aztec_tx_collector_txs_requested_fraction", + unit: "fraction", + // Exported as a histogram even though the observation is already a fraction. + query: `sum(rate(aztec_tx_collector_txs_requested_fraction_sum${NS}[1m])) / sum(rate(aztec_tx_collector_txs_requested_fraction_count${NS}[1m]))`, + }, + txCollectorRequestDelayP95: { + metric: "aztec_tx_collector_txs_requested_delay_milliseconds", + unit: "ms", + query: histQuantile( + 0.95, + "aztec_tx_collector_txs_requested_delay_milliseconds_bucket", + ), + }, + // Time-in-state per sequencer state. Empty until the new image carrying + // aztec.sequencer.state_duration ships to bench-10tps. Multi-series by state. 
+ sequencerStateDurationP95: { + metric: "aztec_sequencer_state_duration_milliseconds", + unit: "ms", + query: histQuantile( + 0.95, + "aztec_sequencer_state_duration_milliseconds_bucket", + ["aztec_sequencer_state"], + ), + }, +}; + +async function scrapeTimeSeries( + startedAtEpoch: number, + endedAtEpoch: number, +): Promise> { + const out: Record = {}; + for (const [slug, def] of Object.entries(TIME_SERIES_DEFS)) { + try { + const series = await queryRange(def.query, startedAtEpoch, endedAtEpoch); + out[slug] = { + metric: def.metric, + unit: def.unit, + source: "promql", + query: def.query, + stepSeconds: STEP_SECONDS, + series, + }; + } catch (err) { + log(`timeSeries.${slug} scrape failed, emitting empty series`, { + err: err instanceof Error ? err.message : String(err), + }); + out[slug] = { + metric: def.metric, + unit: def.unit, + source: "promql", + query: def.query, + stepSeconds: STEP_SECONDS, + series: [], + }; + } + } + return out; +} + +// --- gcloud log scrape --- + +type GcloudEntry = { + timestamp: string; + jsonPayload?: Record; + resource?: { labels?: { pod_name?: string } }; +}; + +async function gcloudRead(filter: string): Promise { + return new Promise((resolve, reject) => { + const child = spawn( + "gcloud", + [ + "logging", + "read", + filter, + "--format=json", + "--order=asc", + "--freshness=24h", + "--limit=50000", + ], + { stdio: ["ignore", "pipe", "pipe"] }, + ); + const chunks: Buffer[] = []; + const errChunks: Buffer[] = []; + child.stdout.on("data", (c) => chunks.push(c)); + child.stderr.on("data", (c) => errChunks.push(c)); + child.on("exit", (code) => { + if (code !== 0) { + reject( + new Error( + `gcloud logging read exited ${code}: ${Buffer.concat(errChunks).toString()}`, + ), + ); + return; + } + try { + resolve(JSON.parse(Buffer.concat(chunks).toString() || "[]")); + } catch (err) { + reject(err); + } + }); + }); +} + +const timeFilter = (startedAt: string, endedAt: string) => + `timestamp >= "${startedAt}" AND timestamp <= 
"${endedAt}"`; + +// --- Run-context capture (image + aztec config env) --- + +// Curated subset of env vars worth recording per run so the dashboard can +// show e.g. "pool=20k vs pool=1000" alongside two compared runs. Anything not +// in this list is excluded — full env would be huge and mostly uninteresting. +const AZTEC_CONFIG_KEYS = [ + "SEQ_ENABLE_PROPOSER_PIPELINING", + "SEQ_BLOCK_DURATION_MS", + "SEQ_MAX_TX_PER_BLOCK", + "SEQ_MAX_TX_PER_CHECKPOINT", + "SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT", + "SEQ_BUILD_CHECKPOINT_IF_EMPTY", + "AZTEC_MANA_TARGET", + "P2P_MAX_PENDING_TX_COUNT", + "AZTEC_EPOCH_DURATION", + "AZTEC_SLOT_DURATION", + "AZTEC_LAG_IN_EPOCHS_FOR_VALIDATOR_SET", + "LOG_LEVEL", +]; + +async function runKubectl(args: string[]): Promise { + return new Promise((resolve, reject) => { + const child = spawn("kubectl", args, { + stdio: ["ignore", "pipe", "pipe"], + }); + const chunks: Buffer[] = []; + const errs: Buffer[] = []; + child.stdout.on("data", (c) => chunks.push(c)); + child.stderr.on("data", (c) => errs.push(c)); + child.on("exit", (code) => { + if (code !== 0) { + reject( + new Error( + `kubectl ${args.join(" ")} exited ${code}: ${Buffer.concat(errs).toString()}`, + ), + ); + return; + } + resolve(Buffer.concat(chunks).toString()); + }); + }); +} + +async function captureImage(): Promise { + try { + const out = await runKubectl([ + "get", + "statefulset", + `${NAMESPACE}-validator`, + "-n", + NAMESPACE, + "-o", + 'jsonpath={.spec.template.spec.containers[?(@.name=="aztec")].image}', + ]); + const trimmed = out.trim(); + return trimmed.length > 0 ? trimmed : undefined; + } catch (err) { + log("image capture failed", { + err: err instanceof Error ? 
err.message : String(err), + }); + return undefined; + } +} + +async function captureAztecConfig(): Promise> { + try { + const out = await runKubectl([ + "exec", + `${NAMESPACE}-validator-0`, + "-c", + "aztec", + "-n", + NAMESPACE, + "--", + "printenv", + ]); + const aztecConfig: Record = {}; + for (const line of out.split("\n")) { + const idx = line.indexOf("="); + if (idx < 0) { + continue; + } + const key = line.slice(0, idx); + if (AZTEC_CONFIG_KEYS.includes(key)) { + aztecConfig[key] = line.slice(idx + 1); + } + } + return aztecConfig; + } catch (err) { + log("aztec config env capture failed", { + err: err instanceof Error ? err.message : String(err), + }); + return {}; + } +} + +type BlockRecord = { + blockNumber: number; + blockNumberInTest: number; + minedAt: string; + successfulCount: number; + failedCount: number; + silentlySkippedCount: number; + silentlySkippedDurationMs: number; + buildDurationSeconds: number; + totalPublicGas?: { daGas: number; l2Gas: number }; + totalSizeInBytes?: number; + source: "log"; +}; + +async function scrapeBlocks( + startedAt: string, + endedAt: string, +): Promise { + const filter = [ + `resource.labels.namespace_name="${NAMESPACE}"`, + `resource.labels.pod_name=~"${NAMESPACE}-(validator|rpc).*"`, + `jsonPayload.message=~"^Processed [0-9]+ successful txs and"`, + timeFilter(startedAt, endedAt), + ].join(" AND "); + const entries = await gcloudRead(filter); + + // Each block is logged once per pod that processed it (validators + + // RPC-colocated full node sync). Dedupe by blockNumber, keep the earliest + // timestamp — that's most likely the proposer who built the block. + const byBlock = new Map(); + for (const entry of entries) { + const p = entry.jsonPayload; + if (!p) { + continue; + } + const blockNumber = + typeof p.blockNumber === "number" + ? p.blockNumber + : typeof p.blockNumber === "string" + ? 
Number(p.blockNumber) + : NaN; + if (!Number.isFinite(blockNumber)) { + continue; + } + const t = Date.parse(entry.timestamp); + const prev = byBlock.get(blockNumber); + if (!prev || t < prev.time) { + byBlock.set(blockNumber, { entry, time: t }); + } + } + + if (byBlock.size === 0) { + return []; + } + const blockNumbers = [...byBlock.keys()].sort((a, b) => a - b); + const first = blockNumbers[0]; + + return blockNumbers.map((bn) => { + const { entry } = byBlock.get(bn)!; + const p = entry.jsonPayload!; + return { + blockNumber: bn, + blockNumberInTest: bn - first, + minedAt: entry.timestamp, + successfulCount: Number(p.successfulCount ?? 0), + failedCount: Number(p.failedCount ?? 0), + silentlySkippedCount: Number(p.silentlySkippedCount ?? 0), + silentlySkippedDurationMs: Number(p.silentlySkippedDurationMs ?? 0), + buildDurationSeconds: Number(p.duration ?? 0), + totalPublicGas: p.totalPublicGas as + | { daGas: number; l2Gas: number } + | undefined, + totalSizeInBytes: + typeof p.totalSizeInBytes === "number" ? 
p.totalSizeInBytes : undefined, + source: "log", + }; + }); +} + +type ChainPrunedEvent = { + at: string; + type: "chainPruned"; + source: "log"; + fromBlock?: number; + toBlock?: number; +}; + +type SlotSummaryEvent = { + at: string; + type: "slotSummary"; + source: "log"; + slotNumber: number; + buildSlot?: number; + checkpointNumber?: number; + sourcePod?: string; + proposer?: string; + attestorAddress?: string; + publisherAddress?: string; + blocksBuilt?: number; + txCount?: number; + totalMana?: number; + blockBuildFailures?: Array>; + checkpointBuildFailure?: Record; + attestations?: Record; + publish?: Record; +}; + +type Event = ChainPrunedEvent | SlotSummaryEvent; + +type SequencerStateSlot = { + slotNumber: number; + startedAt: string; + endedAt: string; + sourcePod?: string; + totalMs: number; + states: Record; +}; + +const CHAIN_PRUNED_MSG = /Chain pruned to block (\d+)/; + +async function scrapeEvents( + startedAt: string, + endedAt: string, + blocks: BlockRecord[], +): Promise { + const [chainPruned, slotSummaries] = await Promise.all([ + scrapeChainPrunedEvents(startedAt, endedAt, blocks), + scrapeSlotSummaryEvents(startedAt, endedAt), + ]); + return [...chainPruned, ...slotSummaries].sort( + (a, b) => Date.parse(a.at) - Date.parse(b.at), + ); +} + +async function scrapeChainPrunedEvents( + startedAt: string, + endedAt: string, + blocks: BlockRecord[], +): Promise { + const filter = [ + `resource.labels.namespace_name="${NAMESPACE}"`, + `jsonPayload.message=~"Chain pruned to block [0-9]+"`, + timeFilter(startedAt, endedAt), + ].join(" AND "); + const entries = await gcloudRead(filter); + + // Same real prune is logged by every node as they catch up — multiple log + // lines with ms-spaced timestamps for the same toBlock. But toBlock is not a + // unique key across a run: if proposals fail in consecutive slots after a + // first prune, the chain can re-prune to the same block. 
Dedupe by (toBlock + // within a 30s window of the earliest observation): preserves distinct + // prune events, collapses the per-node log fan-out. + const DEDUPE_WINDOW_MS = 30_000; + const parsed = entries + .map((entry) => { + const msg = (entry.jsonPayload?.message as string | undefined) ?? ""; + const m = CHAIN_PRUNED_MSG.exec(msg); + if (!m) { + return null; + } + return { + at: entry.timestamp, + time: Date.parse(entry.timestamp), + toBlock: Number(m[1]), + }; + }) + .filter( + (x): x is { at: string; time: number; toBlock: number } => x !== null, + ) + .sort((a, b) => a.time - b.time); + + const deduped: typeof parsed = []; + for (const e of parsed) { + const sameEvent = deduped.find( + (prev) => + prev.toBlock === e.toBlock && e.time - prev.time < DEDUPE_WINDOW_MS, + ); + if (!sameEvent) { + deduped.push(e); + } + } + + return deduped.map(({ at, time, toBlock }) => { + // fromBlock is reconstructed because server_world_state_synchronizer.ts:459 + // doesn't log it structurally. Correlate with the latest block we've seen + // at or before the prune timestamp. + const before = blocks + .filter((b) => Date.parse(b.minedAt) <= time) + .reduce((max, b) => (b.blockNumber > max ? 
b.blockNumber : max), 0); + return { + at, + type: "chainPruned" as const, + source: "log" as const, + fromBlock: before || undefined, + toBlock, + }; + }); +} + +async function scrapeSlotSummaryEvents( + startedAt: string, + endedAt: string, +): Promise { + const filter = [ + `resource.labels.namespace_name="${NAMESPACE}"`, + `resource.labels.pod_name=~"${NAMESPACE}-validator.*"`, + `jsonPayload.eventName=~"^(benchmark-|sequencer-checkpoint-)"`, + timeFilter(startedAt, endedAt), + ].join(" AND "); + const entries = await gcloudRead(filter); + + const bySlot = new Map(); + for (const entry of entries) { + const p = entry.jsonPayload; + const slotNumber = numberField(p?.slot); + if (!p || !Number.isFinite(slotNumber)) { + continue; + } + const eventName = normalizeSlotSummaryEventName(String(p.eventName ?? "")); + if (eventName === undefined) { + continue; + } + const event = getOrCreateSlotSummary(bySlot, slotNumber, entry); + + if (eventName === "slot-started") { + assignDefined(event, { + buildSlot: numberOrUndefined(p.buildSlot), + checkpointNumber: numberOrUndefined(p.checkpointNumber), + proposer: stringOrUndefined(p.proposer), + attestorAddress: stringOrUndefined(p.attestorAddress), + publisherAddress: stringOrUndefined(p.publisherAddress), + }); + } else if (eventName === "checkpoint-built") { + assignDefined(event, { + buildSlot: numberOrUndefined(p.buildSlot), + checkpointNumber: numberOrUndefined(p.checkpointNumber), + proposer: stringOrUndefined(p.proposer), + attestorAddress: stringOrUndefined(p.attestorAddress), + publisherAddress: stringOrUndefined(p.publisherAddress), + blocksBuilt: numberOrUndefined(p.blocksBuilt), + txCount: numberOrUndefined(p.txCount), + totalMana: numberOrUndefined(p.totalMana), + }); + } else if (eventName === "block-build-failed") { + event.blockBuildFailures ??= []; + event.blockBuildFailures.push( + compactObject({ + at: entry.timestamp, + reason: stringOrUndefined(p.reason), + blockNumber: numberOrUndefined(p.blockNumber), 
+ checkpointNumber: numberOrUndefined(p.checkpointNumber), + indexWithinCheckpoint: numberOrUndefined(p.indexWithinCheckpoint), + availableTxs: numberOrUndefined(p.availableTxs), + minTxs: numberOrUndefined(p.minTxs), + minValidTxs: numberOrUndefined(p.minValidTxs), + numTxs: numberOrUndefined(p.numTxs), + }), + ); + } else if (eventName === "checkpoint-build-failed") { + event.checkpointBuildFailure = compactObject({ + at: entry.timestamp, + reason: stringOrUndefined(p.reason), + checkpointNumber: numberOrUndefined(p.checkpointNumber), + blocksBuilt: numberOrUndefined(p.blocksBuilt), + minBlocksForCheckpoint: numberOrUndefined(p.minBlocksForCheckpoint), + }); + } else if ( + eventName === "attestations-collected" || + eventName === "attestations-failed" + ) { + event.attestations = compactObject({ + status: eventName === "attestations-collected" ? "collected" : "failed", + checkpointNumber: numberOrUndefined(p.checkpointNumber), + committeeSize: numberOrUndefined(p.committeeSize), + requiredAttestations: numberOrUndefined(p.requiredAttestations), + collectedAttestations: numberOrUndefined(p.collectedAttestations), + submittedAttestations: numberOrUndefined(p.submittedAttestations), + missingValidatorCount: numberOrUndefined(p.missingValidatorCount), + missingValidators: stringArrayOrUndefined(p.missingValidators), + reason: stringOrUndefined(p.reason), + }); + } else if ( + eventName === "checkpoint-published" || + eventName === "checkpoint-publish-failed" + ) { + event.publish = compactObject({ + status: eventName === "checkpoint-published" ? 
"published" : "failed", + checkpointNumber: numberOrUndefined(p.checkpointNumber), + successfulActions: stringArrayOrUndefined(p.successfulActions), + failedActions: stringArrayOrUndefined(p.failedActions), + sentActions: stringArrayOrUndefined(p.sentActions), + expiredActions: stringArrayOrUndefined(p.expiredActions), + reason: stringOrUndefined(p.reason), + }); + } + } + + return [...bySlot.values()].sort((a, b) => a.slotNumber - b.slotNumber); +} + +function normalizeSlotSummaryEventName(eventName: string): string | undefined { + if (eventName.startsWith("benchmark-")) { + return eventName.slice("benchmark-".length); + } + if (!eventName.startsWith("sequencer-checkpoint-")) { + return undefined; + } + + const name = eventName.slice("sequencer-checkpoint-".length); + const aliases: Record = { + built: "checkpoint-built", + "build-failed": "checkpoint-build-failed", + published: "checkpoint-published", + "publish-failed": "checkpoint-publish-failed", + }; + return aliases[name] ?? name; +} + +function getOrCreateSlotSummary( + bySlot: Map, + slotNumber: number, + entry: GcloudEntry, +): SlotSummaryEvent { + const existing = bySlot.get(slotNumber); + if (existing) { + if (Date.parse(entry.timestamp) < Date.parse(existing.at)) { + existing.at = entry.timestamp; + } + return existing; + } + const created: SlotSummaryEvent = { + at: entry.timestamp, + type: "slotSummary", + source: "log", + slotNumber, + sourcePod: entry.resource?.labels?.pod_name, + }; + bySlot.set(slotNumber, created); + return created; +} + +function assignDefined( + target: Record, + values: Record, +): void { + for (const [key, value] of Object.entries(values)) { + if (value !== undefined) { + target[key] = value; + } + } +} + +function compactObject>(obj: T): T { + for (const key of Object.keys(obj)) { + if (obj[key] === undefined) { + delete obj[key]; + } + } + return obj; +} + +function numberOrUndefined(v: unknown): number | undefined { + const n = numberField(v); + return Number.isFinite(n) 
? n : undefined; +} + +function stringOrUndefined(v: unknown): string | undefined { + return typeof v === "string" && v !== "" ? v : undefined; +} + +function stringArrayOrUndefined(v: unknown): string[] | undefined { + if (!Array.isArray(v)) { + return undefined; + } + return v.map(String); +} + +const SEQUENCER_STATE_MSG = /^Transitioning from ([A-Z_]+) to ([A-Z_]+)/; +const PROPOSER_STATE_SCORE = new Set([ + "INITIALIZING_CHECKPOINT", + "WAITING_FOR_TXS", + "CREATING_BLOCK", + "WAITING_UNTIL_NEXT_BLOCK", + "ASSEMBLING_CHECKPOINT", + "COLLECTING_ATTESTATIONS", + "PUBLISHING_CHECKPOINT", +]); + +async function scrapeSequencerStateSlots( + startedAt: string, + endedAt: string, +): Promise { + const filter = [ + `resource.labels.namespace_name="${NAMESPACE}"`, + `resource.labels.pod_name=~"${NAMESPACE}-validator.*"`, + `jsonPayload.message=~"^Transitioning from "`, + timeFilter(startedAt, endedAt), + ].join(" AND "); + const entries = await gcloudRead(filter); + + type PodSlot = { + slotNumber: number; + sourcePod?: string; + startedAt: string; + endedAt: string; + firstTime: number; + lastTime: number; + states: Record; + }; + + const byPodSlot = new Map(); + + for (const entry of entries) { + const p = entry.jsonPayload; + if (!p) { + continue; + } + const message = String(p.message ?? ""); + const match = SEQUENCER_STATE_MSG.exec(message); + const state = + typeof p.oldState === "string" ? p.oldState : (match?.[1] ?? ""); + const slotNumber = numberField(p.stateSlotNumber); + const durationMs = numberField(p.stateDurationMs); + if ( + !state || + !Number.isFinite(slotNumber) || + !Number.isFinite(durationMs) + ) { + continue; + } + const time = Date.parse(entry.timestamp); + if (!Number.isFinite(time)) { + continue; + } + const podName = entry.resource?.labels?.pod_name; + const key = `${podName ?? 
"unknown"}:${slotNumber}`; + const current = byPodSlot.get(key); + if (!current) { + byPodSlot.set(key, { + slotNumber, + sourcePod: podName, + startedAt: entry.timestamp, + endedAt: entry.timestamp, + firstTime: time, + lastTime: time, + states: { [state]: durationMs }, + }); + continue; + } + current.states[state] = (current.states[state] ?? 0) + durationMs; + if (time < current.firstTime) { + current.firstTime = time; + current.startedAt = entry.timestamp; + } + if (time > current.lastTime) { + current.lastTime = time; + current.endedAt = entry.timestamp; + } + } + + // Multiple validator pods can log sequencer transitions for the same slot. + // For the benchmark chart we want the proposer path, so choose the pod-slot + // with the most time in checkpoint/block-production states. + const bestBySlot = new Map(); + for (const candidate of byPodSlot.values()) { + const prev = bestBySlot.get(candidate.slotNumber); + if (!prev || podSlotScore(candidate) > podSlotScore(prev)) { + bestBySlot.set(candidate.slotNumber, candidate); + } + } + + return [...bestBySlot.values()] + .sort((a, b) => a.slotNumber - b.slotNumber) + .map((slot) => { + const totalMs = Object.values(slot.states).reduce((a, b) => a + b, 0); + return { + slotNumber: slot.slotNumber, + startedAt: slot.startedAt, + endedAt: slot.endedAt, + ...(slot.sourcePod !== undefined && { sourcePod: slot.sourcePod }), + totalMs, + states: slot.states, + }; + }); +} + +function podSlotScore(slot: { states: Record }): number { + let score = 0; + for (const [state, durationMs] of Object.entries(slot.states)) { + score += PROPOSER_STATE_SCORE.has(state) ? durationMs * 10 : durationMs; + } + return score; +} + +function numberField(v: unknown): number { + if (typeof v === "number") { + return Number.isFinite(v) ? v : NaN; + } + if (typeof v === "string" && v.trim() !== "") { + const n = Number(v); + return Number.isFinite(n) ? 
n : NaN; + } + return NaN; +} + +// --- Summary --- + +const meanNonNull = (points: TsPoint[]): number | null => { + const vals = points + .map((p) => p.value) + .filter((v): v is number => v !== null && Number.isFinite(v)); + if (vals.length === 0) { + return null; + } + return vals.reduce((a, b) => a + b, 0) / vals.length; +}; + +const maxNonNull = (points: TsPoint[]): number | null => { + const vals = points + .map((p) => p.value) + .filter((v): v is number => v !== null && Number.isFinite(v)); + return vals.length === 0 ? null : Math.max(...vals); +}; + +type SummaryArgs = { + targetTps: number; + startedAtEpoch: number; + inclusionEndedAtEpoch: number; + windowSec: number; + histogramWindowSec: number; + endedAtEpoch: number; + timeSeries: Record; + blocks: BlockRecord[]; + events: Event[]; +}; + +async function buildSummary(a: SummaryArgs): Promise> { + // inclusionTps is single-series; series[0] holds all points. + const inclusionPoints = ( + a.timeSeries.inclusionTps?.series?.[0]?.points ?? [] + ).filter( + (p) => + p.unixEpoch >= a.startedAtEpoch && p.unixEpoch <= a.inclusionEndedAtEpoch, + ); + const inclusionBlocks = a.blocks.filter((b) => { + const minedAtEpoch = Math.floor(Date.parse(b.minedAt) / 1000); + return ( + Number.isFinite(minedAtEpoch) && + minedAtEpoch >= a.startedAtEpoch && + minedAtEpoch <= a.inclusionEndedAtEpoch + ); + }); + const totalTxsMined = inclusionBlocks.reduce( + (s, b) => s + b.successfulCount, + 0, + ); + const inclusionTpsMean = + a.windowSec > 0 + ? totalTxsMined / a.windowSec + : meanNonNull(inclusionPoints); + const inclusionTpsPeak = maxNonNull(inclusionPoints); + + const safeInstant = async (promql: string): Promise => { + try { + return await queryInstant(promql, a.endedAtEpoch); + } catch (err) { + log("summary instant query failed", { + err: err instanceof Error ? 
err.message : String(err), + promql, + }); + return null; + } + }; + + const windowSpec = `${a.histogramWindowSec}s`; + const oneShotQuantile = (q: number, bucket: string) => + `histogram_quantile(${q}, sum by (le)(rate(${bucket}${NS}[${windowSpec}])))`; + + const [ + inclLatP50, + inclLatP95, + inclLatP99, + buildP50, + buildP95, + ppTxP50, + ppTxP95, + ] = await Promise.all([ + safeInstant( + oneShotQuantile(0.5, "aztec_mempool_tx_mined_delay_milliseconds_bucket"), + ), + safeInstant( + oneShotQuantile(0.95, "aztec_mempool_tx_mined_delay_milliseconds_bucket"), + ), + safeInstant( + oneShotQuantile(0.99, "aztec_mempool_tx_mined_delay_milliseconds_bucket"), + ), + safeInstant( + oneShotQuantile( + 0.5, + "aztec_sequencer_block_build_duration_milliseconds_bucket", + ), + ), + safeInstant( + oneShotQuantile( + 0.95, + "aztec_sequencer_block_build_duration_milliseconds_bucket", + ), + ), + safeInstant( + oneShotQuantile( + 0.5, + "aztec_public_processor_tx_duration_milliseconds_bucket", + ), + ), + safeInstant( + oneShotQuantile( + 0.95, + "aztec_public_processor_tx_duration_milliseconds_bucket", + ), + ), + ]); + + const reorgs = a.events.filter((e) => e.type === "chainPruned"); + const deepest = reorgs.reduce((max, e) => { + const d = (e.fromBlock ?? 0) - (e.toBlock ?? 0); + return d > max ? d : max; + }, 0); + + return { + headlineKpi: + inclusionTpsMean === null ? 
null : inclusionTpsMean / a.targetTps, + targetTps: a.targetTps, + inclusionTpsMean, + inclusionTpsPeak, + inclusionLatencyP50Ms: inclLatP50, + inclusionLatencyP95Ms: inclLatP95, + inclusionLatencyP99Ms: inclLatP99, + blockBuildDurationP50Ms: buildP50, + blockBuildDurationP95Ms: buildP95, + publicProcessorTxDurationP50Ms: ppTxP50, + publicProcessorTxDurationP95Ms: ppTxP95, + totalTxsMined, + totalTxsFailed: inclusionBlocks.reduce((s, b) => s + b.failedCount, 0), + totalSilentSkipCount: inclusionBlocks.reduce( + (s, b) => s + b.silentlySkippedCount, + 0, + ), + totalSilentSkipDurationMs: inclusionBlocks.reduce( + (s, b) => s + b.silentlySkippedDurationMs, + 0, + ), + reorgCount: reorgs.length, + deepestReorgBlocks: deepest, + }; +} + +// --- Inline shape validation --- + +function assertShape(payload: Record): void { + const required = [ + "schemaVersion", + "run", + "summary", + "timeSeries", + "blocks", + "events", + ] as const; + for (const key of required) { + if (!(key in payload)) { + throw new Error(`output missing required top-level key: ${key}`); + } + } + if (payload.schemaVersion !== "3") { + throw new Error( + `schemaVersion must be "3", got ${String(payload.schemaVersion)}`, + ); + } + const run = payload.run as Record; + for (const key of ["runId", "startedAt", "endedAt", "namespace"] as const) { + if (!(key in run)) { + throw new Error(`run.${key} missing`); + } + } +} + +// --- Live drain gate --- + +async function waitForScrapeWindowEnd(args: Args, endedAtEpoch: number) { + const minimumEndEpoch = endedAtEpoch + DRAIN_BUFFER_SECONDS; + const invokedAtEpoch = Math.floor(Date.now() / 1000); + + if (!args.waitForPendingZero) { + const drainSeconds = Math.max(0, minimumEndEpoch - invokedAtEpoch); + if (drainSeconds > 0) { + log( + `Draining ${drainSeconds}s to let OTel batches (60s) + Prom scrape (15s) settle`, + ); + await sleep(drainSeconds * 1000); + } + return { + scrapeWindowEndEpoch: minimumEndEpoch, + inclusionEndedAtEpoch: minimumEndEpoch, + 
pendingAtEnd: null as number | null, + pendingByRoleAtEnd: null, + pendingTimedOut: false, + }; + } + + if ( + !Number.isFinite(args.maxPendingWaitSeconds) || + args.maxPendingWaitSeconds < 0 + ) { + throw new Error( + `invalid --max-pending-wait-seconds: ${args.maxPendingWaitSeconds}`, + ); + } + + const deadlineEpoch = endedAtEpoch + args.maxPendingWaitSeconds; + const historicalZeroEpoch = await findPendingZeroEpoch( + endedAtEpoch, + Math.min(invokedAtEpoch, deadlineEpoch), + ); + if (historicalZeroEpoch !== undefined) { + const scrapeWindowEndEpoch = Math.max( + minimumEndEpoch, + historicalZeroEpoch + DRAIN_BUFFER_SECONDS, + ); + const waitSeconds = Math.max(0, scrapeWindowEndEpoch - invokedAtEpoch); + if (waitSeconds > 0) { + log("Pending txs drained; waiting for telemetry/log settle window", { + pendingZeroAt: historicalZeroEpoch, + waitSeconds, + }); + await sleep(waitSeconds * 1000); + } else { + log("Found historical pending-drain point; starting scrape", { + pendingZeroAt: historicalZeroEpoch, + scrapeWindowEndEpoch, + }); + } + return { + scrapeWindowEndEpoch, + inclusionEndedAtEpoch: historicalZeroEpoch, + pendingAtEnd: 0, + pendingByRoleAtEnd: await readPendingByRole(scrapeWindowEndEpoch), + pendingTimedOut: false, + }; + } + + let lastPending: number | null = null; + let pendingZeroSinceEpoch: number | undefined; + + while (Math.floor(Date.now() / 1000) <= deadlineEpoch) { + const nowEpoch = Math.floor(Date.now() / 1000); + try { + lastPending = await queryInstant(PENDING_VALIDATOR_TXS_QUERY, nowEpoch); + const pending = lastPending; + pendingZeroSinceEpoch = + pending !== null && pending <= 0 + ? (pendingZeroSinceEpoch ?? nowEpoch) + : undefined; + const zeroSettleEndEpoch = + pendingZeroSinceEpoch === undefined + ? undefined + : pendingZeroSinceEpoch + DRAIN_BUFFER_SECONDS; + const scrapeReadyEpoch = Math.max( + minimumEndEpoch, + zeroSettleEndEpoch ?? 
Number.POSITIVE_INFINITY, + ); + const settleRemainingSeconds = Math.max( + 0, + Number.isFinite(scrapeReadyEpoch) ? scrapeReadyEpoch - nowEpoch : 0, + ); + if (pending !== null && pending <= 0 && settleRemainingSeconds === 0) { + const pendingByRoleAtEnd = await readPendingByRole(scrapeReadyEpoch); + log("Validator pending txs drained; starting scrape", { + pending, + pendingByRoleAtEnd, + }); + return { + scrapeWindowEndEpoch: scrapeReadyEpoch, + inclusionEndedAtEpoch: pendingZeroSinceEpoch ?? nowEpoch, + pendingAtEnd: pending, + pendingByRoleAtEnd, + pendingTimedOut: false, + }; + } + log("Waiting for validator pending txs to drain before scrape", { + validatorPending: pending, + pendingZeroSinceEpoch, + settleRemainingSeconds, + timeoutRemainingSeconds: Math.max(0, deadlineEpoch - nowEpoch), + }); + } catch (err) { + log("pending tx drain check failed", { + err: err instanceof Error ? err.message : String(err), + timeoutRemainingSeconds: Math.max(0, deadlineEpoch - nowEpoch), + }); + } + await sleep(PENDING_POLL_SECONDS * 1000); + } + + const nowEpoch = Math.floor(Date.now() / 1000); + log("Timed out waiting for pending txs to drain; scraping current window", { + validatorPending: lastPending, + maxPendingWaitSeconds: args.maxPendingWaitSeconds, + }); + return { + scrapeWindowEndEpoch: Math.max(nowEpoch, minimumEndEpoch), + inclusionEndedAtEpoch: Math.max(nowEpoch, minimumEndEpoch), + pendingAtEnd: lastPending, + pendingByRoleAtEnd: await readPendingByRole(nowEpoch), + pendingTimedOut: true, + }; +} + +async function readPendingByRole(tEpoch: number) { + const read = async (promql: string): Promise => { + try { + return await queryInstant(promql, tEpoch); + } catch (err) { + log("pending-by-role instant query failed", { + err: err instanceof Error ? 
err.message : String(err), + promql, + }); + return null; + } + }; + const [rpc, validator, fullNode] = await Promise.all([ + read(PENDING_RPC_TXS_QUERY), + read(PENDING_VALIDATOR_TXS_QUERY), + read(PENDING_FULL_NODE_TXS_QUERY), + ]); + return { rpc, validator, fullNode }; +} + +async function findPendingZeroEpoch( + startEpoch: number, + endEpoch: number, +): Promise { + if (endEpoch <= startEpoch) { + return undefined; + } + try { + const series = await queryRange( + PENDING_VALIDATOR_TXS_QUERY, + startEpoch, + endEpoch, + PENDING_POLL_SECONDS, + ); + const points = series + .flatMap((s) => s.points) + .sort((a, b) => { + return a.unixEpoch - b.unixEpoch; + }); + return points.find((p) => p.value !== null && p.value <= 0)?.unixEpoch; + } catch (err) { + log("historical pending tx drain check failed", { + err: err instanceof Error ? err.message : String(err), + }); + return undefined; + } +} + +// --- Main --- + +async function main(): Promise { + const args = parseArgs(); + const startedAtEpoch = Math.floor(Date.parse(args.startedAt) / 1000); + const endedAtEpoch = Math.floor(Date.parse(args.endedAt) / 1000); + if (!Number.isFinite(startedAtEpoch) || !Number.isFinite(endedAtEpoch)) { + throw new Error( + `invalid timestamp: started=${args.startedAt}, ended=${args.endedAt}`, + ); + } + const windowSec = Math.max(1, endedAtEpoch - startedAtEpoch); + + log("Opening port-forward to Prometheus"); + const teardown = await portForwardProm(); + + try { + const drain = await waitForScrapeWindowEnd(args, endedAtEpoch); + const drainEndedAt = new Date( + drain.scrapeWindowEndEpoch * 1000, + ).toISOString(); + + // Bounded window: by default [startedAt, endedAt + drain buffer]. Live runs + // can opt into extending the end until pending TxPool depth reaches zero. 
+ const promEndEpoch = drain.scrapeWindowEndEpoch; + + log("Capturing run context (image + aztec config env)"); + const [capturedImage, capturedAztecConfig] = await Promise.all([ + captureImage(), + captureAztecConfig(), + ]); + const previousRunContext = await loadPreviousRunContext(args.output); + const image = capturedImage ?? previousRunContext.image; + const aztecConfig = + Object.keys(capturedAztecConfig).length > 0 + ? capturedAztecConfig + : (previousRunContext.aztecConfig ?? {}); + + log("Scraping Prometheus time-series"); + const timeSeries = await scrapeTimeSeries(startedAtEpoch, promEndEpoch); + + log("Scraping per-block logs from gcloud"); + // Extend the log window by the drain buffer too — some blocks near endedAt + // arrive in gcloud after the test stops sending. + const logEndedAt = drainEndedAt; + let blocks: BlockRecord[] = []; + try { + blocks = await scrapeBlocks(args.startedAt, logEndedAt); + log(`Collected ${blocks.length} block records`); + } catch (err) { + log("blocks scrape failed", { + err: err instanceof Error ? err.message : String(err), + }); + } + + log("Scraping event logs from gcloud"); + let events: Event[] = []; + try { + events = await scrapeEvents(args.startedAt, logEndedAt, blocks); + log(`Collected ${events.length} events`); + } catch (err) { + log("events scrape failed", { + err: err instanceof Error ? err.message : String(err), + }); + } + + log("Scraping sequencer state transition logs from gcloud"); + let sequencerStateSlots: SequencerStateSlot[] = []; + try { + sequencerStateSlots = await scrapeSequencerStateSlots( + args.startedAt, + logEndedAt, + ); + log(`Collected ${sequencerStateSlots.length} sequencer state slots`); + } catch (err) { + log("sequencer state scrape failed", { + err: err instanceof Error ? 
err.message : String(err), + }); + } + + log("Building summary"); + const observedWindowSec = Math.max( + 1, + drain.inclusionEndedAtEpoch - startedAtEpoch, + ); + const summary = await buildSummary({ + targetTps: args.targetTps, + startedAtEpoch, + inclusionEndedAtEpoch: drain.inclusionEndedAtEpoch, + windowSec: observedWindowSec, + histogramWindowSec: observedWindowSec, + endedAtEpoch: drain.inclusionEndedAtEpoch, + timeSeries: timeSeries as Record, + blocks, + events, + }); + + const payload = { + schemaVersion: "3", + run: { + runId: args.runId, + startedAt: args.startedAt, + endedAt: args.endedAt, + inclusionEndedAt: new Date( + drain.inclusionEndedAtEpoch * 1000, + ).toISOString(), + drainEndedAt, + namespace: NAMESPACE, + gcpProject: GCP_PROJECT, + gcpLocation: GCP_REGION, + gkeCluster: GKE_CLUSTER, + ...(image !== undefined && { image }), + targetTps: args.targetTps, + testDurationSeconds: windowSec, + workload: args.workload, + ...(Object.keys(aztecConfig).length > 0 && { aztecConfig }), + scrapeConfig: { + drainSeconds: Math.max(0, drain.scrapeWindowEndEpoch - endedAtEpoch), + stepSeconds: STEP_SECONDS, + promUrl: `http://localhost:${PROM_PORT}`, + waitForPendingZero: args.waitForPendingZero, + maxPendingWaitSeconds: args.maxPendingWaitSeconds, + pendingAtScrape: drain.pendingAtEnd, + pendingByRoleAtScrape: drain.pendingByRoleAtEnd, + pendingWaitTimedOut: drain.pendingTimedOut, + }, + }, + summary, + timeSeries, + blocks, + events, + sequencerStateSlots, + }; + + assertShape(payload); + + const outPath = args.output ?? `/tmp/bench-10tps-${args.runId}.json`; + await writeFile(outPath, JSON.stringify(payload, null, 2)); + // stdout: single line, consumed by the shell wrapper. + console.log(outPath); + } finally { + teardown(); + } +} + +main().catch((err) => { + stderr.write( + `[scrape] fatal: ${err instanceof Error ? (err.stack ?? 
err.message) : String(err)}\n`, + ); + exit(1); +}); diff --git a/spartan/scripts/deploy_network.sh b/spartan/scripts/deploy_network.sh index 85568aa25957..6027cad2d272 100755 --- a/spartan/scripts/deploy_network.sh +++ b/spartan/scripts/deploy_network.sh @@ -121,6 +121,7 @@ PROVER_FAILED_PROOF_STORE=${PROVER_FAILED_PROOF_STORE:-} SEQ_MIN_TX_PER_BLOCK=${SEQ_MIN_TX_PER_BLOCK:-1} SEQ_MAX_TX_PER_BLOCK=${SEQ_MAX_TX_PER_BLOCK:-null} SEQ_MAX_TX_PER_CHECKPOINT=${SEQ_MAX_TX_PER_CHECKPOINT:-8} +P2P_MAX_PENDING_TX_COUNT=${P2P_MAX_PENDING_TX_COUNT:-null} SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER=${SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER:-null} SEQ_BLOCK_DURATION_MS=${SEQ_BLOCK_DURATION_MS:-} SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT=${SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT:-} @@ -570,6 +571,7 @@ VALIDATOR_HA_OLD_DUTIES_MAX_AGE_H = ${VALIDATOR_HA_OLD_DUTIES_MAX_AGE_H} SEQ_MIN_TX_PER_BLOCK = ${SEQ_MIN_TX_PER_BLOCK} SEQ_MAX_TX_PER_BLOCK = ${SEQ_MAX_TX_PER_BLOCK} SEQ_MAX_TX_PER_CHECKPOINT = ${SEQ_MAX_TX_PER_CHECKPOINT} +P2P_MAX_PENDING_TX_COUNT = ${P2P_MAX_PENDING_TX_COUNT} SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER = ${SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER} SEQ_BLOCK_DURATION_MS = ${SEQ_BLOCK_DURATION_MS:-null} SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT = ${SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT:-null} diff --git a/spartan/terraform/deploy-aztec-infra/main.tf b/spartan/terraform/deploy-aztec-infra/main.tf index 125009ce7e70..71a835281c00 100644 --- a/spartan/terraform/deploy-aztec-infra/main.tf +++ b/spartan/terraform/deploy-aztec-infra/main.tf @@ -220,6 +220,7 @@ locals { "validator.node.env.SEQ_MIN_TX_PER_BLOCK" = var.SEQ_MIN_TX_PER_BLOCK "validator.node.env.SEQ_MAX_TX_PER_BLOCK" = var.SEQ_MAX_TX_PER_BLOCK "validator.node.env.SEQ_MAX_TX_PER_CHECKPOINT" = var.SEQ_MAX_TX_PER_CHECKPOINT + "validator.node.env.P2P_MAX_PENDING_TX_COUNT" = var.P2P_MAX_PENDING_TX_COUNT "validator.node.env.SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER" = var.SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER 
"validator.node.env.SEQ_BLOCK_DURATION_MS" = var.SEQ_BLOCK_DURATION_MS "validator.node.env.SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT" = var.SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT @@ -315,10 +316,11 @@ locals { } })] custom_settings = { - "nodeType" = "p2p-bootstrap" - "service.p2p.nodePortEnabled" = var.P2P_NODEPORT_ENABLED - "service.p2p.announcePort" = local.p2p_port_p2p_bootstrap - "service.p2p.port" = local.p2p_port_p2p_bootstrap + "nodeType" = "p2p-bootstrap" + "service.p2p.nodePortEnabled" = var.P2P_NODEPORT_ENABLED + "service.p2p.announcePort" = local.p2p_port_p2p_bootstrap + "service.p2p.port" = local.p2p_port_p2p_bootstrap + "node.env.P2P_MAX_PENDING_TX_COUNT" = var.P2P_MAX_PENDING_TX_COUNT } boot_node_host_path = "" bootstrap_nodes_path = "" @@ -402,6 +404,7 @@ locals { "node.node.env.P2P_GOSSIPSUB_DLO" = var.P2P_GOSSIPSUB_DLO "node.node.env.P2P_GOSSIPSUB_DHI" = var.P2P_GOSSIPSUB_DHI "node.node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE + "node.node.env.P2P_MAX_PENDING_TX_COUNT" = var.P2P_MAX_PENDING_TX_COUNT "node.node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS "node.node.env.TX_COLLECTION_FILE_STORE_URLS" = var.TX_COLLECTION_FILE_STORE_URLS "node.service.p2p.nodePortEnabled" = var.P2P_NODEPORT_ENABLED @@ -481,6 +484,7 @@ locals { "node.env.P2P_GOSSIPSUB_DLO" = var.P2P_GOSSIPSUB_DLO "node.env.P2P_GOSSIPSUB_DHI" = var.P2P_GOSSIPSUB_DHI "node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE + "node.env.P2P_MAX_PENDING_TX_COUNT" = var.P2P_MAX_PENDING_TX_COUNT "node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS "node.env.TX_FILE_STORE_ENABLED" = var.TX_FILE_STORE_ENABLED "node.env.TX_FILE_STORE_URL" = var.TX_FILE_STORE_URL @@ -515,6 +519,7 @@ locals { "node.proverRealProofs" = var.PROVER_REAL_PROOFS "node.env.BLOB_ALLOW_EMPTY_SOURCES" = var.BLOB_ALLOW_EMPTY_SOURCES "node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS + "node.env.P2P_MAX_PENDING_TX_COUNT" = var.P2P_MAX_PENDING_TX_COUNT 
"node.env.P2P_TX_POOL_DELETE_TXS_AFTER_REORG" = var.P2P_TX_POOL_DELETE_TXS_AFTER_REORG "node.secret.envEnabled" = true "node.env.FISHERMAN_MODE" = "true" @@ -564,6 +569,7 @@ locals { "node.env.P2P_GOSSIPSUB_DLO" = var.P2P_GOSSIPSUB_DLO "node.env.P2P_GOSSIPSUB_DHI" = var.P2P_GOSSIPSUB_DHI "node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE + "node.env.P2P_MAX_PENDING_TX_COUNT" = var.P2P_MAX_PENDING_TX_COUNT "node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS "node.env.TX_COLLECTION_FILE_STORE_URLS" = var.TX_COLLECTION_FILE_STORE_URLS } @@ -604,6 +610,7 @@ locals { "node.env.P2P_GOSSIPSUB_DLO" = var.P2P_GOSSIPSUB_DLO "node.env.P2P_GOSSIPSUB_DHI" = var.P2P_GOSSIPSUB_DHI "node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE + "node.env.P2P_MAX_PENDING_TX_COUNT" = var.P2P_MAX_PENDING_TX_COUNT "node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS "node.env.TX_COLLECTION_FILE_STORE_URLS" = var.TX_COLLECTION_FILE_STORE_URLS "node.env.BLOB_FILE_STORE_URLS" = var.BLOB_FILE_STORE_URLS @@ -641,6 +648,7 @@ locals { "node.env.P2P_GOSSIPSUB_DLO" = var.P2P_GOSSIPSUB_DLO "node.env.P2P_GOSSIPSUB_DHI" = var.P2P_GOSSIPSUB_DHI "node.env.P2P_DROP_TX_CHANCE" = var.P2P_DROP_TX_CHANCE + "node.env.P2P_MAX_PENDING_TX_COUNT" = var.P2P_MAX_PENDING_TX_COUNT "node.env.WS_NUM_HISTORIC_CHECKPOINTS" = var.WS_NUM_HISTORIC_CHECKPOINTS } boot_node_host_path = "node.env.BOOT_NODE_HOST" diff --git a/spartan/terraform/deploy-aztec-infra/variables.tf b/spartan/terraform/deploy-aztec-infra/variables.tf index 9bea5661f8bd..834b4d833217 100644 --- a/spartan/terraform/deploy-aztec-infra/variables.tf +++ b/spartan/terraform/deploy-aztec-infra/variables.tf @@ -389,6 +389,12 @@ variable "SEQ_MAX_TX_PER_CHECKPOINT" { default = null } +variable "P2P_MAX_PENDING_TX_COUNT" { + description = "Maximum number of pending txs the local mempool will hold before evictions kick in" + type = string + default = null +} + variable "SEQ_ENFORCE_TIME_TABLE" { description = "Whether to 
enforce the time table when building blocks" type = string diff --git a/spartan/terraform/deploy-metrics/main.tf b/spartan/terraform/deploy-metrics/main.tf index 3a38d21eada9..9bbefaeb698c 100644 --- a/spartan/terraform/deploy-metrics/main.tf +++ b/spartan/terraform/deploy-metrics/main.tf @@ -76,7 +76,7 @@ resource "helm_release" "aztec-gke-cluster" { upgrade_install = true dependency_update = true force_update = true - reuse_values = true + reuse_values = false # base values file values = [ diff --git a/spartan/terraform/gke-cluster/network-bench-bucket.tf b/spartan/terraform/gke-cluster/network-bench-bucket.tf new file mode 100644 index 000000000000..9554850415a0 --- /dev/null +++ b/spartan/terraform/gke-cluster/network-bench-bucket.tf @@ -0,0 +1,11 @@ +resource "google_storage_managed_folder" "aztec_testnet_network_bench_folder" { + bucket = google_storage_bucket.snapshots-bucket.name + name = "network_bench/" + force_destroy = true +} + +resource "google_storage_managed_folder_iam_policy" "aztec_testnet_network_bench_folder_policy" { + bucket = google_storage_managed_folder.aztec_testnet_network_bench_folder.bucket + managed_folder = google_storage_managed_folder.aztec_testnet_network_bench_folder.name + policy_data = data.google_iam_policy.all_users_storage_read.policy_data +} diff --git a/spartan/terraform/gke-cluster/snapshots.tf b/spartan/terraform/gke-cluster/snapshots.tf index ed5840aba99d..244636cb2a19 100644 --- a/spartan/terraform/gke-cluster/snapshots.tf +++ b/spartan/terraform/gke-cluster/snapshots.tf @@ -8,6 +8,13 @@ resource "google_storage_bucket" "snapshots-bucket" { log_object_prefix = "aztec-testnet" } + cors { + origin = ["*"] + method = ["GET", "HEAD"] + response_header = ["Content-Type"] + max_age_seconds = 3600 + } + autoclass { enabled = true terminal_storage_class = "ARCHIVE" diff --git a/yarn-project/archiver/src/archiver-sync.test.ts b/yarn-project/archiver/src/archiver-sync.test.ts index 1e4156ae8d57..c36b02c107a2 100644 --- 
a/yarn-project/archiver/src/archiver-sync.test.ts +++ b/yarn-project/archiver/src/archiver-sync.test.ts @@ -29,7 +29,7 @@ import { type MockProxy, mock } from 'jest-mock-extended'; import type { GetBlockReturnType } from 'viem'; import { Archiver, type ArchiverEmitter } from './archiver.js'; -import { L1ToL2MessagesNotReadyError } from './errors.js'; +import { BlockOrCheckpointSlotExpiredError, L1ToL2MessagesNotReadyError } from './errors.js'; import type { ArchiverInstrumentation } from './modules/instrumentation.js'; import { ArchiverL1Synchronizer } from './modules/l1_synchronizer.js'; import { KVArchiverDataStore } from './store/kv_archiver_store.js'; @@ -1522,7 +1522,7 @@ describe('Archiver Sync', () => { }); // Try to add the block for the past slot - should be rejected - await expect(archiver.addBlock(pastSlotBlocks[0])).rejects.toThrow(/past slot/); + await expect(archiver.addBlock(pastSlotBlocks[0])).rejects.toThrow(BlockOrCheckpointSlotExpiredError); }, 10_000); it('adds missing blocks when checkpoint has more blocks than local', async () => { @@ -1867,7 +1867,7 @@ describe('Archiver Sync', () => { totalManaUsed: 0n, feeAssetPriceModifier: 0n, }; - await archiver.setProposedCheckpoint(proposedCheckpoint); + await archiver.addProposedCheckpoint(proposedCheckpoint); // Advance L1 to block 2 (still in slot 1) — proposed checkpoint is still current fake.setL1BlockNumber(2n); @@ -1878,7 +1878,7 @@ describe('Archiver Sync', () => { expect(await archiver.getBlockNumber()).toEqual(lastProvisionalBlockNumber); // Proposed checkpoint should still be set - expect(await archiverStore.blockStore.getProposedCheckpointOnly()).toBeDefined(); + expect(await archiverStore.blockStore.getLastProposedCheckpoint()).toBeDefined(); // Proposed tip should be ahead of the checkpointed tip const tips = await archiver.getL2Tips(); @@ -1928,7 +1928,7 @@ describe('Archiver Sync', () => { totalManaUsed: 0n, feeAssetPriceModifier: 0n, }; - await 
archiver.setProposedCheckpoint(proposedCheckpoint); + await archiver.addProposedCheckpoint(proposedCheckpoint); // Advance L1 to block 4 (slot 2), ending slot 1 without checkpoint on L1 fake.setL1BlockNumber(4n); @@ -1948,7 +1948,7 @@ describe('Archiver Sync', () => { expect(await archiver.getSynchedCheckpointNumber()).toEqual(CheckpointNumber(1)); // Proposed checkpoint should be cleared, so proposed tip falls back to checkpointed tip - expect(await archiverStore.blockStore.getProposedCheckpointOnly()).toBeUndefined(); + expect(await archiverStore.blockStore.getLastProposedCheckpoint()).toBeUndefined(); const tips = await archiver.getL2Tips(); expect(tips.proposedCheckpoint.checkpoint.number).toEqual(tips.checkpointed.checkpoint.number); expect(tips.proposedCheckpoint.block.number).toEqual(tips.checkpointed.block.number); diff --git a/yarn-project/archiver/src/archiver.ts b/yarn-project/archiver/src/archiver.ts index 0e440d8c9697..046a5b0d66d5 100644 --- a/yarn-project/archiver/src/archiver.ts +++ b/yarn-project/archiver/src/archiver.ts @@ -25,12 +25,13 @@ import { getEpochAtSlot, getSlotAtNextL1Block, getSlotRangeForEpoch, + getTimestampForSlot, getTimestampRangeForEpoch, } from '@aztec/stdlib/epoch-helpers'; import { type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client'; import { type ArchiverConfig, mapArchiverConfig } from './config.js'; -import { BlockAlreadyCheckpointedError, NoBlobBodiesFoundError } from './errors.js'; +import { BlockAlreadyCheckpointedError, BlockOrCheckpointSlotExpiredError, NoBlobBodiesFoundError } from './errors.js'; import { validateAndLogHistoricalLogsAvailability } from './l1/validate_historical_logs.js'; import { validateAndLogTraceAvailability } from './l1/validate_trace.js'; import { ArchiverDataSourceBase } from './modules/data_source_base.js'; @@ -45,11 +46,20 @@ export type { ArchiverEmitter }; /** Request to add a block to the archiver, queued for processing by the sync loop. 
*/ type AddBlockRequest = { + type: 'block'; block: L2Block; resolve: () => void; reject: (err: Error) => void; }; +/** Request to add a proposed checkpoint to the archiver, queued for processing by the sync loop. */ +type AddProposedCheckpointRequest = { + type: 'checkpoint'; + checkpoint: ProposedCheckpointInput; + resolve: () => void; + reject: (err: Error) => void; +}; + export type ArchiverDeps = { telemetry?: TelemetryClient; blobClient: BlobClientInterface; @@ -75,8 +85,8 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra private initialSyncComplete: boolean = false; private initialSyncPromise: PromiseWithResolvers; - /** Queue of blocks to be added to the store, processed by the sync loop. */ - private blockQueue: AddBlockRequest[] = []; + /** Queue of blocks and checkpoints to be added to the store, processed by the sync loop. */ + private inboundQueue: (AddBlockRequest | AddProposedCheckpointRequest)[] = []; /** Helper to handle updates to the store */ private readonly updater: ArchiverDataStoreUpdater; @@ -209,6 +219,14 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra return this.runningPromise.trigger(); } + public trySyncImmediate() { + try { + return this.syncImmediate(); + } catch (err) { + this.log.error(`Failed to trigger immediate archiver sync: ${err}`, err); + } + } + /** * Queues a block to be added to the archiver store and triggers processing. * The block will be processed by the sync loop. @@ -217,63 +235,85 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra * @returns A promise that resolves when the block has been added to the store, or rejects on error. 
*/ public addBlock(block: L2Block): Promise { - return new Promise((resolve, reject) => { - this.blockQueue.push({ block, resolve, reject }); - this.log.debug(`Queued block ${block.number} for processing`); - // Trigger an immediate sync, but don't wait for it - the promise resolves when the block is processed - this.syncImmediate().catch(err => { - this.log.error(`Sync immediate call failed: ${err}`); - }); - }); + const promise = promiseWithResolvers(); + this.inboundQueue.push({ block, ...promise, type: 'block' }); + this.log.debug(`Queued block ${block.number} for processing`); + void this.trySyncImmediate(); + return promise.promise; } - public async setProposedCheckpoint(pending: ProposedCheckpointInput): Promise { - await this.updater.setProposedCheckpoint(pending); + /** + * Queues a new proposed checkpoint into the archiver store. + * Checks that the checkpoint is not for an L2 slot already synced from L1. + * Resolves once the checkpoint has been processed. + */ + public addProposedCheckpoint(pending: ProposedCheckpointInput): Promise { + const promise = promiseWithResolvers(); + this.inboundQueue.push({ checkpoint: pending, ...promise, type: 'checkpoint' }); + this.log.debug(`Queued checkpoint ${pending.checkpointNumber} for processing`); + void this.trySyncImmediate(); + return promise.promise; } /** - * Processes all queued blocks, adding them to the store. + * Processes all queued blocks and checkpoints, adding them to the store. * Called at the beginning of each sync iteration. - * Blocks are processed in the order they were queued. + * Items are processed in the order they were queued. 
*/ - private async processQueuedBlocks(): Promise { - if (this.blockQueue.length === 0) { + private async processInboundQueue(): Promise { + if (this.inboundQueue.length === 0) { return; } - // Take all blocks from the queue - const queuedItems = this.blockQueue.splice(0, this.blockQueue.length); - this.log.debug(`Processing ${queuedItems.length} queued block(s)`); + // Take all items from the queue + const queuedItems = this.inboundQueue.splice(0, this.inboundQueue.length); + this.log.debug(`Processing ${queuedItems.length} queued inbound items`); // Calculate slot threshold for validation const l1Timestamp = this.synchronizer.getL1Timestamp(); const slotAtNextL1Block = l1Timestamp === undefined ? undefined : getSlotAtNextL1Block(l1Timestamp, this.l1Constants); - // Process each block individually to properly resolve/reject each promise - for (const { block, resolve, reject } of queuedItems) { - const blockSlot = block.header.globalVariables.slotNumber; - if (slotAtNextL1Block !== undefined && blockSlot < slotAtNextL1Block) { + // Helpers for manipulating blocks and checkpoints in the queue + const getSlot: (item: AddBlockRequest | AddProposedCheckpointRequest) => SlotNumber = item => + item.type === 'block' ? item.block.header.globalVariables.slotNumber : item.checkpoint.header.slotNumber; + const getNumber: (item: AddBlockRequest | AddProposedCheckpointRequest) => number = item => + item.type === 'block' ? 
item.block.number : item.checkpoint.checkpointNumber; + + // Process each item individually to properly resolve/reject each promise + for (const item of queuedItems) { + const { resolve, reject, type } = item; + const itemSlot = getSlot(item); + const itemNumber = getNumber(item); + if (slotAtNextL1Block !== undefined && itemSlot < slotAtNextL1Block) { + const nextSlotTimestamp = getTimestampForSlot(slotAtNextL1Block, this.l1Constants); this.log.warn( - `Rejecting proposed block ${block.number} for past slot ${blockSlot} (current is ${slotAtNextL1Block})`, - { block: block.toBlockInfo(), l1Timestamp, slotAtNextL1Block }, + `Rejecting proposed ${type} ${itemNumber} for past slot ${itemSlot} (current ${slotAtNextL1Block})`, + { number: itemNumber, type, l1Timestamp, slotAtNextL1Block, nextSlotTimestamp }, ); - reject(new Error(`Block ${block.number} is for past slot ${blockSlot} (current is ${slotAtNextL1Block})`)); + reject(new BlockOrCheckpointSlotExpiredError(itemSlot, nextSlotTimestamp, l1Timestamp)); continue; } try { - const [durationMs] = await elapsed(() => this.updater.addProposedBlock(block)); - this.instrumentation.processNewProposedBlock(durationMs, block); - this.log.debug(`Added block ${block.number} to store`); + if (type === 'block') { + const [durationMs] = await elapsed(() => this.updater.addProposedBlock(item.block)); + this.instrumentation.processNewProposedBlock(durationMs, item.block); + } else { + await this.updater.addProposedCheckpoint(item.checkpoint); + } + this.log.debug(`Added ${type} ${itemNumber} to store`); resolve(); } catch (err: any) { if (err instanceof BlockAlreadyCheckpointedError) { - this.log.debug(`Proposed block ${block.number} matches already checkpointed block, ignoring late proposal`); + this.log.debug(`Proposed block ${itemNumber} matches already checkpointed block, ignoring late proposal`); resolve(); continue; } - this.log.error(`Failed to add block ${block.number} to store: ${err.message}`); + this.log.error(`Failed to 
add ${type} ${itemNumber} to store: ${err.message}`, err, { + number: itemNumber, + type, + }); reject(err); } } @@ -289,7 +329,7 @@ export class Archiver extends ArchiverDataSourceBase implements L2BlockSink, Tra @trackSpan('Archiver.sync') private async sync() { // Process any queued blocks first, before doing L1 sync - await this.processQueuedBlocks(); + await this.processInboundQueue(); // Now perform L1 sync await this.syncFromL1(); } diff --git a/yarn-project/archiver/src/config.ts b/yarn-project/archiver/src/config.ts index 7bae461e9110..8e6586d3fd9a 100644 --- a/yarn-project/archiver/src/config.ts +++ b/yarn-project/archiver/src/config.ts @@ -7,6 +7,7 @@ import { booleanConfigHelper, getConfigFromMappings, numberConfigHelper, + optionalNumberConfigHelper, } from '@aztec/foundation/config'; import { type ChainConfig, @@ -50,7 +51,7 @@ export const archiverConfigMappings: ConfigMappingsType = { }, archiverStoreMapSizeKb: { env: 'ARCHIVER_STORE_MAP_SIZE_KB', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), description: 'The maximum possible size of the archiver DB in KB. Overwrites the general dataStoreMapSizeKb.', }, skipValidateCheckpointAttestations: { diff --git a/yarn-project/archiver/src/errors.ts b/yarn-project/archiver/src/errors.ts index f9a021a4d296..1b42d82d66d6 100644 --- a/yarn-project/archiver/src/errors.ts +++ b/yarn-project/archiver/src/errors.ts @@ -1,14 +1,17 @@ +import type { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; import type { Fr } from '@aztec/foundation/schemas'; export class NoBlobBodiesFoundError extends Error { constructor(l2BlockNum: number) { super(`No blob bodies found for block ${l2BlockNum}`); + this.name = 'NoBlobBodiesFoundError'; } } export class BlockNumberNotSequentialError extends Error { constructor(newBlockNumber: number, previous: number | undefined) { super(`Cannot insert new block ${newBlockNumber} given previous block number is ${previous ?? 
'undefined'}`); + this.name = 'BlockNumberNotSequentialError'; } } @@ -22,18 +25,29 @@ export class InitialCheckpointNumberNotSequentialError extends Error { previousCheckpointNumber ?? 'undefined' }`, ); + this.name = 'InitialCheckpointNumberNotSequentialError'; } } -export class CheckpointNumberNotSequentialError extends Error { +export class BlockCheckpointNumberNotSequentialError extends Error { constructor( - newCheckpointNumber: number, - previous: number | undefined, - source: 'confirmed' | 'proposed' = 'confirmed', + blockNumber: BlockNumber, + blockCheckpointNumber: CheckpointNumber, + previous: CheckpointNumber | undefined, ) { super( - `Cannot insert new checkpoint ${newCheckpointNumber} given previous ${source} checkpoint number is ${previous ?? 'undefined'}`, + `Cannot insert new block ${blockNumber} for checkpoint ${blockCheckpointNumber} given previous checkpoint number is ${previous ?? 'undefined'}`, + ); + this.name = 'BlockCheckpointNumberNotSequentialError'; + } +} + +export class CheckpointNumberNotSequentialError extends Error { + constructor(newCheckpointNumber: CheckpointNumber, previous: CheckpointNumber | undefined) { + super( + `Cannot insert new checkpoint ${newCheckpointNumber} given previous checkpoint number is ${previous ?? 'undefined'}`, ); + this.name = 'CheckpointNumberNotSequentialError'; } } @@ -42,6 +56,7 @@ export class BlockIndexNotSequentialError extends Error { super( `Cannot insert new block at checkpoint index ${newBlockIndex} given previous block index is ${previousBlockIndex ?? 'undefined'}`, ); + this.name = 'BlockIndexNotSequentialError'; } } @@ -55,18 +70,21 @@ export class BlockArchiveNotConsistentError extends Error { super( `Cannot insert new block number ${newBlockNumber} with archive ${newBlockArchive.toString()} previous block number is ${previousBlockNumber ?? 'undefined'}, previous archive is ${previousBlockArchive?.toString() ?? 
'undefined'}`, ); + this.name = 'BlockArchiveNotConsistentError'; } } export class CheckpointNotFoundError extends Error { constructor(checkpointNumber: number) { super(`Failed to find expected checkpoint number ${checkpointNumber}`); + this.name = 'CheckpointNotFoundError'; } } export class BlockNotFoundError extends Error { constructor(blockNumber: number) { super(`Failed to find expected block number ${blockNumber}`); + this.name = 'BlockNotFoundError'; } } @@ -119,19 +137,34 @@ export class ProposedCheckpointStaleError extends Error { } } -/** Thrown when a proposed checkpoint number is not the expected confirmed + 1. */ +/** Thrown when a proposed checkpoint number is not the expected latestTip + 1. */ export class ProposedCheckpointNotSequentialError extends Error { constructor( public readonly proposedCheckpointNumber: number, - public readonly confirmedCheckpointNumber: number, + public readonly latestTipNumber: number, ) { super( - `Proposed checkpoint ${proposedCheckpointNumber} is not sequential: expected ${confirmedCheckpointNumber + 1} (confirmed + 1)`, + `Proposed checkpoint ${proposedCheckpointNumber} is not sequential: expected ${latestTipNumber + 1} (latest tip + 1, where tip is highest of confirmed or pending)`, ); this.name = 'ProposedCheckpointNotSequentialError'; } } +/** Thrown when a proposed checkpoint or block L2 slot has already expired on L1. */ +export class BlockOrCheckpointSlotExpiredError extends Error { + constructor( + public readonly slot: number, + public readonly nextSlotStart: bigint, + public readonly l1TimestampSynced: bigint | undefined, + ) { + super( + `Checkpoint or block for slot ${slot} is expired: L1 synced to ${l1TimestampSynced} which is past the next slot start ${nextSlotStart}. 
` + + `If the checkpoint still lands via a late L1 tx, the archiver will pick it up via normal L1-sync (not the pending-queue shortcut).`, + ); + this.name = 'BlockOrCheckpointSlotExpiredError'; + } +} + /** Thrown when attempting to promote a proposed checkpoint but no proposed checkpoint exists in the store. */ export class NoProposedCheckpointToPromoteError extends Error { constructor() { diff --git a/yarn-project/archiver/src/l1/calldata_retriever.test.ts b/yarn-project/archiver/src/l1/calldata_retriever.test.ts index 0d1c78ef707c..36f1db400daf 100644 --- a/yarn-project/archiver/src/l1/calldata_retriever.test.ts +++ b/yarn-project/archiver/src/l1/calldata_retriever.test.ts @@ -11,7 +11,7 @@ import { withHexPrefix } from '@aztec/foundation/string'; import { RollupAbi } from '@aztec/l1-artifacts'; import { Signature } from '@aztec/stdlib/block'; import { GasFees } from '@aztec/stdlib/gas'; -import { ConsensusPayload, SignatureDomainSeparator } from '@aztec/stdlib/p2p'; +import { ConsensusPayload, getHashedSignaturePayloadTypedData } from '@aztec/stdlib/p2p'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { jest } from '@jest/globals'; @@ -88,6 +88,8 @@ describe('CalldataRetriever', () => { beforeEach(() => { txHash = '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'; publicClient = mock(); + // CalldataRetriever reads `publicClient.chain.id` to build the EIP-712 signing context. 
+ (publicClient as unknown as { chain: { id: number } }).chain = { id: 1 }; debugClient = mock(); logger = createLogger('test:calldata_retriever'); instrumentation = mock(); @@ -376,12 +378,14 @@ describe('CalldataRetriever', () => { const tx = makeMulticall3Transaction([{ target: rollupAddress.toString(), callData: proposeCalldata }]); publicClient.getTransaction.mockResolvedValue(tx); - // Compute the expected payloadDigest using ConsensusPayload (same logic as the validator) - // Note: feeAssetPriceModifier is 0n in makeProposeCalldata + // Compute the expected payloadDigest using the same EIP-712 typed data hash + // that CalldataRetriever.computePayloadDigest uses under the hood. const checkpointHeader = CheckpointHeader.fromViem(header); - const consensusPayload = new ConsensusPayload(checkpointHeader, archiveRoot, feeAssetPriceModifier); - const payloadToSign = consensusPayload.getPayloadToSign(SignatureDomainSeparator.checkpointAttestation); - const expectedPayloadDigest = keccak256(payloadToSign); + const consensusPayload = new ConsensusPayload(checkpointHeader, archiveRoot, feeAssetPriceModifier, { + chainId: 1, + rollupAddress, + }); + const expectedPayloadDigest = getHashedSignaturePayloadTypedData(consensusPayload).toString() as Hex; // Mock only attestationsHash computation; use real payloadDigest jest diff --git a/yarn-project/archiver/src/l1/calldata_retriever.ts b/yarn-project/archiver/src/l1/calldata_retriever.ts index 98bcbe52866a..22acd1df7c02 100644 --- a/yarn-project/archiver/src/l1/calldata_retriever.ts +++ b/yarn-project/archiver/src/l1/calldata_retriever.ts @@ -7,7 +7,7 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import type { Logger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; import { CommitteeAttestation } from '@aztec/stdlib/block'; -import { ConsensusPayload, SignatureDomainSeparator } from '@aztec/stdlib/p2p'; +import { ConsensusPayload, getHashedSignaturePayloadTypedData } from 
'@aztec/stdlib/p2p'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { @@ -61,6 +61,13 @@ export class CalldataRetriever { private readonly rollupAddress: EthAddress, ) {} + private getSignatureContext() { + return { + chainId: this.publicClient.chain.id, + rollupAddress: this.rollupAddress, + }; + } + /** * Gets checkpoint header and metadata from the calldata of an L1 transaction. * Tries multicall3 decoding, falls back to trace-based extraction. @@ -466,9 +473,13 @@ export class CalldataRetriever { /** Computes the keccak256 payload digest from the checkpoint header, archive root, and fee asset price modifier. */ private computePayloadDigest(header: CheckpointHeader, archiveRoot: Fr, feeAssetPriceModifier: bigint): Hex { - const consensusPayload = new ConsensusPayload(header, archiveRoot, feeAssetPriceModifier); - const payloadToSign = consensusPayload.getPayloadToSign(SignatureDomainSeparator.checkpointAttestation); - return keccak256(payloadToSign); + const consensusPayload = new ConsensusPayload( + header, + archiveRoot, + feeAssetPriceModifier, + this.getSignatureContext(), + ); + return getHashedSignaturePayloadTypedData(consensusPayload).toString(); } /** diff --git a/yarn-project/archiver/src/modules/data_source_base.ts b/yarn-project/archiver/src/modules/data_source_base.ts index c05bec3cde24..beb52301d99e 100644 --- a/yarn-project/archiver/src/modules/data_source_base.ts +++ b/yarn-project/archiver/src/modules/data_source_base.ts @@ -127,6 +127,22 @@ export abstract class ArchiverDataSourceBase return this.store.getCheckpointedBlocks(from, limit); } + public getCheckpointData(checkpointNumber: CheckpointNumber): Promise { + return this.store.getCheckpointData(checkpointNumber); + } + + public getCheckpointDataRange(from: CheckpointNumber, limit: number): Promise { + return this.store.getCheckpointDataRange(from, limit); + } + + public getCheckpointNumberBySlot(slot: SlotNumber): Promise { + return 
this.store.getCheckpointNumberBySlot(slot); + } + + public getBlockDataWithCheckpointContext(blockNumber: BlockNumber) { + return this.store.getBlockDataWithCheckpointContext(blockNumber); + } + public getBlockHeaderByHash(blockHash: BlockHash): Promise { return this.store.getBlockHeaderByHash(blockHash); } @@ -163,12 +179,12 @@ export abstract class ArchiverDataSourceBase return this.store.getSettledTxReceipt(txHash, this.l1Constants); } - public getProposedCheckpoint(): Promise { - return this.store.getProposedCheckpoint(); + public getLastCheckpoint(): Promise { + return this.store.getLastCheckpoint(); } - public getProposedCheckpointOnly(): Promise { - return this.store.getProposedCheckpointOnly(); + public getLastProposedCheckpoint(): Promise { + return this.store.getLastProposedCheckpoint(); } public isPendingChainInvalid(): Promise { @@ -263,6 +279,7 @@ export abstract class ArchiverDataSourceBase checkpoint.header, blocksForCheckpoint, checkpoint.checkpointNumber, + checkpoint.feeAssetPriceModifier, ); return new PublishedCheckpoint(fullCheckpoint, checkpoint.l1, checkpoint.attestations); } diff --git a/yarn-project/archiver/src/modules/data_store_updater.ts b/yarn-project/archiver/src/modules/data_store_updater.ts index f4f67a96b4c5..f754f82aa8b3 100644 --- a/yarn-project/archiver/src/modules/data_store_updater.ts +++ b/yarn-project/archiver/src/modules/data_store_updater.ts @@ -100,6 +100,7 @@ export class ArchiverDataStoreUpdater { attestations: CommitteeAttestation[]; checkpoint: PublishedCheckpoint; }, + evictProposedFrom?: CheckpointNumber, ): Promise { for (const checkpoint of checkpoints) { validateCheckpoint(checkpoint.checkpoint, { rollupManaLimit: this.opts?.rollupManaLimit }); @@ -126,14 +127,17 @@ export class ArchiverDataStoreUpdater { this.store.addLogs(newBlocks), // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them ...newBlocks.map(block => this.addContractDataToDb(block)), - // 
Promote the proposed checkpoint if requested + // Promote the proposed checkpoint if requested (uses explicit checkpoint number) promoteProposed ? this.store.promoteProposedToCheckpointed( + promoteProposed.checkpoint.checkpoint.number, promoteProposed.l1, promoteProposed.attestations, promoteProposed.checkpoint.checkpoint.archive.root, ) : undefined, + // Evict pending checkpoints that diverged from what L1 mined + evictProposedFrom !== undefined ? this.store.evictProposedCheckpointsFrom(evictProposedFrom) : undefined, ]); await this.l2TipsCache?.refresh(); @@ -142,9 +146,9 @@ export class ArchiverDataStoreUpdater { return result; } - public async setProposedCheckpoint(proposedCheckpoint: ProposedCheckpointInput) { + public async addProposedCheckpoint(proposedCheckpoint: ProposedCheckpointInput) { const result = await this.store.transactionAsync(async () => { - await this.store.setProposedCheckpoint(proposedCheckpoint); + await this.store.addProposedCheckpoint(proposedCheckpoint); await this.l2TipsCache?.refresh(); }); @@ -245,8 +249,8 @@ export class ArchiverDataStoreUpdater { const result = await this.removeBlocksAfter(blockNumber); - // Clear the proposed checkpoint if it exists, since its blocks have been pruned - await this.store.deleteProposedCheckpoint(); + // Clear all pending proposed checkpoints since their blocks have been pruned + await this.store.deleteProposedCheckpoints(); await this.l2TipsCache?.refresh(); return result; diff --git a/yarn-project/archiver/src/modules/l1_synchronizer.ts b/yarn-project/archiver/src/modules/l1_synchronizer.ts index d2cbb5924e6a..52348ffd1f47 100644 --- a/yarn-project/archiver/src/modules/l1_synchronizer.ts +++ b/yarn-project/archiver/src/modules/l1_synchronizer.ts @@ -10,6 +10,7 @@ import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/br import { Buffer16, Buffer32 } from '@aztec/foundation/buffer'; import { partition, pick } from '@aztec/foundation/collection'; import { Fr } from 
'@aztec/foundation/curves/bn254'; +import { EthAddress } from '@aztec/foundation/eth-address'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { retryTimes } from '@aztec/foundation/retry'; import { count } from '@aztec/foundation/string'; @@ -19,6 +20,7 @@ import { type ArchiverEmitter, L2BlockSourceEvents, type ValidateCheckpointResul import { Checkpoint, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import { type L1RollupConstants, getEpochAtSlot, getSlotAtNextL1Block } from '@aztec/stdlib/epoch-helpers'; import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging'; +import type { CoordinationSignatureContext } from '@aztec/stdlib/p2p'; import { type Traceable, type Tracer, execInSpan, trackSpan } from '@aztec/telemetry-client'; import { InitialCheckpointNumberNotSequentialError } from '../errors.js'; @@ -111,6 +113,13 @@ export class ArchiverL1Synchronizer implements Traceable { return this.l1Timestamp; } + private getSignatureContext(): CoordinationSignatureContext { + return { + chainId: this.publicClient.chain.id, + rollupAddress: EthAddress.fromString(this.rollup.address), + }; + } + /** Checks that the ethereum node we are connected to has a latest timestamp no more than the allowed drift. Throw if not. */ public async testEthereumNodeSynced(): Promise { const maxAllowedDelay = this.config.maxAllowedEthClientDriftSeconds; @@ -777,11 +786,14 @@ export class ArchiverL1Synchronizer implements Traceable { }, ); - // Check if the last checkpoint matches the proposed one (so we can skip blob fetch). - // We only check the last one because the proposed checkpoint is always the most recent one, - // and if it's in a multi-checkpoint batch it will always be last (sorted by L1 block number). + // Check if the last checkpoint matches a local pending entry (so we can skip blob fetch). + // We only check the last one; if it matches, the blob fetch is skipped for that entry. 
+ // TODO(palla/pipelining): We may have more than a single checkpoint to promote const lastCalldataCheckpoint = calldataCheckpoints[calldataCheckpoints.length - 1]; - const checkpointToPromote = await this.tryBuildPublishedCheckpointFromProposed(lastCalldataCheckpoint); + const promoteResult = await this.tryBuildPublishedCheckpointFromProposed(lastCalldataCheckpoint); + const checkpointToPromote = promoteResult && !('diverged' in promoteResult) ? promoteResult : undefined; + const evictProposedFrom = + promoteResult && 'diverged' in promoteResult ? promoteResult.fromCheckpointNumber : undefined; // Then fetch blobs in parallel and build the full published checkpoints const toFetchBlobs = checkpointToPromote ? calldataCheckpoints.slice(0, -1) : calldataCheckpoints; @@ -809,7 +821,13 @@ export class ArchiverL1Synchronizer implements Traceable { for (const published of publishedCheckpoints) { const validationResult = this.config.skipValidateCheckpointAttestations ? { valid: true as const } - : await validateCheckpointAttestations(published, this.epochCache, this.l1Constants, this.log); + : await validateCheckpointAttestations( + published, + this.epochCache, + this.l1Constants, + this.getSignatureContext(), + this.log, + ); // Only update the validation result if it has changed, so we can keep track of the first invalid checkpoint // in case there is a sequence of more than one invalid checkpoint, as we need to invalidate the first one. @@ -904,6 +922,7 @@ export class ArchiverL1Synchronizer implements Traceable { attestations: lastCalldataCheckpoint.attestations, checkpoint: maybeValidCheckpointToPromote, }, + evictProposedFrom, ), ), ); @@ -975,17 +994,24 @@ export class ArchiverL1Synchronizer implements Traceable { return { ...rollupStatus, lastRetrievedCheckpoint, lastL1BlockWithCheckpoint }; } - /** Checks if this checkpoint matches the local proposed one, and if so, loads local data to build a synthetic published checkpoint. 
*/ + /** + * Checks if a specific checkpoint matches a local pending entry, and if so, loads local data to build + * a synthetic published checkpoint (skipping blob fetch). + * + * Returns { diverged: true, fromCheckpointNumber } when the L1 checkpoint does NOT match local pending + * data for that number, so the caller can evict the entire pending suffix >= fromCheckpointNumber + * (those entries chain off the now-invalid local state) within the same addCheckpoints transaction. + */ private async tryBuildPublishedCheckpointFromProposed( calldataCheckpoint: RetrievedCheckpointFromCalldata | undefined, - ): Promise { - const proposed = await this.store.getProposedCheckpointOnly(); - if ( - this.config.skipPromoteProposedCheckpointDuringL1Sync || - !proposed || - !calldataCheckpoint || - proposed.checkpointNumber !== calldataCheckpoint.checkpointNumber - ) { + ): Promise { + if (this.config.skipPromoteProposedCheckpointDuringL1Sync || !calldataCheckpoint) { + return undefined; + } + + // Look up the specific pending entry for the checkpoint being mined, not just the tip + const proposed = await this.store.getProposedCheckpointByNumber(calldataCheckpoint.checkpointNumber); + if (!proposed) { return undefined; } @@ -1004,7 +1030,8 @@ export class ArchiverL1Synchronizer implements Traceable { calldataArchiveRoot: calldataCheckpoint.archiveRoot.toString(), }, ); - return undefined; + // Return a divergence signal so the caller can evict pending >= this number + return { diverged: true, fromCheckpointNumber: proposed.checkpointNumber }; } this.log.debug( diff --git a/yarn-project/archiver/src/modules/validation.test.ts b/yarn-project/archiver/src/modules/validation.test.ts index 0bfeb50f1566..50fa30fc5efa 100644 --- a/yarn-project/archiver/src/modules/validation.test.ts +++ b/yarn-project/archiver/src/modules/validation.test.ts @@ -7,6 +7,7 @@ import { Signature } from '@aztec/foundation/eth-signature'; import { type Logger, createLogger } from '@aztec/foundation/log'; 
import { CommitteeAttestation, EthAddress } from '@aztec/stdlib/block'; import { Checkpoint } from '@aztec/stdlib/checkpoint'; +import { TEST_COORDINATION_SIGNATURE_CONTEXT } from '@aztec/stdlib/testing'; import { type MockProxy, mock } from 'jest-mock-extended'; import assert from 'node:assert'; @@ -58,7 +59,13 @@ describe('validateCheckpointAttestations', () => { it('validates a checkpoint if no committee is found', async () => { const checkpoint = await makeCheckpoint([], []); - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); expect(result.valid).toBe(true); expect(epochCache.getCommitteeForEpoch).toHaveBeenCalledWith(EpochNumber(0)); @@ -66,7 +73,13 @@ describe('validateCheckpointAttestations', () => { it('validates a checkpoint with no attestations if no committee is found', async () => { const checkpoint = await makeCheckpoint(signers, committee); - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); expect(result.valid).toBe(true); expect(epochCache.getCommitteeForEpoch).toHaveBeenCalledWith(EpochNumber(0)); @@ -76,7 +89,13 @@ describe('validateCheckpointAttestations', () => { // This should already be covered by the case of empty committee epochCache.isEscapeHatchOpen.mockResolvedValue(true); const checkpoint = await makeCheckpoint(signers, committee); - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); expect(result.valid).toBe(true); 
expect(epochCache.isEscapeHatchOpen).not.toHaveBeenCalled(); }); @@ -90,23 +109,44 @@ describe('validateCheckpointAttestations', () => { it('uses feeAssetPriceModifier when recovering attestors', async () => { const checkpoint = await makeCheckpoint(signers.slice(0, 4), committee, 1, 1n); - const attestationInfos = getAttestationInfoFromPublishedCheckpoint(checkpoint); + const attestationInfos = getAttestationInfoFromPublishedCheckpoint( + checkpoint, + TEST_COORDINATION_SIGNATURE_CONTEXT, + ); expect(attestationInfos.filter(a => a.status === 'recovered-from-signature').length).toBe(4); - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); expect(result.valid).toBe(true); }); it('requests committee for the correct epoch', async () => { const checkpoint = await makeCheckpoint(signers, committee, 28); - await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); expect(epochCache.getCommitteeForEpoch).toHaveBeenCalledWith(EpochNumber(2)); }); it('fails if there is an attestation from a non-committee member', async () => { const badSigner = Secp256k1Signer.random(); const checkpoint = await makeCheckpoint([...signers, badSigner], [...committee, badSigner.address]); - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); assert(!result.valid); assert(result.reason === 'invalid-attestation'); expect(result.checkpoint.checkpointNumber).toEqual(checkpoint.checkpoint.number); @@ -118,7 +158,13 @@ describe('validateCheckpointAttestations', () 
=> { it('fails if there is an empty attestation', async () => { const checkpoint = await makeCheckpoint(signers.slice(0, 4), committee); checkpoint.attestations[1] = new CommitteeAttestation(EthAddress.ZERO, Signature.empty()); - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); assert(!result.valid); assert(result.reason === 'invalid-attestation'); expect(result.checkpoint.checkpointNumber).toEqual(checkpoint.checkpoint.number); @@ -141,10 +187,16 @@ describe('validateCheckpointAttestations', () => { checkpoint.attestations[0] = new CommitteeAttestation(EthAddress.ZERO, invalidSig); // Verify that the invalid signature is detected - const attestations = getAttestationInfoFromPublishedCheckpoint(checkpoint); + const attestations = getAttestationInfoFromPublishedCheckpoint(checkpoint, TEST_COORDINATION_SIGNATURE_CONTEXT); expect(attestations[0].status).toBe('invalid-signature'); - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); assert(!result.valid); assert(result.reason === 'invalid-attestation'); expect(result.checkpoint.checkpointNumber).toEqual(checkpoint.checkpoint.number); @@ -162,10 +214,16 @@ describe('validateCheckpointAttestations', () => { checkpoint.attestations[2] = new CommitteeAttestation(original.address, flipped); // Verify the flipped signature is detected as invalid - const attestations = getAttestationInfoFromPublishedCheckpoint(checkpoint); + const attestations = getAttestationInfoFromPublishedCheckpoint(checkpoint, TEST_COORDINATION_SIGNATURE_CONTEXT); expect(attestations[2].status).toBe('invalid-signature'); - const result = await 
validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); assert(!result.valid); assert(result.reason === 'invalid-attestation'); expect(result.checkpoint.checkpointNumber).toEqual(checkpoint.checkpoint.number); @@ -185,7 +243,13 @@ describe('validateCheckpointAttestations', () => { // Index 2 is a valid attestation from signers[2] - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); assert(!result.valid); assert(result.reason === 'invalid-attestation'); expect(result.invalidIndex).toBe(1); // Should be 1 (the original index), not 0 @@ -193,7 +257,13 @@ describe('validateCheckpointAttestations', () => { it('returns false if insufficient attestations', async () => { const checkpoint = await makeCheckpoint(signers.slice(0, 2), committee); - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); assert(!result.valid); expect(result.reason).toBe('insufficient-attestations'); expect(result.checkpoint.checkpointNumber).toEqual(checkpoint.checkpoint.number); @@ -203,7 +273,13 @@ describe('validateCheckpointAttestations', () => { it('returns true if all attestations are valid and sufficient', async () => { const checkpoint = await makeCheckpoint(signers.slice(0, 4), committee); - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); 
expect(result.valid).toBe(true); }); @@ -217,7 +293,13 @@ describe('validateCheckpointAttestations', () => { checkpoint.attestations[1] = checkpoint.attestations[2]; checkpoint.attestations[2] = temp; - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); assert(!result.valid); assert(result.reason === 'invalid-attestation'); expect(result.checkpoint.checkpointNumber).toEqual(checkpoint.checkpoint.number); @@ -230,7 +312,13 @@ describe('validateCheckpointAttestations', () => { it('validates a checkpoint if escape hatch is open', async () => { epochCache.isEscapeHatchOpen.mockResolvedValue(true); const checkpoint = await makeCheckpoint(signers, committee); - const result = await validateCheckpointAttestations(checkpoint, epochCache, constants, logger); + const result = await validateCheckpointAttestations( + checkpoint, + epochCache, + constants, + TEST_COORDINATION_SIGNATURE_CONTEXT, + logger, + ); expect(result.valid).toBe(true); expect(epochCache.isEscapeHatchOpen).toHaveBeenCalledWith(EpochNumber(0)); }); diff --git a/yarn-project/archiver/src/modules/validation.ts b/yarn-project/archiver/src/modules/validation.ts index 726054151729..ffb6c0f57c7f 100644 --- a/yarn-project/archiver/src/modules/validation.ts +++ b/yarn-project/archiver/src/modules/validation.ts @@ -10,7 +10,7 @@ import { } from '@aztec/stdlib/block'; import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import { type L1RollupConstants, computeQuorum, getEpochAtSlot } from '@aztec/stdlib/epoch-helpers'; -import { ConsensusPayload } from '@aztec/stdlib/p2p'; +import { ConsensusPayload, type CoordinationSignatureContext } from '@aztec/stdlib/p2p'; export type { ValidateCheckpointResult }; @@ -18,11 +18,11 @@ export type { ValidateCheckpointResult }; * Extracts attestation information from a 
published checkpoint. * Returns info for each attestation, preserving array indices. */ -export function getAttestationInfoFromPublishedCheckpoint({ - checkpoint, - attestations, -}: PublishedCheckpoint): AttestationInfo[] { - const payload = ConsensusPayload.fromCheckpoint(checkpoint); +export function getAttestationInfoFromPublishedCheckpoint( + { checkpoint, attestations }: PublishedCheckpoint, + signatureContext: CoordinationSignatureContext, +): AttestationInfo[] { + const payload = ConsensusPayload.fromCheckpoint(checkpoint, signatureContext); return getAttestationInfoFromPayload(payload, attestations); } @@ -34,9 +34,10 @@ export async function validateCheckpointAttestations( publishedCheckpoint: PublishedCheckpoint, epochCache: EpochCache, constants: Pick, + signatureContext: CoordinationSignatureContext, logger?: Logger, ): Promise { - const attestorInfos = getAttestationInfoFromPublishedCheckpoint(publishedCheckpoint); + const attestorInfos = getAttestationInfoFromPublishedCheckpoint(publishedCheckpoint, signatureContext); const attestors = compactArray(attestorInfos.map(info => ('address' in info ? 
info.address : undefined))); const { checkpoint, attestations } = publishedCheckpoint; const headerHash = checkpoint.header.hash(); diff --git a/yarn-project/archiver/src/store/block_store.ts b/yarn-project/archiver/src/store/block_store.ts index 36db8ce0a539..4da7db24cbef 100644 --- a/yarn-project/archiver/src/store/block_store.ts +++ b/yarn-project/archiver/src/store/block_store.ts @@ -10,6 +10,7 @@ import type { AztecAsyncKVStore, AztecAsyncMap, AztecAsyncSingleton, Range } fro import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import { type BlockData, + type BlockDataWithCheckpointContext, BlockHash, Body, CheckpointedL2Block, @@ -45,6 +46,7 @@ import { import { BlockAlreadyCheckpointedError, BlockArchiveNotConsistentError, + BlockCheckpointNumberNotSequentialError, BlockIndexNotSequentialError, BlockNotFoundError, BlockNumberNotSequentialError, @@ -56,7 +58,6 @@ import { ProposedCheckpointArchiveRootMismatchError, ProposedCheckpointNotSequentialError, ProposedCheckpointPromotionNotSequentialError, - ProposedCheckpointStaleError, } from '../errors.js'; export { TxReceipt, type TxEffect, type TxHash } from '@aztec/stdlib/tx'; @@ -84,6 +85,7 @@ type CommonCheckpointStorage = { type CheckpointStorage = CommonCheckpointStorage & { l1: Buffer; attestations: Buffer[]; + feeAssetPriceModifier: string; }; /** Storage format for a proposed checkpoint (attested but not yet L1-confirmed). */ @@ -101,7 +103,10 @@ export class BlockStore { /** Map block number to block data */ #blocks: AztecAsyncMap; - /** Map checkpoint number to checkpoint data */ + /** Map keyed by checkpoint number holding proposed (locally-validated, not yet L1-confirmed) checkpoints. */ + #proposedCheckpoints: AztecAsyncMap; + + /** Map checkpoint number to checkpoint data for mined checkpoints only */ #checkpoints: AztecAsyncMap; /** Map slot number to checkpoint number, for looking up checkpoints by slot range. 
*/ @@ -134,9 +139,6 @@ export class BlockStore { /** Index mapping block archive to block number */ #blockArchiveIndex: AztecAsyncMap; - /** Singleton: assumes max 1-deep pipeline. For deeper pipelining, replace with a map keyed by checkpoint number. */ - #proposedCheckpoint: AztecAsyncSingleton; - #log = createLogger('archiver:block_store'); constructor(private db: AztecAsyncKVStore) { @@ -152,7 +154,7 @@ export class BlockStore { this.#pendingChainValidationStatus = db.openSingleton('archiver_pending_chain_validation_status'); this.#checkpoints = db.openMap('archiver_checkpoints'); this.#slotToCheckpoint = db.openMap('archiver_slot_to_checkpoint'); - this.#proposedCheckpoint = db.openSingleton('proposed_checkpoint_data'); + this.#proposedCheckpoints = db.openMap('archiver_proposed_checkpoints'); } /** @@ -188,8 +190,7 @@ export class BlockStore { // Extract the latest block and checkpoint numbers const previousBlockNumber = await this.getLatestL2BlockNumber(); - const proposedCheckpointNumber = await this.getProposedCheckpointNumber(); - const previousCheckpointNumber = await this.getLatestCheckpointNumber(); + const latestCheckpointNumber = await this.getLatestCheckpointNumber(); // Verify we're not overwriting checkpointed blocks const lastCheckpointedBlockNumber = await this.getCheckpointedL2BlockNumber(); @@ -207,19 +208,14 @@ export class BlockStore { throw new BlockNumberNotSequentialError(blockNumber, previousBlockNumber); } - // The same check as above but for checkpoints. Accept the block if either the confirmed - // checkpoint or the pending (locally validated but not yet confirmed) checkpoint matches. + // Accept the block if either the confirmed checkpoint or a pending checkpoint matches + // the expected predecessor. We look for a pending entry at exactly blockCheckpointNumber - 1. 
const expectedCheckpointNumber = blockCheckpointNumber - 1; - if ( - !opts.force && - previousCheckpointNumber !== expectedCheckpointNumber && - proposedCheckpointNumber !== expectedCheckpointNumber - ) { - const [reported, source]: [CheckpointNumber, 'confirmed' | 'proposed'] = - proposedCheckpointNumber > previousCheckpointNumber - ? [proposedCheckpointNumber, 'proposed'] - : [previousCheckpointNumber, 'confirmed']; - throw new CheckpointNumberNotSequentialError(blockCheckpointNumber, reported, source); + const hasPendingAtExpected = await this.#proposedCheckpoints.hasAsync(expectedCheckpointNumber); + if (!opts.force && latestCheckpointNumber !== expectedCheckpointNumber && !hasPendingAtExpected) { + const [latestPendingKey] = await toArray(this.#proposedCheckpoints.keysAsync({ reverse: true, limit: 1 })); + const previous = CheckpointNumber(Math.max(latestCheckpointNumber, latestPendingKey ?? 0)); + throw new BlockCheckpointNumberNotSequentialError(blockNumber, blockCheckpointNumber, previous); } // Extract the previous block if there is one and see if it is for the same checkpoint or not @@ -322,15 +318,15 @@ export class BlockStore { checkpointNumber: checkpoint.checkpoint.number, startBlock: checkpoint.checkpoint.blocks[0].number, blockCount: checkpoint.checkpoint.blocks.length, + feeAssetPriceModifier: checkpoint.checkpoint.feeAssetPriceModifier.toString(), }); // Update slot-to-checkpoint index await this.#slotToCheckpoint.set(checkpoint.checkpoint.header.slotNumber, checkpoint.checkpoint.number); - } - // Clear the proposed checkpoint if any of the confirmed checkpoints match or supersede it - const lastConfirmedCheckpointNumber = checkpoints[checkpoints.length - 1].checkpoint.number; - await this.clearProposedCheckpointIfSuperseded(lastConfirmedCheckpointNumber); + // Remove proposed checkpoint if it exists, since L1 is authoritative + await this.#proposedCheckpoints.delete(checkpoint.checkpoint.number); + } await 
this.#lastSynchedL1Block.set(checkpoints[checkpoints.length - 1].l1.blockNumber); return true; @@ -374,6 +370,7 @@ export class BlockStore { checkpointNumber: incoming.checkpoint.number, startBlock: incoming.checkpoint.blocks[0].number, blockCount: incoming.checkpoint.blocks.length, + feeAssetPriceModifier: incoming.checkpoint.feeAssetPriceModifier.toString(), }); // Update the sync point to reflect the new L1 block await this.#lastSynchedL1Block.set(incoming.l1.blockNumber); @@ -391,24 +388,27 @@ export class BlockStore { return undefined; } - const previousCheckpointData = await this.getCheckpointData(previousCheckpointNumber); - if (previousCheckpointData === undefined) { + // Check across both proposed and mined checkpoints + const predecessor = + (await this.getProposedCheckpointByNumber(previousCheckpointNumber)) ?? + (await this.getCheckpointData(previousCheckpointNumber)); + + if (!predecessor) { throw new CheckpointNotFoundError(previousCheckpointNumber); } - const previousBlockNumber = BlockNumber(previousCheckpointData.startBlock + previousCheckpointData.blockCount - 1); + const previousBlockNumber = BlockNumber(predecessor.startBlock + predecessor.blockCount - 1); const previousBlock = await this.getBlock(previousBlockNumber); if (previousBlock === undefined) { throw new BlockNotFoundError(previousBlockNumber); } - return previousBlock; } /** * Validates that blocks are sequential, have correct indexes, and chain via archive roots. * This is the same validation used for both confirmed checkpoints (addCheckpoints) and - * proposed checkpoints (setProposedCheckpoint). + * proposed checkpoints (addProposedCheckpoint). 
*/ private validateCheckpointBlocks(blocks: L2Block[], previousBlock: L2Block | undefined): void { for (const block of blocks) { @@ -535,11 +535,8 @@ export class BlockStore { this.#log.debug(`Removed checkpoint ${c}`); } - // Clear any proposed checkpoint that was orphaned by the removal (its base chain no longer exists) - const proposedCheckpointNumber = await this.getProposedCheckpointNumber(); - if (proposedCheckpointNumber > checkpointNumber) { - await this.#proposedCheckpoint.delete(); - } + // Evict all pending checkpoints > checkpointNumber (their base chain no longer exists) + await this.evictProposedCheckpointsFrom(CheckpointNumber(checkpointNumber + 1)); return { blocksRemoved }; }); @@ -588,6 +585,7 @@ export class BlockStore { checkpointNumber: CheckpointNumber(checkpointStorage.checkpointNumber), startBlock: BlockNumber(checkpointStorage.startBlock), blockCount: checkpointStorage.blockCount, + feeAssetPriceModifier: BigInt(checkpointStorage.feeAssetPriceModifier), l1: L1PublishedData.fromBuffer(checkpointStorage.l1), attestations: checkpointStorage.attestations.map(buf => CommitteeAttestation.fromBuffer(buf)), }; @@ -688,28 +686,34 @@ export class BlockStore { } async hasProposedCheckpoint(): Promise { - const proposed = await this.#proposedCheckpoint.getAsync(); - return proposed !== undefined; + const [key] = await toArray(this.#proposedCheckpoints.keysAsync({ limit: 1 })); + return key !== undefined; } - /** Deletes the proposed checkpoint from storage. */ - async deleteProposedCheckpoint(): Promise { - await this.#proposedCheckpoint.delete(); + /** Deletes all pending proposed checkpoints from storage. */ + async deleteProposedCheckpoints(): Promise { + for await (const key of this.#proposedCheckpoints.keysAsync()) { + await this.#proposedCheckpoints.delete(key); + } } /** - * Promotes the proposed checkpoint singleton to a confirmed checkpoint entry. 
- * This persists the checkpoint to the store, clears the proposed singleton, and updates the L1 sync point. - * Should only be called after the checkpoint has been validated. - * @param expectedArchiveRoot - The archive root to match against the proposed checkpoint, to guard against races. + * Promotes a specific pending checkpoint to a confirmed checkpoint entry. + * This persists the checkpoint to the store, removes only that pending entry, and updates the L1 sync point. + * Remaining pending entries (e.g. N+1, N+2) are left intact — they chain off the just-promoted one. + * @param checkpointNumber - The checkpoint number to promote. + * @param l1 - L1 published data for the checkpoint. + * @param attestations - Committee attestations. + * @param expectedArchiveRoot - Archive root guard against races. */ async promoteProposedToCheckpointed( + checkpointNumber: CheckpointNumber, l1: L1PublishedData, attestations: CommitteeAttestation[], expectedArchiveRoot: Fr, ): Promise { return await this.db.transactionAsync(async () => { - const proposed = await this.getProposedCheckpointOnly(); + const proposed = await this.getProposedCheckpointByNumber(checkpointNumber); if (!proposed) { throw new NoProposedCheckpointToPromoteError(); } @@ -733,48 +737,76 @@ export class BlockStore { checkpointNumber: proposed.checkpointNumber, startBlock: proposed.startBlock, blockCount: proposed.blockCount, + feeAssetPriceModifier: proposed.feeAssetPriceModifier.toString(), }); // Update the slot-to-checkpoint index await this.#slotToCheckpoint.set(proposed.header.slotNumber, proposed.checkpointNumber); - // Clear the proposed checkpoint singleton - await this.#proposedCheckpoint.delete(); + // Remove only this pending entry — remaining entries N+1, N+2, ... 
stay valid + await this.#proposedCheckpoints.delete(proposed.checkpointNumber); // Update the last synced L1 block await this.#lastSynchedL1Block.set(l1.blockNumber); }); } - /** Clears the proposed checkpoint if the given confirmed checkpoint number supersedes it. */ - async clearProposedCheckpointIfSuperseded(confirmedCheckpointNumber: CheckpointNumber): Promise { - const proposedCheckpointNumber = await this.getProposedCheckpointNumber(); - if (proposedCheckpointNumber <= confirmedCheckpointNumber) { - await this.#proposedCheckpoint.delete(); + /** + * Returns the latest pending checkpoint (highest-numbered entry), or undefined if none. + * No fallback to confirmed. + */ + async getLastProposedCheckpoint(): Promise { + const [key] = await toArray(this.#proposedCheckpoints.keysAsync({ reverse: true, limit: 1 })); + if (key === undefined) { + return undefined; } + const stored = await this.#proposedCheckpoints.getAsync(key); + return stored ? this.convertToProposedCheckpointData(stored) : undefined; } - /** Returns the proposed checkpoint data, or undefined if no proposed checkpoint exists. No fallback to confirmed. */ - async getProposedCheckpointOnly(): Promise { - const stored = await this.#proposedCheckpoint.getAsync(); - if (!stored) { - return undefined; + /** Returns the pending checkpoint for a specific checkpoint number, or undefined if not found. */ + async getProposedCheckpointByNumber(n: CheckpointNumber): Promise { + const stored = await this.#proposedCheckpoints.getAsync(n); + return stored ? this.convertToProposedCheckpointData(stored) : undefined; + } + + /** Returns all pending checkpoints in ascending checkpoint-number order. 
*/ + async getProposedCheckpoints(): Promise { + const results: ProposedCheckpointData[] = []; + for await (const [, stored] of this.#proposedCheckpoints.entriesAsync()) { + results.push(this.convertToProposedCheckpointData(stored)); + } + return results; + } + + /** + * Evicts all pending checkpoints with checkpoint number >= fromNumber. + * Used for divergent-mined-checkpoint cleanup: when L1 mines checkpoint N with a different archive, + * all pending >= N must be evicted since they chain off the now-invalid pending N. + */ + async evictProposedCheckpointsFrom(fromNumber: CheckpointNumber): Promise { + const keysToDelete: number[] = []; + for await (const key of this.#proposedCheckpoints.keysAsync()) { + if (key >= fromNumber) { + keysToDelete.push(key); + } + } + for (const key of keysToDelete) { + await this.#proposedCheckpoints.delete(key); } - return this.convertToProposedCheckpointData(stored); } /** - * Gets the checkpoint at the proposed tip - * - pending checkpoint if it exists + * Gets the checkpoint at the proposed tip: + * - latest pending checkpoint if any exist * - fallsback to latest confirmed checkpoint otherwise - * @returns CommonCheckpointData */ - async getProposedCheckpoint(): Promise { - const stored = await this.#proposedCheckpoint.getAsync(); - if (!stored) { + async getLastCheckpoint(): Promise { + const latest = await this.getLastProposedCheckpoint(); + if (!latest) { return this.getCheckpointData(await this.getLatestCheckpointNumber()); } - return this.convertToProposedCheckpointData(stored); + return latest; } private convertToProposedCheckpointData(stored: ProposedCheckpointStorage): ProposedCheckpointData { @@ -795,7 +827,7 @@ export class BlockStore { * @returns CheckpointNumber */ async getProposedCheckpointNumber(): Promise { - const proposed = await this.getProposedCheckpoint(); + const proposed = await this.getLastCheckpoint(); if (!proposed) { return await this.getLatestCheckpointNumber(); } @@ -807,7 +839,7 @@ export class 
BlockStore { * @returns BlockNumber */ async getProposedCheckpointL2BlockNumber(): Promise { - const proposed = await this.getProposedCheckpoint(); + const proposed = await this.getLastCheckpoint(); if (!proposed) { return await this.getCheckpointedL2BlockNumber(); } @@ -907,6 +939,33 @@ export class BlockStore { return this.getBlockDataFromBlockStorage(blockStorage); } + /** + * Gets block metadata plus checkpoint-derived context (L1 publish info, attestations) without + * deserializing tx bodies. When the block's containing checkpoint has not yet been L1-confirmed, + * `checkpoint` and `l1` are `undefined` and `attestations` is empty. + */ + async getBlockDataWithCheckpointContext( + blockNumber: BlockNumber, + ): Promise { + const blockStorage = await this.#blocks.getAsync(blockNumber); + if (!blockStorage || !blockStorage.header) { + return undefined; + } + const data = this.getBlockDataFromBlockStorage(blockStorage); + const checkpointStorage = await this.#checkpoints.getAsync(blockStorage.checkpointNumber); + if (!checkpointStorage) { + return { data, checkpoint: undefined, l1: undefined, attestations: [] }; + } + const checkpoint = this.checkpointDataFromCheckpointStorage(checkpointStorage); + return { data, checkpoint, l1: checkpoint.l1, attestations: checkpoint.attestations }; + } + + /** Returns the checkpoint number that contains the given slot (or undefined if not found). */ + async getCheckpointNumberBySlot(slot: SlotNumber): Promise { + const checkpointNumber = await this.#slotToCheckpoint.getAsync(slot); + return checkpointNumber === undefined ? undefined : CheckpointNumber(checkpointNumber); + } + /** * Gets block metadata (without tx data) by archive root. * @param archive - The archive root of the block to return. @@ -1188,20 +1247,25 @@ export class BlockStore { return this.#lastSynchedL1Block.set(l1BlockNumber); } - /** Sets the proposed checkpoint (not yet L1-confirmed). Only accepts confirmed + 1. 
- * Computes archive and checkpointOutHash from the stored blocks. */ - async setProposedCheckpoint(proposed: ProposedCheckpointInput) { + /** + * Adds a proposed checkpoint to the pending queue. + * Accepts proposed.checkpointNumber === latestTip + 1, where latestTip is the highest of + * confirmed and the highest pending checkpoint number. + * Computes archive and checkpointOutHash from the stored blocks. + */ + async addProposedCheckpoint(proposed: ProposedCheckpointInput) { return await this.db.transactionAsync(async () => { - const current = await this.getProposedCheckpointNumber(); - if (proposed.checkpointNumber <= current) { - throw new ProposedCheckpointStaleError(proposed.checkpointNumber, current); - } const confirmed = await this.getLatestCheckpointNumber(); - if (proposed.checkpointNumber !== confirmed + 1) { - throw new ProposedCheckpointNotSequentialError(proposed.checkpointNumber, confirmed); + const [latestPendingKey] = await toArray(this.#proposedCheckpoints.keysAsync({ reverse: true, limit: 1 })); + const latestTip = CheckpointNumber( + latestPendingKey !== undefined ? 
Math.max(latestPendingKey, confirmed) : confirmed, + ); + + if (proposed.checkpointNumber !== latestTip + 1) { + throw new ProposedCheckpointNotSequentialError(proposed.checkpointNumber, latestTip); } - // Ensure the previous checkpoint + blocks exist + // Ensure the predecessor block (from pending or confirmed chain) exists const previousBlock = await this.getPreviousCheckpointBlock(proposed.checkpointNumber); const blocks: L2Block[] = []; for (let i = 0; i < proposed.blockCount; i++) { @@ -1216,7 +1280,7 @@ export class BlockStore { const archive = blocks[blocks.length - 1].archive; const checkpointOutHash = Checkpoint.getCheckpointOutHash(blocks); - await this.#proposedCheckpoint.set({ + await this.#proposedCheckpoints.set(proposed.checkpointNumber, { header: proposed.header.toBuffer(), archive: archive.toBuffer(), checkpointOutHash: checkpointOutHash.toBuffer(), diff --git a/yarn-project/archiver/src/store/kv_archiver_store.test.ts b/yarn-project/archiver/src/store/kv_archiver_store.test.ts index b3c2a718512b..f21cbd05893b 100644 --- a/yarn-project/archiver/src/store/kv_archiver_store.test.ts +++ b/yarn-project/archiver/src/store/kv_archiver_store.test.ts @@ -45,6 +45,7 @@ import { type IndexedTxEffect, TxHash } from '@aztec/stdlib/tx'; import { BlockAlreadyCheckpointedError, BlockArchiveNotConsistentError, + BlockCheckpointNumberNotSequentialError, BlockIndexNotSequentialError, BlockNumberNotSequentialError, CannotOverwriteCheckpointedBlockError, @@ -1173,7 +1174,7 @@ describe('KVArchiverDataStore', () => { indexWithinCheckpoint: IndexWithinCheckpoint(0), }); - await expect(store.addProposedBlock(block3)).rejects.toThrow(CheckpointNumberNotSequentialError); + await expect(store.addProposedBlock(block3)).rejects.toThrow(BlockCheckpointNumberNotSequentialError); }); it('allows blocks with the same checkpoint number for the current checkpoint', async () => { @@ -1246,7 +1247,7 @@ describe('KVArchiverDataStore', () => { indexWithinCheckpoint: 
IndexWithinCheckpoint(0), }); - await expect(store.addProposedBlock(block1)).rejects.toThrow(CheckpointNumberNotSequentialError); + await expect(store.addProposedBlock(block1)).rejects.toThrow(BlockCheckpointNumberNotSequentialError); }); it('allows adding more blocks to the same checkpoint in separate calls', async () => { @@ -1328,7 +1329,7 @@ describe('KVArchiverDataStore', () => { indexWithinCheckpoint: IndexWithinCheckpoint(0), lastArchive: block3.archive, }); - await expect(store.addProposedBlock(block4)).rejects.toThrow(CheckpointNumberNotSequentialError); + await expect(store.addProposedBlock(block4)).rejects.toThrow(BlockCheckpointNumberNotSequentialError); }); it('force option bypasses checkpoint number validation', async () => { @@ -3621,8 +3622,8 @@ describe('KVArchiverDataStore', () => { }); describe('proposedCheckpointNumber', () => { - /** Adds proposed blocks to the store so setProposedCheckpoint can validate them. - * Uses force: true to skip addProposedBlock's own chaining checks (we only want to test setProposedCheckpoint). */ + /** Adds proposed blocks to the store so addProposedCheckpoint can validate them. + * Uses force: true to skip addProposedBlock's own chaining checks (we only want to test addProposedCheckpoint). 
*/ async function addBlocksForProposedCheckpoint( startBlock: number, blockCount: number, @@ -3649,7 +3650,7 @@ describe('KVArchiverDataStore', () => { it('stores and retrieves proposed checkpoint number', async () => { await addBlocksForProposedCheckpoint(1, 1, 1); - await store.blockStore.setProposedCheckpoint({ + await store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(1), header: CheckpointHeader.empty(), startBlock: BlockNumber(1), @@ -3663,7 +3664,7 @@ describe('KVArchiverDataStore', () => { it('stores and retrieves proposed checkpoint data with fee fields', async () => { await addBlocksForProposedCheckpoint(1, 1, 1); - await store.blockStore.setProposedCheckpoint({ + await store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(1), header: CheckpointHeader.empty(), startBlock: BlockNumber(1), @@ -3671,7 +3672,7 @@ describe('KVArchiverDataStore', () => { totalManaUsed: 12345n, feeAssetPriceModifier: -75n, }); - const pending = await store.blockStore.getProposedCheckpointOnly(); + const pending = await store.blockStore.getLastProposedCheckpoint(); expect(pending).toBeDefined(); expect(pending!.checkpointNumber).toBe(1); expect(pending!.totalManaUsed).toBe(12345n); @@ -3690,7 +3691,7 @@ describe('KVArchiverDataStore', () => { await addBlocksForProposedCheckpoint(2, 1, 2, checkpoint1.checkpoint.blocks[0].archive); // Set proposed checkpoint to 2 (attested but not yet on L1) - await store.blockStore.setProposedCheckpoint({ + await store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(2), header: CheckpointHeader.empty(), startBlock: BlockNumber(2), @@ -3725,7 +3726,7 @@ describe('KVArchiverDataStore', () => { // Try to set proposed checkpoint to 3 (confirmed=1, expected=2) await expect( - store.blockStore.setProposedCheckpoint({ + store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(3), header: CheckpointHeader.empty(), startBlock: BlockNumber(1), @@ -3747,10 +3748,9 @@ 
describe('KVArchiverDataStore', () => { ); await store.addCheckpoints([checkpoint1]); - // Try to set proposed checkpoint to 1 (confirmed=1, expected=2). - // With fallback behavior, getProposedCheckpointNumber returns 1 (confirmed), so this triggers the stale check. + // Try to set proposed checkpoint to 1 (confirmed=1, expected=2). Not sequential. await expect( - store.blockStore.setProposedCheckpoint({ + store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(1), header: CheckpointHeader.empty(), startBlock: BlockNumber(1), @@ -3758,7 +3758,7 @@ describe('KVArchiverDataStore', () => { totalManaUsed: 100n, feeAssetPriceModifier: 50n, }), - ).rejects.toThrow('Stale'); + ).rejects.toThrow('not sequential'); // Proposed checkpoint should remain unset expect(await store.blockStore.hasProposedCheckpoint()).toBe(false); @@ -3784,7 +3784,7 @@ describe('KVArchiverDataStore', () => { await addBlocksForProposedCheckpoint(3, 1, 3, checkpoint2.checkpoint.blocks[0].archive); // Set proposed checkpoint to 3 - await store.blockStore.setProposedCheckpoint({ + await store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(3), header: CheckpointHeader.empty(), startBlock: BlockNumber(3), @@ -3819,7 +3819,7 @@ describe('KVArchiverDataStore', () => { await addBlocksForProposedCheckpoint(3, 1, 3, checkpoint2.checkpoint.blocks[0].archive); // Set pending to 3 (confirmed=2, 3===2+1 ✓) - await store.blockStore.setProposedCheckpoint({ + await store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(3), header: CheckpointHeader.empty(), startBlock: BlockNumber(3), @@ -3846,7 +3846,7 @@ describe('KVArchiverDataStore', () => { await addBlocksForProposedCheckpoint(2, 1, 2, checkpoint1.checkpoint.blocks[0].archive); // Set proposed checkpoint to 2 (attested but not on L1 yet) - await store.blockStore.setProposedCheckpoint({ + await store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(2), header: 
CheckpointHeader.empty(), startBlock: BlockNumber(2), @@ -3879,7 +3879,7 @@ describe('KVArchiverDataStore', () => { await addBlocksForProposedCheckpoint(2, 1, 2, checkpoint1.checkpoint.blocks[0].archive); // Set proposed checkpoint to 2 - await store.blockStore.setProposedCheckpoint({ + await store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(2), header: CheckpointHeader.empty(), startBlock: BlockNumber(2), @@ -3897,8 +3897,8 @@ describe('KVArchiverDataStore', () => { }); await expect(store.addProposedBlock(block3)).rejects.toThrow( - // Error should report the proposed checkpoint number (2), not the confirmed one (1) - 'Cannot insert new checkpoint 4 given previous proposed checkpoint number is 2', + // Error should report the latest known checkpoint number (the pending one, 2), not the confirmed (1) + 'Cannot insert new block 3 for checkpoint 4 given previous checkpoint number is 2', ); }); @@ -3920,7 +3920,7 @@ describe('KVArchiverDataStore', () => { }); await expect(store.addProposedBlock(block2)).rejects.toThrow( - 'Cannot insert new checkpoint 4 given previous confirmed checkpoint number is 1', + 'Cannot insert new block 2 for checkpoint 4 given previous checkpoint number is 1', ); }); @@ -3951,7 +3951,7 @@ describe('KVArchiverDataStore', () => { await addBlocksForProposedCheckpoint(2, 1, 2, checkpoint1.checkpoint.blocks[0].archive); // Set proposed checkpoint - await store.blockStore.setProposedCheckpoint({ + await store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(2), header: CheckpointHeader.empty(), startBlock: BlockNumber(2), @@ -3977,7 +3977,7 @@ describe('KVArchiverDataStore', () => { // Add blocks and set proposed checkpoint 2 await addBlocksForProposedCheckpoint(2, 1, 2, checkpoint1.checkpoint.blocks[0].archive); - await store.blockStore.setProposedCheckpoint({ + await store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(2), header: CheckpointHeader.empty(), startBlock: 
BlockNumber(2), @@ -4024,7 +4024,7 @@ describe('KVArchiverDataStore', () => { await store.addProposedBlock(block2, { force: true }); // Set proposed checkpoint 2 - await store.blockStore.setProposedCheckpoint({ + await store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(2), header: CheckpointHeader.empty(), startBlock: BlockNumber(2), @@ -4033,7 +4033,7 @@ describe('KVArchiverDataStore', () => { feeAssetPriceModifier: 50n, }); - const proposed = await store.getProposedCheckpointOnly(); + const proposed = await store.getLastProposedCheckpoint(); return { checkpoint1, proposed: proposed! }; } @@ -4042,24 +4042,24 @@ describe('KVArchiverDataStore', () => { const l1 = makeL1PublishedData(20); const attestations = [CommitteeAttestation.random()]; - await store.promoteProposedToCheckpointed(l1, attestations, proposed.archive.root); + await store.promoteProposedToCheckpointed(CheckpointNumber(2), l1, attestations, proposed.archive.root); expect(await store.blockStore.hasProposedCheckpoint()).toBe(false); expect(await store.blockStore.getLatestCheckpointNumber()).toBe(2); }); it('throws when no proposed checkpoint exists', async () => { - await expect(store.promoteProposedToCheckpointed(makeL1PublishedData(20), [], Fr.random())).rejects.toThrow( - 'no proposed checkpoint exists', - ); + await expect( + store.promoteProposedToCheckpointed(CheckpointNumber(2), makeL1PublishedData(20), [], Fr.random()), + ).rejects.toThrow('no proposed checkpoint exists'); }); it('throws on archive root mismatch', async () => { await setupProposedCheckpoint(); - await expect(store.promoteProposedToCheckpointed(makeL1PublishedData(20), [], Fr.random())).rejects.toThrow( - 'archive root mismatch', - ); + await expect( + store.promoteProposedToCheckpointed(CheckpointNumber(2), makeL1PublishedData(20), [], Fr.random()), + ).rejects.toThrow('archive root mismatch'); // Proposed checkpoint should still exist (transaction rolled back) expect(await 
store.blockStore.hasProposedCheckpoint()).toBe(true); @@ -4102,7 +4102,7 @@ describe('KVArchiverDataStore', () => { await store.addProposedBlock(block2, { force: true }); // Set proposed checkpoint - await store.blockStore.setProposedCheckpoint({ + await store.blockStore.addProposedCheckpoint({ checkpointNumber: CheckpointNumber(2), header: CheckpointHeader.empty(), startBlock: BlockNumber(2), diff --git a/yarn-project/archiver/src/store/kv_archiver_store.ts b/yarn-project/archiver/src/store/kv_archiver_store.ts index a0814222a2e3..76cc4c2495bb 100644 --- a/yarn-project/archiver/src/store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/store/kv_archiver_store.ts @@ -626,13 +626,13 @@ export class KVArchiverDataStore implements ContractDataSource { } /** Returns the checkpoint data at the proposed tip */ - public getProposedCheckpoint(): Promise { - return this.#blockStore.getProposedCheckpoint(); + public getLastCheckpoint(): Promise { + return this.#blockStore.getLastCheckpoint(); } /** Returns the proposed checkpoint data, or undefined if no proposed checkpoint exists. No fallback to confirmed. */ - public getProposedCheckpointOnly(): Promise { - return this.#blockStore.getProposedCheckpointOnly(); + public getLastProposedCheckpoint(): Promise { + return this.#blockStore.getLastProposedCheckpoint(); } /** @@ -640,26 +640,48 @@ export class KVArchiverDataStore implements ContractDataSource { * @param proposedCheckpoint * @returns */ - public setProposedCheckpoint(proposedCheckpoint: ProposedCheckpointInput): Promise { - return this.#blockStore.setProposedCheckpoint(proposedCheckpoint); + public addProposedCheckpoint(proposedCheckpoint: ProposedCheckpointInput): Promise { + return this.#blockStore.addProposedCheckpoint(proposedCheckpoint); } - /** Deletes the proposed checkpoint from storage. */ - public deleteProposedCheckpoint(): Promise { - return this.#blockStore.deleteProposedCheckpoint(); + /** Deletes all pending proposed checkpoints from storage. 
*/ + public deleteProposedCheckpoints(): Promise { + return this.#blockStore.deleteProposedCheckpoints(); + } + + /** Returns the pending checkpoint for a specific checkpoint number, or undefined if not found. */ + public getProposedCheckpointByNumber(n: CheckpointNumber): Promise { + return this.#blockStore.getProposedCheckpointByNumber(n); + } + + /** Returns all pending checkpoints in ascending checkpoint-number order. */ + public getProposedCheckpoints(): Promise { + return this.#blockStore.getProposedCheckpoints(); + } + + /** + * Evicts all pending checkpoints with checkpoint number >= fromNumber. + * Used for divergent-mined-checkpoint cleanup. + */ + public evictProposedCheckpointsFrom(fromNumber: CheckpointNumber): Promise { + return this.#blockStore.evictProposedCheckpointsFrom(fromNumber); } /** - * Promotes the proposed checkpoint to a confirmed checkpoint entry. + * Promotes a specific pending checkpoint to a confirmed checkpoint entry. * Should only be called after the checkpoint has been validated. + * @param checkpointNumber - The checkpoint number to promote. + * @param l1 - L1 published data for the checkpoint. + * @param attestations - Committee attestations. * @param expectedArchiveRoot - The archive root to match against the proposed checkpoint, to guard against races. */ public promoteProposedToCheckpointed( + checkpointNumber: CheckpointNumber, l1: L1PublishedData, attestations: CommitteeAttestation[], expectedArchiveRoot: Fr, ): Promise { - return this.#blockStore.promoteProposedToCheckpointed(l1, attestations, expectedArchiveRoot); + return this.#blockStore.promoteProposedToCheckpointed(checkpointNumber, l1, attestations, expectedArchiveRoot); } /** @@ -707,6 +729,21 @@ export class KVArchiverDataStore implements ContractDataSource { return this.#blockStore.getCheckpointDataForSlotRange(startSlot, endSlot); } + /** Returns lightweight checkpoint metadata for a range of checkpoints. 
*/ + getCheckpointDataRange(from: CheckpointNumber, limit: number): Promise { + return this.#blockStore.getRangeOfCheckpoints(from, limit); + } + + /** Returns the checkpoint number for a given slot, if one exists. */ + getCheckpointNumberBySlot(slot: SlotNumber): Promise { + return this.#blockStore.getCheckpointNumberBySlot(slot); + } + + /** Returns block metadata plus checkpoint-derived context (L1 publish info, attestations). */ + getBlockDataWithCheckpointContext(blockNumber: BlockNumber) { + return this.#blockStore.getBlockDataWithCheckpointContext(blockNumber); + } + /** * Gets all blocks that have the given slot number. * @param slotNumber - The slot number to search for. diff --git a/yarn-project/archiver/src/store/l2_tips_cache.ts b/yarn-project/archiver/src/store/l2_tips_cache.ts index 1f34a0628c8f..bb2b26d522b6 100644 --- a/yarn-project/archiver/src/store/l2_tips_cache.ts +++ b/yarn-project/archiver/src/store/l2_tips_cache.ts @@ -108,7 +108,7 @@ export class L2TipsCache { private async getCheckpointIdForProposedCheckpoint( checkpointedBlockData: Pick, ): Promise { - const checkpointData = await this.blockStore.getProposedCheckpointOnly(); + const checkpointData = await this.blockStore.getLastProposedCheckpoint(); if (!checkpointData) { return this.getCheckpointIdForBlock(checkpointedBlockData); } diff --git a/yarn-project/archiver/src/test/fake_l1_state.ts b/yarn-project/archiver/src/test/fake_l1_state.ts index 790a5988f2fb..005cb8577c73 100644 --- a/yarn-project/archiver/src/test/fake_l1_state.ts +++ b/yarn-project/archiver/src/test/fake_l1_state.ts @@ -15,12 +15,8 @@ import { CommitteeAttestation, CommitteeAttestationsAndSigners, L2Block } from ' import { Checkpoint } from '@aztec/stdlib/checkpoint'; import { getSlotAtTimestamp } from '@aztec/stdlib/epoch-helpers'; import { InboxLeaf } from '@aztec/stdlib/messaging'; -import { ConsensusPayload, SignatureDomainSeparator } from '@aztec/stdlib/p2p'; -import { - makeAndSignCommitteeAttestationsAndSigners, 
- makeCheckpointAttestationFromCheckpoint, - mockCheckpointAndMessages, -} from '@aztec/stdlib/testing'; +import { ConsensusPayload, getHashedSignaturePayloadTypedData } from '@aztec/stdlib/p2p'; +import { mockCheckpointAndMessages } from '@aztec/stdlib/testing'; import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -519,6 +515,9 @@ export class FakeL1State { const publicClient = mock(); publicClient.getChainId.mockResolvedValue(1); + // Several consumers (CalldataRetriever, ArchiverL1Synchronizer) derive the EIP-712 signing + // context from `publicClient.chain.id`. Pin it so it matches `getSignatureContext()` below. + (publicClient as unknown as { chain: { id: number } }).chain = { id: 1 }; publicClient.getBlockNumber.mockImplementation(() => Promise.resolve(this.l1BlockNumber)); publicClient.getBlock.mockImplementation((async (args: { blockNumber?: bigint; blockTag?: string } = {}) => { @@ -660,9 +659,11 @@ export class FakeL1State { checkpoint: Checkpoint, signers: Secp256k1Signer[], ): Promise<{ tx: Transaction; attestationsHash: Buffer32; payloadDigest: Buffer32 }> { + const signatureContext = this.getSignatureContext(); + const consensusPayload = ConsensusPayload.fromCheckpoint(checkpoint, signatureContext); + const attestationDigest = getHashedSignaturePayloadTypedData(consensusPayload); const attestations = signers - .map(signer => makeCheckpointAttestationFromCheckpoint(checkpoint, signer)) - .map(attestation => CommitteeAttestation.fromSignature(attestation.signature)) + .map(signer => CommitteeAttestation.fromSignature(signer.sign(attestationDigest))) .map(committeeAttestation => committeeAttestation.toViem()); const header = checkpoint.header.toViem(); @@ -670,11 +671,15 @@ export class FakeL1State { const archive = toHex(checkpoint.archive.root.toBuffer()); const attestationsAndSigners = new CommitteeAttestationsAndSigners( attestations.map(attestation => 
CommitteeAttestation.fromViem(attestation)), + signatureContext, ); - const attestationsAndSignersSignature = makeAndSignCommitteeAttestationsAndSigners( - attestationsAndSigners, - signers[0], + // Fall back to a random signer when no attesters are provided, so tests that + // don't care about the proposer identity (e.g. sync tests) still produce a + // valid-looking signature for the attestationsAndSigners struct. + const proposerSigner = signers[0] ?? Secp256k1Signer.random(); + const attestationsAndSignersSignature = proposerSigner.sign( + getHashedSignaturePayloadTypedData(attestationsAndSigners), ); const packedAttestations = attestationsAndSigners.getPackedAttestations(); @@ -715,9 +720,7 @@ export class FakeL1State { ); // Compute payloadDigest (same logic as CalldataRetriever) - const consensusPayload = ConsensusPayload.fromCheckpoint(checkpoint); - const payloadToSign = consensusPayload.getPayloadToSign(SignatureDomainSeparator.checkpointAttestation); - const payloadDigest = Buffer32.fromString(keccak256(payloadToSign)); + const payloadDigest = getHashedSignaturePayloadTypedData(consensusPayload); const tx = { input: multiCallInput, @@ -729,6 +732,13 @@ export class FakeL1State { return { tx, attestationsHash, payloadDigest }; } + private getSignatureContext() { + return { + chainId: 1, + rollupAddress: this.config.rollupAddress, + }; + } + /** Extracts the CommitteeAttestations struct definition from RollupAbi for hash computation. 
*/ private getCommitteeAttestationsStructDef(): AbiParameter { const proposeFunction = RollupAbi.find(item => item.type === 'function' && item.name === 'propose') as diff --git a/yarn-project/archiver/src/test/mock_l2_block_source.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts index 3e5d427dc358..484c804fe778 100644 --- a/yarn-project/archiver/src/test/mock_l2_block_source.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -321,6 +321,26 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { }; } + public getCheckpointData(_n: CheckpointNumber): Promise { + return Promise.resolve(undefined); + } + + public getCheckpointDataRange(_from: CheckpointNumber, _limit: number): Promise { + return Promise.resolve([]); + } + + public getCheckpointNumberBySlot(_slot: SlotNumber): Promise { + return Promise.resolve(undefined); + } + + public async getBlockDataWithCheckpointContext(number: BlockNumber) { + const data = await this.getBlockData(number); + if (!data) { + return undefined; + } + return { data, checkpoint: undefined, l1: undefined, attestations: [] }; + } + public async getBlockDataByArchive(archive: Fr): Promise { const block = this.l2Blocks.find(b => b.archive.root.equals(archive)); if (!block) { @@ -356,6 +376,7 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { ), startBlock: checkpoint.blocks[0].number, blockCount: checkpoint.blocks.length, + feeAssetPriceModifier: checkpoint.feeAssetPriceModifier, attestations: [], l1: this.mockL1DataForCheckpoint(checkpoint), }), @@ -558,11 +579,11 @@ export class MockL2BlockSource implements L2BlockSource, ContractDataSource { return Promise.resolve({ valid: true }); } - getProposedCheckpoint(): Promise { + getLastCheckpoint(): Promise { return Promise.resolve(undefined); } - getProposedCheckpointOnly(): Promise { + getLastProposedCheckpoint(): Promise { return Promise.resolve(undefined); } diff --git 
a/yarn-project/aztec-node/src/aztec-node/block_response_helpers.ts b/yarn-project/aztec-node/src/aztec-node/block_response_helpers.ts new file mode 100644 index 000000000000..518027789433 --- /dev/null +++ b/yarn-project/aztec-node/src/aztec-node/block_response_helpers.ts @@ -0,0 +1,131 @@ +import { BlockNumber } from '@aztec/foundation/branded-types'; +import { + type BlockData, + type BlockDataWithCheckpointContext, + type CheckpointedL2Block, + type CommitteeAttestation, + L2Block, +} from '@aztec/stdlib/block'; +import type { CheckpointData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import { + type BlockIncludeOptions, + type BlockResponse, + type CheckpointIncludeOptions, + type CheckpointResponse, + l1PublishInfoFromL1PublishedData, +} from '@aztec/stdlib/interfaces/client'; + +/** Projects a full {@link L2Block} into a {@link BlockResponse}, attaching L1 / attestation context when provided. */ +export async function blockResponseFromL2Block( + block: L2Block, + options: BlockIncludeOptions, + context?: { l1?: BlockDataWithCheckpointContext['l1']; attestations?: CommitteeAttestation[] }, +): Promise { + const response: BlockResponse = { + header: block.header, + archive: block.archive, + hash: await block.hash(), + checkpointNumber: block.checkpointNumber, + indexWithinCheckpoint: block.indexWithinCheckpoint, + number: block.number, + }; + if (options.includeTransactions) { + (response as BlockResponse).body = block.body; + } + if (options.includeL1PublishInfo) { + (response as BlockResponse).l1 = l1PublishInfoFromL1PublishedData(context?.l1); + } + if (options.includeAttestations) { + (response as BlockResponse).attestations = context?.attestations ?? []; + } + return response; +} + +/** Projects metadata-only {@link BlockData} into a {@link BlockResponse}. 
*/ +export function blockResponseFromBlockData( + data: BlockData, + blockNumber: BlockNumber, + options: BlockIncludeOptions, + context?: { l1?: BlockDataWithCheckpointContext['l1']; attestations?: CommitteeAttestation[] }, +): BlockResponse { + const response: BlockResponse = { + header: data.header, + archive: data.archive, + hash: data.blockHash, + checkpointNumber: data.checkpointNumber, + indexWithinCheckpoint: data.indexWithinCheckpoint, + number: blockNumber, + }; + if (options.includeL1PublishInfo) { + (response as BlockResponse).l1 = l1PublishInfoFromL1PublishedData(context?.l1); + } + if (options.includeAttestations) { + (response as BlockResponse).attestations = context?.attestations ?? []; + } + return response; +} + +/** Projects a {@link CheckpointedL2Block} into a {@link BlockResponse}. */ +export function blockResponseFromCheckpointedL2Block( + cp: CheckpointedL2Block, + options: BlockIncludeOptions, +): Promise { + return blockResponseFromL2Block(cp.block, options, { l1: cp.l1, attestations: cp.attestations }); +} + +/** Projects a {@link PublishedCheckpoint} into a {@link CheckpointResponse}. */ +export async function checkpointResponseFromPublishedCheckpoint( + pc: PublishedCheckpoint, + options: CheckpointIncludeOptions, +): Promise { + const response: CheckpointResponse = { + number: pc.checkpoint.number, + header: pc.checkpoint.header, + archive: pc.checkpoint.archive, + checkpointOutHash: pc.checkpoint.getCheckpointOutHash(), + startBlock: pc.checkpoint.blocks[0]?.number ?? 
BlockNumber.ZERO, + blockCount: pc.checkpoint.blocks.length, + feeAssetPriceModifier: pc.checkpoint.feeAssetPriceModifier, + }; + if (options.includeBlocks) { + (response as CheckpointResponse).blocks = await Promise.all( + pc.checkpoint.blocks.map(block => + blockResponseFromL2Block(block, { + includeTransactions: options.includeTransactions, + includeL1PublishInfo: false, + includeAttestations: false, + }), + ), + ); + } + if (options.includeL1PublishInfo) { + (response as CheckpointResponse).l1 = l1PublishInfoFromL1PublishedData(pc.l1); + } + if (options.includeAttestations) { + (response as CheckpointResponse).attestations = pc.attestations; + } + return response; +} + +/** Projects metadata-only {@link CheckpointData} into a {@link CheckpointResponse}. `includeBlocks` is ignored (no blocks loaded). */ +export function checkpointResponseFromCheckpointData( + cd: CheckpointData, + options: CheckpointIncludeOptions, +): CheckpointResponse { + const response: CheckpointResponse = { + number: cd.checkpointNumber, + header: cd.header, + archive: cd.archive, + checkpointOutHash: cd.checkpointOutHash, + startBlock: cd.startBlock, + blockCount: cd.blockCount, + feeAssetPriceModifier: cd.feeAssetPriceModifier, + }; + if (options.includeL1PublishInfo) { + (response as CheckpointResponse).l1 = l1PublishInfoFromL1PublishedData(cd.l1); + } + if (options.includeAttestations) { + (response as CheckpointResponse).attestations = cd.attestations; + } + return response; +} diff --git a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index b2ce9c7db13e..9df49ffbe54f 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -50,6 +50,7 @@ import { dirname, join, resolve } from 'path'; import { fileURLToPath } from 'url'; import { generatePrivateKey, privateKeyToAccount } from 'viem/accounts'; +import { blockResponseFromL2Block } from 
'./block_response_helpers.js'; import { type AztecNodeConfig, getConfigEnvVars } from './config.js'; import { AztecNodeService } from './server.js'; @@ -389,28 +390,34 @@ describe('aztec node', () => { let block2: L2Block; beforeEach(() => { - block1 = L2Block.empty(); - block2 = L2Block.empty(); + block1 = L2Block.empty( + BlockHeader.empty({ globalVariables: GlobalVariables.empty({ blockNumber: BlockNumber(1) }) }), + ); + block2 = L2Block.empty( + BlockHeader.empty({ globalVariables: GlobalVariables.empty({ blockNumber: BlockNumber(2) }) }), + ); l2BlockSource.getBlockNumber.mockResolvedValue(BlockNumber(2)); }); - it('returns requested block number', async () => { + it('returns requested block number with transactions', async () => { l2BlockSource.getL2Block.mockResolvedValue(block1); - expect(await node.getBlock(BlockNumber(1))).toEqual(block1); + const expected = await blockResponseFromL2Block(block1, { includeTransactions: true }); + expect(await node.getBlock(BlockNumber(1), { includeTransactions: true })).toEqual(expected); expect(l2BlockSource.getL2Block).toHaveBeenCalledWith(BlockNumber(1)); }); - it('returns latest block', async () => { + it('returns latest block with transactions', async () => { l2BlockSource.getL2Block.mockResolvedValue(block2); - expect(await node.getBlock('latest')).toEqual(block2); + const expected = await blockResponseFromL2Block(block2, { includeTransactions: true }); + expect(await node.getBlock('latest', { includeTransactions: true })).toEqual(expected); expect(l2BlockSource.getL2Block).toHaveBeenCalledWith(2); }); it('returns undefined for non-existent block', async () => { l2BlockSource.getL2Block.mockResolvedValue(undefined); - expect(await node.getBlock(BlockNumber(3))).toEqual(undefined); - expect(l2BlockSource.getL2Block).toHaveBeenCalledWith(3); + expect(await node.getBlock(BlockNumber(3), { includeTransactions: true })).toEqual(undefined); + expect(l2BlockSource.getL2Block).toHaveBeenCalledWith(BlockNumber(3)); }); }); 
diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 82dec2d5f7f3..a48a5dcd9bbf 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -50,14 +50,13 @@ import { import { CollectionLimitsConfig, PublicSimulatorConfig } from '@aztec/stdlib/avm'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { - type BlockData, BlockHash, type BlockParameter, type DataInBlock, L2Block, type L2BlockSource, + inspectBlockParameter, } from '@aztec/stdlib/block'; -import type { PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import type { ContractClassPublic, ContractDataSource, @@ -67,15 +66,22 @@ import type { } from '@aztec/stdlib/contract'; import { GasFees, type ManaUsageEstimate } from '@aztec/stdlib/gas'; import { computePublicDataTreeLeafSlot } from '@aztec/stdlib/hash'; -import { - type AztecNode, - type AztecNodeAdmin, - type AztecNodeAdminConfig, - AztecNodeAdminConfigSchema, - type AztecNodeDebug, - type GetContractClassLogsResponse, - type GetPublicLogsResponse, +import type { + AztecNode, + AztecNodeAdmin, + AztecNodeAdminConfig, + AztecNodeDebug, + BlockIncludeOptions, + BlockResponse, + ChainTip, + ChainTips, + CheckpointIncludeOptions, + CheckpointParameter, + CheckpointResponse, + GetContractClassLogsResponse, + GetPublicLogsResponse, } from '@aztec/stdlib/interfaces/client'; +import { AztecNodeAdminConfigSchema } from '@aztec/stdlib/interfaces/client'; import { type AllowedElement, type ClientProtocolCircuitVerifier, @@ -129,6 +135,12 @@ import { createPublicClient } from 'viem'; import { createSentinel } from '../sentinel/factory.js'; import { Sentinel } from '../sentinel/sentinel.js'; +import { + blockResponseFromBlockData, + blockResponseFromL2Block, + checkpointResponseFromCheckpointData, + checkpointResponseFromPublishedCheckpoint, +} from './block_response_helpers.js'; import { type AztecNodeConfig, 
createKeyStoreForValidator } from './config.js'; import { NodeMetrics } from './node_metrics.js'; @@ -197,10 +209,281 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, AztecNodeDeb return status.syncSummary; } + public async getChainTips(): Promise { + const { proposed, checkpointed, proven, finalized } = await this.blockSource.getL2Tips(); + return { proposed, checkpointed, proven, finalized }; + } + public getL2Tips() { return this.blockSource.getL2Tips(); } + public async getBlockHeader(number: BlockNumber | 'latest'): Promise { + const resolvedNumber = number === 'latest' ? await this.blockSource.getBlockNumber() : number; + if (resolvedNumber === BlockNumber.ZERO) { + return this.worldStateSynchronizer.getCommitted().getInitialHeader(); + } + return this.blockSource.getBlockHeader(resolvedNumber); + } + + public async getCheckpointedBlocks(from: BlockNumber, limit: number) { + return (await this.blockSource.getCheckpointedBlocks(from, limit)) ?? []; + } + + public getCheckpointsDataForEpoch(epoch: EpochNumber) { + return this.blockSource.getCheckpointsDataForEpoch(epoch); + } + + public getBlockNumber(tip?: ChainTip): Promise { + switch (tip) { + case undefined: + case 'proposed': + return this.blockSource.getBlockNumber(); + case 'checkpointed': + return this.blockSource.getCheckpointedL2BlockNumber(); + case 'proven': + return this.blockSource.getProvenBlockNumber(); + case 'finalized': + return this.blockSource.getFinalizedL2BlockNumber(); + } + } + + public async getCheckpointNumber(tip?: ChainTip): Promise { + switch (tip) { + case undefined: + case 'proposed': + case 'checkpointed': + return await this.blockSource.getCheckpointNumber(); + case 'proven': + case 'finalized': { + const tips = await this.blockSource.getL2Tips(); + return tip === 'proven' ? 
tips.proven.checkpoint.number : tips.finalized.checkpoint.number; + } + } + } + + private isChainTip(value: unknown): value is ChainTip { + return value === 'proposed' || value === 'checkpointed' || value === 'proven' || value === 'finalized'; + } + + private async resolveBlockParameter( + param: BlockParameter, + ): Promise<{ number?: BlockNumber; hash?: BlockHash; archive?: Fr }> { + if (BlockHash.isBlockHash(param)) { + return { hash: param }; + } + if (typeof param === 'number') { + return { number: param as BlockNumber }; + } + if (param === 'latest') { + return { number: await this.blockSource.getBlockNumber() }; + } + if (this.isChainTip(param)) { + return { number: await this.getBlockNumber(param) }; + } + if (typeof param === 'object' && param !== null) { + if ('number' in param) { + return { number: param.number }; + } + if ('hash' in param) { + return { hash: param.hash }; + } + if ('archive' in param) { + return { archive: param.archive }; + } + } + throw new BadRequestError(`Invalid BlockParameter: ${JSON.stringify(param)}`); + } + + private async resolveCheckpointParameter( + param: CheckpointParameter, + ): Promise<{ number?: CheckpointNumber; slot?: SlotNumber }> { + if (typeof param === 'number') { + return { number: param as CheckpointNumber }; + } + if (param === 'latest') { + return { number: await this.blockSource.getCheckpointNumber() }; + } + if (this.isChainTip(param)) { + return { number: await this.getCheckpointNumber(param) }; + } + if (typeof param === 'object' && param !== null) { + if ('number' in param) { + return { number: param.number }; + } + if ('slot' in param) { + return { slot: param.slot }; + } + } + throw new BadRequestError(`Invalid CheckpointParameter: ${JSON.stringify(param)}`); + } + + public async getBlock( + param: BlockParameter, + options: Opts = {} as Opts, + ): Promise | undefined> { + const resolved = await this.resolveBlockParameter(param); + const wantTxs = !!options.includeTransactions; + const wantContext = 
!!options.includeL1PublishInfo || !!options.includeAttestations; + + if (resolved.hash !== undefined) { + const initial = await this.#getInitialHeaderHash(); + if (resolved.hash.equals(initial)) { + return (await this.buildGenesisBlockResponse(options)) as BlockResponse; + } + if (wantTxs) { + const block = await this.blockSource.getL2BlockByHash(resolved.hash); + if (!block) { + return undefined; + } + const ctx = wantContext ? await this.blockSource.getBlockDataWithCheckpointContext(block.number) : undefined; + return (await blockResponseFromL2Block(block, options, ctx)) as BlockResponse; + } + const data = await this.blockSource.getBlockHeaderByHash(resolved.hash); + if (!data) { + return undefined; + } + const blockNumber = data.globalVariables.blockNumber; + const ctx = wantContext ? await this.blockSource.getBlockDataWithCheckpointContext(blockNumber) : undefined; + if (ctx) { + return blockResponseFromBlockData(ctx.data, blockNumber, options, ctx) as BlockResponse; + } + const blockData = await this.blockSource.getBlockData(blockNumber); + if (!blockData) { + return undefined; + } + return blockResponseFromBlockData(blockData, blockNumber, options) as BlockResponse; + } + + if (resolved.archive !== undefined) { + if (wantTxs) { + const block = await this.blockSource.getL2BlockByArchive(resolved.archive); + if (!block) { + return undefined; + } + const ctx = wantContext ? await this.blockSource.getBlockDataWithCheckpointContext(block.number) : undefined; + return (await blockResponseFromL2Block(block, options, ctx)) as BlockResponse; + } + const data = await this.blockSource.getBlockDataByArchive(resolved.archive); + if (!data) { + return undefined; + } + const blockNumber = data.header.globalVariables.blockNumber; + const ctx = wantContext ? 
await this.blockSource.getBlockDataWithCheckpointContext(blockNumber) : undefined; + return blockResponseFromBlockData(data, blockNumber, options, ctx) as BlockResponse; + } + + const blockNumber = resolved.number!; + if (blockNumber === BlockNumber.ZERO) { + return (await this.buildGenesisBlockResponse(options)) as BlockResponse; + } + if (wantTxs) { + const block = await this.blockSource.getL2Block(blockNumber); + if (!block) { + return undefined; + } + const ctx = wantContext ? await this.blockSource.getBlockDataWithCheckpointContext(blockNumber) : undefined; + return (await blockResponseFromL2Block(block, options, ctx)) as BlockResponse; + } + const ctx = await this.blockSource.getBlockDataWithCheckpointContext(blockNumber); + if (!ctx) { + return undefined; + } + return blockResponseFromBlockData(ctx.data, blockNumber, options, ctx) as BlockResponse; + } + + public async getBlocks( + from: BlockNumber, + limit: number, + options: Opts = {} as Opts, + ): Promise[]> { + const wantTxs = !!options.includeTransactions; + const wantContext = !!options.includeL1PublishInfo || !!options.includeAttestations; + if (wantTxs) { + const blocks = await this.blockSource.getBlocks(from, limit); + return (await Promise.all( + blocks.map(async block => { + const ctx = wantContext ? 
await this.blockSource.getBlockDataWithCheckpointContext(block.number) : undefined; + return blockResponseFromL2Block(block, options, ctx); + }), + )) as BlockResponse[]; + } + const results: BlockResponse[] = []; + for (let i = 0; i < limit; i++) { + const blockNumber = BlockNumber(from + i); + const ctx = await this.blockSource.getBlockDataWithCheckpointContext(blockNumber); + if (!ctx) { + break; + } + results.push(blockResponseFromBlockData(ctx.data, blockNumber, options, ctx) as BlockResponse); + } + return results; + } + + public async getCheckpoint( + param: CheckpointParameter, + options: Opts = {} as Opts, + ): Promise | undefined> { + const resolved = await this.resolveCheckpointParameter(param); + let checkpointNumber = resolved.number; + if (checkpointNumber === undefined && resolved.slot !== undefined) { + checkpointNumber = await this.blockSource.getCheckpointNumberBySlot(resolved.slot); + } + if (checkpointNumber === undefined) { + return undefined; + } + if (options.includeBlocks) { + const [checkpoint] = await this.blockSource.getCheckpoints(checkpointNumber, 1); + if (!checkpoint) { + return undefined; + } + return (await checkpointResponseFromPublishedCheckpoint(checkpoint, options)) as CheckpointResponse; + } + const data = await this.blockSource.getCheckpointData(checkpointNumber); + if (!data) { + return undefined; + } + return checkpointResponseFromCheckpointData(data, options) as CheckpointResponse; + } + + public async getCheckpoints( + from: CheckpointNumber, + limit: number, + options: Opts = {} as Opts, + ): Promise[]> { + if (options.includeBlocks) { + const checkpoints = await this.blockSource.getCheckpoints(from, limit); + return (await Promise.all( + checkpoints.map(cp => checkpointResponseFromPublishedCheckpoint(cp, options)), + )) as CheckpointResponse[]; + } + const datas = await this.blockSource.getCheckpointDataRange(from, limit); + return datas.map(d => checkpointResponseFromCheckpointData(d, options)) as CheckpointResponse[]; 
+ } + + private async buildGenesisBlockResponse(options: BlockIncludeOptions): Promise { + const initial = this.worldStateSynchronizer.getCommitted().getInitialHeader(); + const empty = L2Block.empty(initial); + const response: BlockResponse = { + header: empty.header, + archive: empty.archive, + hash: await this.#getInitialHeaderHash(), + checkpointNumber: empty.checkpointNumber, + indexWithinCheckpoint: empty.indexWithinCheckpoint, + number: empty.number, + }; + if (options.includeTransactions) { + (response as BlockResponse).body = empty.body; + } + if (options.includeL1PublishInfo) { + (response as BlockResponse).l1 = { published: false }; + } + if (options.includeAttestations) { + (response as BlockResponse).attestations = []; + } + return response; + } + /** * initializes the Aztec Node, wait for component to sync. * @param config - The configuration to be used by the aztec node. @@ -736,71 +1019,6 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, AztecNodeDeb return nodeInfo; } - /** - * Get a block specified by its block number, block hash, or 'latest'. - * @param block - The block parameter (block number, block hash, or 'latest'). - * @returns The requested block. - */ - public async getBlock(block: BlockParameter): Promise { - if (BlockHash.isBlockHash(block)) { - return this.getBlockByHash(block); - } - const blockNumber = block === 'latest' ? await this.getBlockNumber() : (block as BlockNumber); - if (blockNumber === BlockNumber.ZERO) { - return this.buildInitialBlock(); - } - return await this.blockSource.getL2Block(blockNumber); - } - - /** - * Get a block specified by its hash. - * @param blockHash - The block hash being requested. - * @returns The requested block. 
- */ - public async getBlockByHash(blockHash: BlockHash): Promise { - const initialBlockHash = await this.#getInitialHeaderHash(); - if (blockHash.equals(initialBlockHash)) { - return this.buildInitialBlock(); - } - return await this.blockSource.getL2BlockByHash(blockHash); - } - - private buildInitialBlock(): L2Block { - const initialHeader = this.worldStateSynchronizer.getCommitted().getInitialHeader(); - return L2Block.empty(initialHeader); - } - - /** - * Get a block specified by its archive root. - * @param archive - The archive root being requested. - * @returns The requested block. - */ - public async getBlockByArchive(archive: Fr): Promise { - return await this.blockSource.getL2BlockByArchive(archive); - } - - /** - * Method to request blocks. Will attempt to return all requested blocks but will return only those available. - * @param from - The start of the range of blocks to return. - * @param limit - The maximum number of blocks to obtain. - * @returns The blocks requested. - */ - public async getBlocks(from: BlockNumber, limit: number): Promise { - return (await this.blockSource.getBlocks(from, BlockNumber(limit))) ?? []; - } - - public async getCheckpoints(from: CheckpointNumber, limit: number): Promise { - return (await this.blockSource.getCheckpoints(from, limit)) ?? []; - } - - public async getCheckpointedBlocks(from: BlockNumber, limit: number) { - return (await this.blockSource.getCheckpointedBlocks(from, limit)) ?? []; - } - - public getCheckpointsDataForEpoch(epochNumber: EpochNumber) { - return this.blockSource.getCheckpointsDataForEpoch(epochNumber); - } - public async getCurrentMinFees(): Promise { return await this.feeProvider.getCurrentMinFees(); } @@ -818,26 +1036,6 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, AztecNodeDeb return GasFees.from({ feePerDaGas: 0n, feePerL2Gas: 0n }); } - /** - * Method to fetch the latest block number synchronized by the node. - * @returns The block number. 
- */ - public async getBlockNumber(): Promise { - return await this.blockSource.getBlockNumber(); - } - - public async getProvenBlockNumber(): Promise { - return await this.blockSource.getProvenBlockNumber(); - } - - public async getCheckpointedBlockNumber(): Promise { - return await this.blockSource.getCheckpointedL2BlockNumber(); - } - - public getCheckpointNumber(): Promise { - return this.blockSource.getCheckpointNumber(); - } - /** * Method to fetch the version of the package. * @returns The node package version @@ -955,7 +1153,13 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, AztecNodeDeb throw new Error(`Invalid tx: ${reason}`); } - await this.p2pClient!.sendTx(tx); + try { + await this.p2pClient!.sendTx(tx); + } catch (err) { + this.metrics.receivedTx(timer.ms(), false); + this.log.warn(`Mempool rejected tx ${txHash}: ${(err as Error).message}`, { txHash }); + throw err; + } const duration = timer.ms(); this.metrics.receivedTx(duration, true); this.log.info(`Received tx ${txHash} in ${duration}ms`, { txHash }); @@ -1274,41 +1478,6 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, AztecNodeDeb return preimage.leaf.value; } - public async getBlockHeader(block: BlockParameter = 'latest'): Promise { - if (BlockHash.isBlockHash(block)) { - const initialBlockHash = await this.#getInitialHeaderHash(); - if (block.equals(initialBlockHash)) { - // Block source doesn't handle initial header so we need to handle the case separately. - return this.worldStateSynchronizer.getCommitted().getInitialHeader(); - } - return this.blockSource.getBlockHeaderByHash(block); - } else { - // Block source doesn't handle initial header so we need to handle the case separately. - const blockNumber = block === 'latest' ? 
await this.getBlockNumber() : (block as BlockNumber); - if (blockNumber === BlockNumber.ZERO) { - return this.worldStateSynchronizer.getCommitted().getInitialHeader(); - } - return this.blockSource.getBlockHeader(block); - } - } - - /** - * Get a block header specified by its archive root. - * @param archive - The archive root being requested. - * @returns The requested block header. - */ - public async getBlockHeaderByArchive(archive: Fr): Promise { - return await this.blockSource.getBlockHeaderByArchive(archive); - } - - public getBlockData(number: BlockNumber): Promise { - return this.blockSource.getBlockData(number); - } - - public getBlockDataByArchive(archive: Fr): Promise { - return this.blockSource.getBlockDataByArchive(archive); - } - /** * Simulates the public part of a transaction with the current state. * @param tx - The transaction to simulate. @@ -1768,7 +1937,9 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, AztecNodeDeb // Check it's within world state sync range if (blockNumber > blockSyncedTo) { - throw new Error(`Queried block ${block} not yet synced by the node (node is synced upto ${blockSyncedTo}).`); + throw new Error( + `Queried block ${inspectBlockParameter(block)} not yet synced by the node (node is synced upto ${blockSyncedTo}).`, + ); } this.log.debug(`Using snapshot for block ${blockNumber}, world state synced upto ${blockSyncedTo}`); @@ -1788,23 +1959,31 @@ export class AztecNodeService implements AztecNode, AztecNodeAdmin, AztecNodeDeb return snapshot; } - /** Resolves a block parameter to a block number. */ + /** Resolves any {@link BlockParameter} variant to a concrete block number. 
*/ protected async resolveBlockNumber(block: BlockParameter): Promise { - if (block === 'latest') { - return BlockNumber(await this.blockSource.getBlockNumber()); + const resolved = await this.resolveBlockParameter(block); + if (resolved.number !== undefined) { + return resolved.number; } - if (BlockHash.isBlockHash(block)) { + if (resolved.hash !== undefined) { const initialBlockHash = await this.#getInitialHeaderHash(); - if (block.equals(initialBlockHash)) { + if (resolved.hash.equals(initialBlockHash)) { return BlockNumber.ZERO; } - const header = await this.blockSource.getBlockHeaderByHash(block); + const header = await this.blockSource.getBlockHeaderByHash(resolved.hash); + if (!header) { + throw new Error(`Block hash ${resolved.hash.toString()} not found.`); + } + return header.getBlockNumber(); + } + if (resolved.archive !== undefined) { + const header = await this.blockSource.getBlockHeaderByArchive(resolved.archive); if (!header) { - throw new Error(`Block hash ${block.toString()} not found.`); + throw new Error(`Block with archive ${resolved.archive.toString()} not found.`); } return header.getBlockNumber(); } - return block as BlockNumber; + throw new BadRequestError(`Invalid BlockParameter: ${JSON.stringify(block)}`); } /** diff --git a/yarn-project/aztec-node/src/sentinel/factory.ts b/yarn-project/aztec-node/src/sentinel/factory.ts index 251b9086a627..1bec08b6a22e 100644 --- a/yarn-project/aztec-node/src/sentinel/factory.ts +++ b/yarn-project/aztec-node/src/sentinel/factory.ts @@ -3,6 +3,7 @@ import { createLogger } from '@aztec/foundation/log'; import { createStore } from '@aztec/kv-store/lmdb-v2'; import type { P2PClient } from '@aztec/p2p'; import type { L2BlockSource } from '@aztec/stdlib/block'; +import type { ChainConfig } from '@aztec/stdlib/config'; import type { SlasherConfig } from '@aztec/stdlib/interfaces/server'; import type { DataStoreConfig } from '@aztec/stdlib/kv-store'; @@ -14,7 +15,7 @@ export async function createSentinel( 
epochCache: EpochCache, archiver: L2BlockSource, p2p: P2PClient, - config: SentinelConfig & DataStoreConfig & SlasherConfig, + config: SentinelConfig & DataStoreConfig & SlasherConfig & Pick, logger = createLogger('node:sentinel'), ): Promise { if (!config.sentinelEnabled) { diff --git a/yarn-project/aztec-node/src/sentinel/sentinel.test.ts b/yarn-project/aztec-node/src/sentinel/sentinel.test.ts index b7cbd09e118c..7e54ee16cd79 100644 --- a/yarn-project/aztec-node/src/sentinel/sentinel.test.ts +++ b/yarn-project/aztec-node/src/sentinel/sentinel.test.ts @@ -6,7 +6,6 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import { AztecLMDBStoreV2, openTmpStore } from '@aztec/kv-store/lmdb-v2'; import type { P2PClient } from '@aztec/p2p'; import { OffenseType, WANT_TO_SLASH_EVENT, type WantToSlashArgs } from '@aztec/slasher'; -import type { SlasherConfig } from '@aztec/slasher/config'; import { CommitteeAttestation, L2Block, @@ -18,7 +17,11 @@ import { import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; import { type L1RollupConstants, getEpochAtSlot, getSlotRangeForEpoch } from '@aztec/stdlib/epoch-helpers'; import type { CheckpointAttestation } from '@aztec/stdlib/p2p'; -import { makeCheckpointAttestation, makeCheckpointAttestationFromCheckpoint } from '@aztec/stdlib/testing'; +import { + TEST_COORDINATION_SIGNATURE_CONTEXT, + makeCheckpointAttestation, + makeCheckpointAttestationFromCheckpoint, +} from '@aztec/stdlib/testing'; import type { ValidatorStats, ValidatorStatusHistory, @@ -29,7 +32,7 @@ import type { import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; -import { Sentinel } from './sentinel.js'; +import { Sentinel, type SentinelRuntimeConfig } from './sentinel.js'; import { SentinelStore } from './store.js'; describe('sentinel', () => { @@ -47,13 +50,12 @@ describe('sentinel', () => { let epoch: EpochNumber; let ts: bigint; let l1Constants: L1RollupConstants; - 
const config: Pick< - SlasherConfig, - 'slashInactivityTargetPercentage' | 'slashInactivityPenalty' | 'slashInactivityConsecutiveEpochThreshold' - > = { + const config: SentinelRuntimeConfig = { slashInactivityPenalty: 100n, slashInactivityTargetPercentage: 0.8, slashInactivityConsecutiveEpochThreshold: 1, + l1ChainId: TEST_COORDINATION_SIGNATURE_CONTEXT.chainId, + l1Contracts: { rollupAddress: TEST_COORDINATION_SIGNATURE_CONTEXT.rollupAddress }, }; beforeEach(async () => { @@ -168,7 +170,7 @@ describe('sentinel', () => { publishedCheckpoint = await emitCheckpointEvent(checkpoint, checkpointAttestations); const attestorsFromCheckpoint = compactArray( - getAttestationInfoFromPublishedCheckpoint(publishedCheckpoint).map(info => + getAttestationInfoFromPublishedCheckpoint(publishedCheckpoint, TEST_COORDINATION_SIGNATURE_CONTEXT).map(info => info.status === 'recovered-from-signature' || info.status === 'provided-as-address' ? info.address : undefined, @@ -213,7 +215,10 @@ describe('sentinel', () => { // Verify that getAttestationInfoFromPublishedCheckpoint returns 4 entries total: // - 2 with status 'recovered-from-signature' (actual attestations with valid signatures) // - 2 with status 'provided-as-address' (placeholders for missing validators) - const attestationInfo = getAttestationInfoFromPublishedCheckpoint(publishedCheckpoint); + const attestationInfo = getAttestationInfoFromPublishedCheckpoint( + publishedCheckpoint, + TEST_COORDINATION_SIGNATURE_CONTEXT, + ); expect(attestationInfo).toHaveLength(4); const recoveredSignatures = attestationInfo.filter(info => info.status === 'recovered-from-signature'); const placeholders = attestationInfo.filter(info => info.status === 'provided-as-address'); @@ -946,10 +951,7 @@ class TestSentinel extends Sentinel { archiver: L2BlockSource, p2p: P2PClient, store: SentinelStore, - config: Pick< - SlasherConfig, - 'slashInactivityTargetPercentage' | 'slashInactivityPenalty' | 'slashInactivityConsecutiveEpochThreshold' - >, + 
config: SentinelRuntimeConfig, protected override blockStream: L2BlockStream, ) { super(epochCache, archiver, p2p, store, config); diff --git a/yarn-project/aztec-node/src/sentinel/sentinel.ts b/yarn-project/aztec-node/src/sentinel/sentinel.ts index be273b028d4f..a3b2ce0d4a62 100644 --- a/yarn-project/aztec-node/src/sentinel/sentinel.ts +++ b/yarn-project/aztec-node/src/sentinel/sentinel.ts @@ -21,7 +21,9 @@ import { type L2BlockStreamEventHandler, getAttestationInfoFromPublishedCheckpoint, } from '@aztec/stdlib/block'; +import type { ChainConfig } from '@aztec/stdlib/config'; import { getEpochAtSlot, getSlotRangeForEpoch, getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; +import type { CoordinationSignatureContext } from '@aztec/stdlib/p2p'; import type { SingleValidatorStats, ValidatorStats, @@ -36,6 +38,12 @@ import EventEmitter from 'node:events'; import { SentinelStore } from './store.js'; +export type SentinelRuntimeConfig = Pick< + SlasherConfig, + 'slashInactivityTargetPercentage' | 'slashInactivityPenalty' | 'slashInactivityConsecutiveEpochThreshold' +> & + Pick; + /** Maps a validator status to its category: proposer or attestation. 
*/ function statusToCategory(status: ValidatorStatusInSlot): ValidatorStatusType { switch (status) { @@ -65,10 +73,7 @@ export class Sentinel extends (EventEmitter as new () => WatcherEmitter) impleme protected archiver: L2BlockSource, protected p2p: P2PClient, protected store: SentinelStore, - protected config: Pick< - SlasherConfig, - 'slashInactivityTargetPercentage' | 'slashInactivityPenalty' | 'slashInactivityConsecutiveEpochThreshold' - >, + protected config: SentinelRuntimeConfig, protected logger = createLogger('node:sentinel'), ) { super(); @@ -77,6 +82,13 @@ export class Sentinel extends (EventEmitter as new () => WatcherEmitter) impleme this.runningPromise = new RunningPromise(this.work.bind(this), logger, interval); } + private getSignatureContext(): CoordinationSignatureContext { + return { + chainId: this.config.l1ChainId, + rollupAddress: this.config.l1Contracts.rollupAddress, + }; + } + public updateConfig(config: Partial) { this.config = { ...this.config, ...config }; } @@ -117,7 +129,7 @@ export class Sentinel extends (EventEmitter as new () => WatcherEmitter) impleme this.slotNumberToCheckpoint.set(checkpoint.checkpoint.header.slotNumber, { checkpointNumber: checkpoint.checkpoint.number, archive: checkpoint.checkpoint.archive.root.toString(), - attestors: getAttestationInfoFromPublishedCheckpoint(checkpoint) + attestors: getAttestationInfoFromPublishedCheckpoint(checkpoint, this.getSignatureContext()) .filter(a => a.status === 'recovered-from-signature') .map(a => a.address!), }); diff --git a/yarn-project/aztec.js/src/contract/wait_for_proven.ts b/yarn-project/aztec.js/src/contract/wait_for_proven.ts index 2d0254aa1b63..6ae9534ee0d8 100644 --- a/yarn-project/aztec.js/src/contract/wait_for_proven.ts +++ b/yarn-project/aztec.js/src/contract/wait_for_proven.ts @@ -28,7 +28,7 @@ export async function waitForProven(node: AztecNode, receipt: TxReceipt, opts?: } return await retryUntil( async () => { - const provenBlock = await 
node.getProvenBlockNumber(); + const provenBlock = await node.getBlockNumber('proven'); return provenBlock >= receipt.blockNumber! ? provenBlock : undefined; }, 'isProven', diff --git a/yarn-project/blob-client/src/client/config.ts b/yarn-project/blob-client/src/client/config.ts index 63e48c69f013..3ca63df7d965 100644 --- a/yarn-project/blob-client/src/client/config.ts +++ b/yarn-project/blob-client/src/client/config.ts @@ -93,7 +93,7 @@ export const blobClientConfigMapping: ConfigMappingsType = { blobSinkMapSizeKb: { env: 'BLOB_SINK_MAP_SIZE_KB', description: 'The maximum possible size of the blob sink DB in KB. Overwrites the general dataStoreMapSizeKb.', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), }, blobAllowEmptySources: { env: 'BLOB_ALLOW_EMPTY_SOURCES', @@ -116,7 +116,7 @@ export const blobClientConfigMapping: ConfigMappingsType = { blobHealthcheckUploadIntervalMinutes: { env: 'BLOB_HEALTHCHECK_UPLOAD_INTERVAL_MINUTES', description: 'Interval in minutes for uploading healthcheck file to file store (default: 60 = 1 hour)', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), }, l1HttpTimeoutMS: { env: 'ETHEREUM_HTTP_TIMEOUT_MS', diff --git a/yarn-project/cli/src/cmds/aztec_node/block_number.ts b/yarn-project/cli/src/cmds/aztec_node/block_number.ts index c9cd69c6ba37..cc2d90dc421f 100644 --- a/yarn-project/cli/src/cmds/aztec_node/block_number.ts +++ b/yarn-project/cli/src/cmds/aztec_node/block_number.ts @@ -3,7 +3,7 @@ import type { LogFn } from '@aztec/foundation/log'; export async function blockNumber(nodeUrl: string, log: LogFn) { const aztecNode = createAztecNodeClient(nodeUrl); - const [latestNum, provenNum] = await Promise.all([aztecNode.getBlockNumber(), aztecNode.getProvenBlockNumber()]); + const [latestNum, provenNum] = await Promise.all([aztecNode.getBlockNumber(), aztecNode.getBlockNumber('proven')]); log(`Latest block: ${latestNum}`); log(`Proven block: ${provenNum}`); } diff --git 
a/yarn-project/cli/src/utils/inspect.ts b/yarn-project/cli/src/utils/inspect.ts index 67eb46e9abc3..1943e274b408 100644 --- a/yarn-project/cli/src/utils/inspect.ts +++ b/yarn-project/cli/src/utils/inspect.ts @@ -9,14 +9,13 @@ export async function inspectBlock( log: LogFn, opts: { showTxs?: boolean } = {}, ) { - const block = await aztecNode.getBlock(blockNumber); + const block = await aztecNode.getBlock(blockNumber, { includeTransactions: opts.showTxs }); if (!block) { log(`No block found for block number ${blockNumber}`); return; } - const blockHash = await block.hash(); - log(`Block ${blockNumber} (${blockHash.toString()})`); + log(`Block ${blockNumber} (${block.hash.toString()})`); log(` Total fees: ${block.header.totalFees.toBigInt()}`); log(` Total mana used: ${block.header.totalManaUsed.toBigInt()}`); log( @@ -25,12 +24,12 @@ export async function inspectBlock( log(` Coinbase: ${block.header.globalVariables.coinbase}`); log(` Fee recipient: ${block.header.globalVariables.feeRecipient}`); log(` Timestamp: ${new Date(Number(block.header.globalVariables.timestamp) * 500)}`); - if (opts.showTxs) { + if (opts.showTxs && block.body) { log(``); for (const txHash of block.body.txEffects.map(tx => tx.txHash)) { await inspectTx(aztecNode, txHash, log, { includeBlockInfo: false }); } - } else { + } else if (block.body) { log(` Transactions: ${block.body.txEffects.length}`); } } diff --git a/yarn-project/end-to-end/src/bench/node_rpc_perf.test.ts b/yarn-project/end-to-end/src/bench/node_rpc_perf.test.ts index 246053f17b30..788a08ef828e 100644 --- a/yarn-project/end-to-end/src/bench/node_rpc_perf.test.ts +++ b/yarn-project/end-to-end/src/bench/node_rpc_perf.test.ts @@ -287,7 +287,7 @@ describe('e2e_node_rpc_perf', () => { }); it('benchmarks getProvenBlockNumber', async () => { - const { stats } = await benchmark('getProvenBlockNumber', () => aztecNode.getProvenBlockNumber()); + const { stats } = await benchmark('getProvenBlockNumber', () => 
aztecNode.getBlockNumber('proven')); addResult('getProvenBlockNumber', stats); expect(stats.avg).toBeLessThan(1000); }); @@ -324,16 +324,16 @@ describe('e2e_node_rpc_perf', () => { expect(stats.avg).toBeLessThan(5000); }); - it('benchmarks getBlockByArchive', async () => { - const { stats } = await benchmark('getBlockByArchive', () => aztecNode.getBlockByArchive(blockArchive)); + it('benchmarks getBlock by archive', async () => { + const { stats } = await benchmark('getBlockByArchive', () => + aztecNode.getBlock({ archive: blockArchive }, { includeTransactions: true }), + ); addResult('getBlockByArchive', stats); expect(stats.avg).toBeLessThan(3000); }); - it('benchmarks getBlockHeaderByArchive', async () => { - const { stats } = await benchmark('getBlockHeaderByArchive', () => - aztecNode.getBlockHeaderByArchive(blockArchive), - ); + it('benchmarks getBlock header by archive', async () => { + const { stats } = await benchmark('getBlockHeaderByArchive', () => aztecNode.getBlock({ archive: blockArchive })); addResult('getBlockHeaderByArchive', stats); expect(stats.avg).toBeLessThan(2000); }); diff --git a/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts b/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts index 9ff153020606..b78b7f79bd32 100644 --- a/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts +++ b/yarn-project/end-to-end/src/composed/ha/e2e_ha_full.test.ts @@ -26,6 +26,7 @@ import type { TestDateProvider } from '@aztec/foundation/timer'; import { GovernanceProposerAbi } from '@aztec/l1-artifacts/GovernanceProposerAbi'; import { StatefulTestContractArtifact } from '@aztec/noir-test-contracts.js/StatefulTest'; import { type AttestationInfo, getAttestationInfoFromPublishedCheckpoint } from '@aztec/stdlib/block'; +import { Checkpoint } from '@aztec/stdlib/checkpoint'; import type { GenesisData } from '@aztec/stdlib/world-state'; import type { ValidatorClient } from '@aztec/validator-client'; import { 
PostgresSlashingProtectionDatabase } from '@aztec/validator-ha-signer/db'; @@ -88,6 +89,10 @@ describe('HA Full Setup', () => { let governanceProposer: GovernanceProposerContract; /** Per-node initial keystore JSON (all 4 attesters, node's own publisher) for restore after reload test */ let initialKeystoreJsons: string[]; + const getSignatureContext = () => ({ + chainId: config.l1ChainId, + rollupAddress: deployL1ContractsValues.l1ContractAddresses.rollupAddress, + }); beforeAll(async () => { // Check required environment variables @@ -349,8 +354,8 @@ describe('HA Full Setup', () => { logger.info(`Block contains ${block.block.body.txEffects.length} transaction(s)`); // get attestations from checkpoint - const [checkpoint] = await aztecNode.getCheckpoints(block.checkpointNumber, 1); - const attestations = checkpoint.attestations.filter(a => !a.signature.isEmpty()); + const [checkpoint] = await aztecNode.getCheckpoints(block.checkpointNumber, 1, { includeAttestations: true }); + const attestations = (checkpoint.attestations ?? []).filter(a => !a.signature.isEmpty()); // Should have enough attestations for quorum const quorum = Math.floor((COMMITTEE_SIZE * 2) / 3) + 1; @@ -607,8 +612,8 @@ describe('HA Full Setup', () => { }); expect(receipt.receipt.blockNumber).toBeDefined(); const [block] = await aztecNode.getCheckpointedBlocks(receipt.receipt.blockNumber!, 1); - const [cp] = await aztecNode.getCheckpoints(block!.checkpointNumber, 1); - const att = cp.attestations.filter(a => !a.signature.isEmpty()); + const [cp] = await aztecNode.getCheckpoints(block!.checkpointNumber, 1, { includeAttestations: true }); + const att = (cp.attestations ?? 
[]).filter(a => !a.signature.isEmpty()); expect(att.length).toBeGreaterThanOrEqual(quorum); logger.info(`Phase 2: block ${receipt.receipt.blockNumber}, ${att.length} attestations (quorum ${quorum})`); } finally { @@ -779,11 +784,22 @@ describe('HA Full Setup', () => { ); // SECONDARY CHECK: Verify checkpoint attestations match database records - const [publishedCheckpoint] = await aztecNode.getCheckpoints(block.checkpointNumber, 1); - const attestationInfos = getAttestationInfoFromPublishedCheckpoint({ - attestations: publishedCheckpoint.attestations, - checkpoint: publishedCheckpoint.checkpoint, + const [publishedCheckpoint] = await aztecNode.getCheckpoints(block.checkpointNumber, 1, { + includeAttestations: true, }); + const attestationInfos = getAttestationInfoFromPublishedCheckpoint( + { + attestations: publishedCheckpoint.attestations ?? [], + checkpoint: new Checkpoint( + publishedCheckpoint.archive, + publishedCheckpoint.header, + [], + publishedCheckpoint.number, + publishedCheckpoint.feeAssetPriceModifier, + ), + }, + getSignatureContext(), + ); // Filter to only valid attestations with recovered addresses const validAttestations = attestationInfos.filter( diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 1904675cb6bb..33a18f8ce2ff 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -587,7 +587,7 @@ describe('e2e_block_building', () => { await Promise.race(txHashes.map(txHash => waitForTx(aztecNode, txHash, { timeout: 60 }))); logger.warn(`At least one tx has been mined`); - const lastBlock = await context.aztecNode.getBlockHeader(); + const lastBlock = await context.aztecNode.getBlockHeader('latest'); expect(lastBlock).toBeDefined(); logger.warn(`Latest block is ${lastBlock!.getBlockNumber()}`, { state: lastBlock?.state.partial }); @@ -621,7 +621,7 @@ describe('e2e_block_building', () => { await 
cheatCodes.rollup.advanceToNextEpoch(); const bn = await aztecNode.getBlockNumber(); - while ((await aztecNode.getProvenBlockNumber()) < bn) { + while ((await aztecNode.getBlockNumber('proven')) < bn) { await sleep(1000); } diff --git a/yarn-project/end-to-end/src/e2e_bot.test.ts b/yarn-project/end-to-end/src/e2e_bot.test.ts index f5e02398f6de..381a22dfd889 100644 --- a/yarn-project/end-to-end/src/e2e_bot.test.ts +++ b/yarn-project/end-to-end/src/e2e_bot.test.ts @@ -288,7 +288,7 @@ describe('e2e_bot', () => { expect(receipt.blockNumber).toBeDefined(); // Verify L2→L1: the block should contain at least one non-zero L2→L1 message - const block = await aztecNode.getBlock(receipt.blockNumber!); + const block = await aztecNode.getBlock(receipt.blockNumber!, { includeTransactions: true }); expect(block).toBeDefined(); const l2ToL1Msgs = block!.body.txEffects.flatMap(e => e.l2ToL1Msgs).filter(m => !m.isZero()); expect(l2ToL1Msgs.length).toBeGreaterThanOrEqual(1); diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts index 9a5a866be510..b2e7aa800d04 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts @@ -98,7 +98,7 @@ export class CrossChainMessagingTest { async catchUpProvenChain() { const bn = await this.aztecNode.getBlockNumber(); - while ((await this.aztecNode.getProvenBlockNumber()) < bn) { + while ((await this.aztecNode.getBlockNumber('proven')) < bn) { await sleep(1000); } } diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts index 3f12389109ac..1f8a1ab77649 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts +++ 
b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts @@ -62,7 +62,7 @@ describe('e2e_cross_chain_messaging l1_to_l2', () => { const waitForBlockToCheckpoint = async (blockNumber: BlockNumber) => { return await retryUntil( async () => { - const checkpointedBlockNumber = await aztecNode.getCheckpointedBlockNumber(); + const checkpointedBlockNumber = await aztecNode.getBlockNumber('checkpointed'); const isCheckpointed = checkpointedBlockNumber >= blockNumber; if (!isCheckpointed) { return undefined; diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l2_to_l1.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l2_to_l1.test.ts index 1874f64e75d0..6c123772337b 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l2_to_l1.test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l2_to_l1.test.ts @@ -71,7 +71,7 @@ describe('e2e_cross_chain_messaging l2_to_l1', () => { await t.advanceToEpochProven(txReceipt); // Check that the block contains the 2 messages. - const block = (await aztecNode.getBlock(blockNumber))!; + const block = (await aztecNode.getBlock(blockNumber, { includeTransactions: true }))!; const l2ToL1Messages = block.body.txEffects.flatMap(txEffect => txEffect.l2ToL1Msgs); expect(l2ToL1Messages).toStrictEqual([computeMessageLeaf(messages[0]), computeMessageLeaf(messages[1])]); @@ -130,7 +130,7 @@ describe('e2e_cross_chain_messaging l2_to_l1', () => { // Check that the block contains all the messages. { - const block = (await aztecNode.getBlock(blockNumber))!; + const block = (await aztecNode.getBlock(blockNumber, { includeTransactions: true }))!; const messagesForAllTxs = block.body.txEffects.map(txEffect => txEffect.l2ToL1Msgs); // We cannot guarantee the order of txs in a block, so we rearrange the leaves if call1 was rolled up first. const [firstTx, secondTx] = messagesForAllTxs[0].length === 3 ? 
[tx0, tx1] : [tx1, tx0]; diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts index 2585f59a5782..cb0a5b7cb54b 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts @@ -123,7 +123,9 @@ describe('e2e_deploy_contract contract class registration', () => { // Contract instance deployed event is emitted via private logs. const blockNumber = await aztecNode.getBlockNumber(); - const logs = (await aztecNode.getBlock(blockNumber))!.getPrivateLogs(); + const logs = (await aztecNode.getBlock(blockNumber, { includeTransactions: true }))!.body.txEffects.flatMap( + t => t.privateLogs, + ); expect(logs.length).toBe(1); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_equivocation.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_equivocation.test.ts new file mode 100644 index 000000000000..d1d5aa747698 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_equivocation.test.ts @@ -0,0 +1,279 @@ +import type { AztecNodeService } from '@aztec/aztec-node'; +import { EthAddress } from '@aztec/aztec.js/addresses'; +import { Fr } from '@aztec/aztec.js/fields'; +import type { Logger } from '@aztec/aztec.js/log'; +import { asyncMap } from '@aztec/foundation/async-map'; +import { CheckpointNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { times } from '@aztec/foundation/collection'; +import { SecretValue } from '@aztec/foundation/config'; +import { retryUntil } from '@aztec/foundation/retry'; +import { bufferToHex } from '@aztec/foundation/string'; +import { getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; +import { tryStop } from '@aztec/stdlib/interfaces/server'; + +import { jest } from '@jest/globals'; +import { privateKeyToAccount } from 'viem/accounts'; + +import { 
getPrivateKeyFromIndex } from '../fixtures/utils.js'; +import { EpochsTestContext } from './epochs_test.js'; + +jest.setTimeout(1000 * 60 * 15); + +const NODE_COUNT = 4; + +/** + * E2E test for the equivocation recovery scenario under proposer pipelining. + * + * Two conflicting checkpoint proposals are gossiped during the same slot: + * - Node A (holds all 4 validator keys) publishes the "real" checkpoint to L1 + * but never broadcasts via gossipsub (`skipBroadcastProposals + skipIncomingProposals`). + * - The "X" node (B or C, whichever holds the slot proposer's key) broadcasts an + * alternative checkpoint that reaches B/C/D via gossipsub but never lands on L1 + * (`skipPublishingCheckpointsPercent: 100`). + * + * The test verifies that L1 sync overrides the gossip-only proposal on all observer + * nodes (B, C, D) once A's L1-confirmed checkpoint propagates via the archiver. + */ +describe('e2e_epochs/epochs_equivocation', () => { + let logger: Logger; + let test: EpochsTestContext; + let nodes: AztecNodeService[]; + + afterEach(async () => { + jest.restoreAllMocks(); + await test?.teardown(); + }); + + it('L1-confirmed checkpoint overrides gossip-only equivocating proposal', async () => { + // Build 4 validators (V1..V4) using getPrivateKeyFromIndex(i+3), same convention as other epoch tests. 
+ const validators = times(NODE_COUNT, i => { + const privateKey = bufferToHex(getPrivateKeyFromIndex(i + 3)!); + const attester = EthAddress.fromString(privateKeyToAccount(privateKey).address); + return { attester, withdrawer: attester, privateKey, bn254SecretKey: new SecretValue(Fr.random().toBigInt()) }; + }); + + // Timing calculation for 3 blocks per checkpoint with 8s sub-slots: + // - initializationOffset = 0.5s (test mode, ethereumSlotDuration < 8) + // - 3 blocks x 8s = 24s + // - checkpointFinalization = 0.5s (assemble) + 0 (p2p in test) + 2s (L1 publish) = 2.5s + // - finalBlockDuration = 8s (re-execution) + // - Total: 0.5 + 24 + 8 + 2.5 = 35s => use 36s + test = await EpochsTestContext.setup({ + numberOfAccounts: 0, + initialValidators: validators, + enableProposerPipelining: true, + inboxLag: 2, + mockGossipSubNetwork: true, + disableAnvilTestWatcher: true, + startProverNode: false, + aztecEpochDuration: 4, + aztecProofSubmissionEpochs: 1024, + enforceTimeTable: true, + ethereumSlotDuration: 6, + aztecSlotDuration: 36, + blockDurationMs: 8000, + attestationPropagationTime: 0.5, + l1PublishingTime: 2, + aztecTargetCommitteeSize: 4, + skipInitialSequencer: true, + }); + + logger = test.logger; + + // We set different coinbase addresses so different nodes produce different blocks + const coinbaseA = EthAddress.fromNumber(0xa); + const coinbaseB = EthAddress.fromNumber(0xb); + const coinbaseC = EthAddress.fromNumber(0xc); + + // The private keys held by each node: + // A: all 4 keys → self-attests with all validators, reaches quorum without inbound attestations + // B: V1 + V2 + // C: V3 + V4 + // D: no validator keys (RPC-only observer) + const keysA = validators.map(v => v.privateKey as `0x${string}`); + const keysB = [validators[0].privateKey, validators[1].privateKey] as `0x${string}`[]; + const keysC = [validators[2].privateKey, validators[3].privateKey] as `0x${string}`[]; + + // All sequencers start with dontStartSequencer so we can warp the clock 
first. + nodes = await asyncMap( + [ + { + keys: keysA, + coinbase: coinbaseA, + extraOpts: { skipIncomingProposals: true, skipBroadcastProposals: true }, + }, + { + keys: keysB, + coinbase: coinbaseB, + extraOpts: { skipPublishingCheckpointsPercent: 100 }, + }, + { + keys: keysC, + coinbase: coinbaseC, + extraOpts: { skipPublishingCheckpointsPercent: 100 }, + }, + ], + ({ keys, coinbase, extraOpts }) => + test.createValidatorNode(keys, { + dontStartSequencer: true, + coinbase, + buildCheckpointIfEmpty: true, + minTxsPerBlock: 0, + ...extraOpts, + }), + ); + + // Node D: non-validator observer node + const nodeD = await test.createNonValidatorNode({ buildCheckpointIfEmpty: true, minTxsPerBlock: 0 }); + nodes.push(nodeD); + const [nodeB, nodeC] = nodes.slice(1); + + logger.warn('All nodes created', { nodes: nodes.length }); + + // Determine the next proposer slot by scanning upcoming slots. + // Since A holds all 4 keys and B/C each hold 2, the slot proposer is always held by A + // and exactly one of B or C. We identify which one (X) and use its coinbase in assertions. + const { slot: currentSlot } = test.epochCache.getEpochAndSlotNow(); + // Pick a target slot 2 ahead so there's room for the pipelining build window to engage. + // With pipelining, the sequencer builds slot (targetSlot+1) while the clock is at targetSlot, + // so the proposer we care about is for targetSlot+1 (the submission slot). 
+ const targetSlot = SlotNumber(currentSlot + 2); + const submissionSlot = SlotNumber(targetSlot + 1); + + const attesterAddresses = validators.map(v => EthAddress.fromString(privateKeyToAccount(v.privateKey).address)); + logger.warn('Validator attester addresses', { + V1: attesterAddresses[0], + V2: attesterAddresses[1], + V3: attesterAddresses[2], + V4: attesterAddresses[3], + }); + logger.warn('Validator-to-node assignment', { A: 'V1,V2,V3,V4', B: 'V1,V2', C: 'V3,V4', D: 'none' }); + + const proposerAttester = await test.epochCache.getProposerAttesterAddressInSlot(submissionSlot); + if (!proposerAttester) { + throw new Error(`No proposer found for slot ${submissionSlot}`); + } + logger.warn(`Expected proposer for submission slot`, { submissionSlot, proposerAttester }); + + // Warp to one L1 slot before the target L2 slot so pipelining's build window engages. + const slotStartTimestamp = getTimestampForSlot(targetSlot, test.constants); + const warpTo = slotStartTimestamp - BigInt(test.L1_BLOCK_TIME_IN_S); + logger.warn(`Warping to L1 timestamp ${warpTo} (one L1 slot before L2 slot ${targetSlot})`); + await test.context.cheatCodes.eth.warp(Number(warpTo), { resetBlockInterval: true }); + + // Start all sequencers now that the clock is warped. + const sequencers = nodes.slice(0, 3).map(n => n.getSequencer()!); + const { failEvents } = test.watchSequencerEvents(sequencers, i => ({ validator: ['A', 'B', 'C'][i] })); + await Promise.all(sequencers.map(s => s.start())); + logger.warn('All sequencers started'); + + // Wait until each of B, C, D sees a proposed block for submissionSlot with coinbase B or C. + // This confirms the gossip-only equivocating proposal from B or C has propagated. + // REFACTOR: This is candidate for a "wait until all nodes see a block with these properties" helper in the test context. 
+ const gossipTimeout = test.L2_SLOT_DURATION_IN_S * 4; + await Promise.all( + [nodeB, nodeC, nodeD].map(async (node, idx) => { + const nodeName = ['B', 'C', 'D'][idx]; + let observedCoinbase: EthAddress | undefined; + await retryUntil( + async () => { + const block = await node.getBlock('proposed'); + if (!block) { + return false; + } + const slot = block.header.globalVariables.slotNumber; + const cb = block.header.globalVariables.coinbase; + if (slot === submissionSlot && (cb.equals(coinbaseB) || cb.equals(coinbaseC))) { + observedCoinbase = cb; + return true; + } + return false; + }, + `${nodeName} sees gossip-only proposed block for slot ${submissionSlot}`, + gossipTimeout, + 0.5, + ); + logger.warn(`Node ${nodeName} observed gossip-only coinbase for slot ${submissionSlot}`, { observedCoinbase }); + }), + ); + + // Now wait until each of B, C, D has a checkpointed block for submissionSlot with coinbaseA. + // This confirms A's L1-confirmed checkpoint has overridden the gossip-only proposal. + // REFACTOR: This is candidate for a "wait until all nodes see a block with these properties" helper in the test context. + const overrideTimeout = test.L2_SLOT_DURATION_IN_S * 4; + logger.warn(`Waiting for L1-sync override on B, C, D (timeout=${overrideTimeout}s)`); + await Promise.all( + [nodeB, nodeC, nodeD].map(async (node, idx) => { + const nodeName = ['B', 'C', 'D'][idx]; + await retryUntil( + async () => { + const block = await node.getBlock('checkpointed'); + if (!block) { + return false; + } + const slot = block.header.globalVariables.slotNumber; + const cb = block.header.globalVariables.coinbase; + return slot >= submissionSlot && cb.equals(coinbaseA); + }, + `${nodeName} checkpointed block for slot ${submissionSlot} with coinbaseA`, + overrideTimeout, + 0.5, + ); + }), + ); + + // Assert no spurious failures on B, C. 
+ // Node A (index 2) generates lots of proposer-rollup-check-failed noise because it has + // skipIncomingProposals set and can't build a valid checkpoint for slot 2. + // Nodes B (index 3) and C (index 4) generate checkpoint-publish-failed at the submission slot + // because skipPublishingCheckpointsPercent: 100 causes their publish to be intentionally skipped. + const observerFailEvents = failEvents.filter( + e => + e.sequencerIndex !== 2 && // node A + !(e.type === 'proposer-rollup-check-failed' && e.reason === 'Rollup contract check failed') && + !(e.type === 'checkpoint-publish-failed' && e.slot === submissionSlot), // expected skip-publish from B/C + ); + if (observerFailEvents.length > 0) { + logger.error('Unexpected fail events on observer sequencers', observerFailEvents); + } + expect(observerFailEvents).toEqual([]); + + // Then heal. Stop A, re-enable checkpoint publishing on B and C, expect chain to advance. + logger.warn('Stopping node A and re-enabling publishing on B and C'); + await tryStop(nodes[0], logger); + + const baseline = test.monitor.checkpointNumber; + logger.warn(`Checkpoint baseline after equivocation: ${baseline}`); + + await nodes[1].setConfig({ skipPublishingCheckpointsPercent: 0 }); + await nodes[2].setConfig({ skipPublishingCheckpointsPercent: 0 }); + + const healTarget = CheckpointNumber(baseline + 2); + const healTimeout = test.L2_SLOT_DURATION_IN_S * 8; + logger.warn(`Waiting for checkpoint ${healTarget} (timeout=${healTimeout}s)`); + await test.waitUntilCheckpointNumber(healTarget, healTimeout); + + expect(test.monitor.checkpointNumber).toBeGreaterThanOrEqual(healTarget); + logger.warn(`Network healed: checkpoint ${test.monitor.checkpointNumber}`); + + // REFACTOR: This is candidate for a "wait until all nodes sync to a chain tip with these properties" helper in the test context. 
+ await Promise.all( + [nodeB, nodeC, nodeD].map((node, idx) => + retryUntil( + async () => { + const tips = await node.getL2Tips(); + return tips.checkpointed.checkpoint.number >= healTarget; + }, + `${'BCD'[idx]} synced to checkpoint ${healTarget}`, + healTimeout, + 0.5, + ), + ), + ); + + // TODO(A-980): assert the equivocating proposer of the first slot is eventually slashed + // for the DUPLICATE_PROPOSAL offense. Slasher is currently disabled in the harness + // (slasherEnabled: false) and enabling it requires plumbing offense submission and + // waiting for the slasher's offense window. + }); +}); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_high_tps_block_building.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_high_tps_block_building.test.ts index 0bdcba6fee4d..d70747193f1d 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_high_tps_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_high_tps_block_building.test.ts @@ -7,11 +7,13 @@ import type { Logger } from '@aztec/aztec.js/log'; import { waitForTx } from '@aztec/aztec.js/node'; import type { Operator } from '@aztec/ethereum/deploy-aztec-l1-contracts'; import { asyncMap } from '@aztec/foundation/async-map'; -import { BlockNumber } from '@aztec/foundation/branded-types'; -import { times, timesAsync } from '@aztec/foundation/collection'; +import { BlockNumber, SlotNumber } from '@aztec/foundation/branded-types'; +import { chunkBy, times, timesAsync } from '@aztec/foundation/collection'; import { SecretValue } from '@aztec/foundation/config'; +import { sleepUntil } from '@aztec/foundation/sleep'; import { bufferToHex } from '@aztec/foundation/string'; import type { SpamContract } from '@aztec/noir-test-contracts.js/Spam'; +import { getSlotAtTimestamp, getTimestampForSlot } from '@aztec/stdlib/epoch-helpers'; import { jest } from '@jest/globals'; import { privateKeyToAccount } from 'viem/accounts'; @@ -24,21 +26,47 @@ jest.setTimeout(1000 * 60 * 10); 
const NODE_COUNT = 3; -// We send 8 txs total, each taking several seconds to process (see sequencerFakeDelayPerTxMs), with a total -// L2 slot time of 24s, with l1PublishingTime set to the full 8s L1 slot duration and attestationPropagationTime of 1s. -// This leaves us with a few seconds for executing txs. This test will check that proposers honor the timetable -// and do not try to include more than N txs per block. Should we ever implement preemptive block building, -// sequencers will end up with more time, so we'll need to bump the EXPECTED_MAX_TXS_PER_BLOCK value. -const TX_COUNT = 8; +// Multi-block-per-slot test under pipelining. Exercises a full checkpoint (4 blocks × 2 txs) and verifies the +// checkpoint tx lands on the 2nd L1 block of its target slot. +// +// Config: aztecSlotDuration=36s, ethereumSlotDuration=12s (3 L1 blocks / L2 slot), blockDuration=6s, +// fakeProcessingDelayPerTxMs=2500ms, attestationPropagationTime=1s, l1PublishingTime=12s, +// txDelayerMaxInclusionTimeIntoSlot=1s. +// +// Time inside a build slot (36s total): +// T=0-1 (1s) init (checkpointInitializationTime) +// T=1-7 (6s) block 1 ── 2 txs × 2.5s = 5s, fits in 6s block budget +// T=7-13 (6s) block 2 +// T=13-19 (6s) block 3 +// T=19-25 (6s) block 4 +// T=25-26 (1s) checkpoint assemble +// T=26-27 (1s) proposal out (p2pPropagationTime) ┐ +// T=27-33 (6s) validators re-execute last block │ timeReservedAtEnd = 9s +// T=33-34 (1s) attestations back (p2pPropagationTime) ┘ +// T=34-36 (2s) slack +// +// At target-slot start (T=0 of target slot) the proposer submits the L1 propose tx. With +// txDelayerMaxInclusionTimeIntoSlot=1s, it falls inside the current L1 slot window and lands in the next +// L1 block — the 2nd L1 block of the target slot (offset=1). It can also land in the 1st L1 block (offset=0) +// if attestations arrive fast enough that the proposer submits inside the last second of the build slot. 
+// Expected mining layout for a target slot: +// +// T=0 T=12 T=24 T=36 +// ├──────────────────┼──────────────────┼──────────────────┤ +// │ 1st L1 block │ 2nd L1 block │ 3rd L1 block │ +// │ ← fast submit │ ← typical │ │ +// +const BLOCKS_PER_CHECKPOINT = 4; +const TXS_PER_BLOCK = 2; +const CHECKPOINTS_TO_CHECK = 3; +// Extra txs beyond the ones we assert on: one partial checkpoint at startup (sequencers start mid-slot with +// only one blockDuration of slack) plus a buffer at the tail. +const TX_COUNT = BLOCKS_PER_CHECKPOINT * TXS_PER_BLOCK * (CHECKPOINTS_TO_CHECK + 1); const TX_DURATION_MS = 2500; -const EXPECTED_MAX_TXS_PER_BLOCK = 3; - -// Test that sequencers and validators can handle a large backlog of transactions. -// Spawns NODE_COUNT validator nodes, connected via a mocked gossip sub network. -// Introduces an arbitrary delay to public tx simulation to fake long processing times, -// then spams the network with TX_COUNT transactions. In addition, uses the l1 tx -// delayer to fake long L1 tx inclusion times, so sending a tx immediately before an L1 -// block is mined does not get it included, like in an actual network. +const BLOCK_DURATION_MS = 6000; +const L2_SLOT_DURATION_S = 36; +const L1_BLOCK_TIME_S = 12; + describe('e2e_epochs/epochs_high_tps_block_building', () => { let context: EndToEndContext; let logger: Logger; @@ -56,7 +84,6 @@ describe('e2e_epochs/epochs_high_tps_block_building', () => { return { attester, withdrawer: attester, privateKey, bn254SecretKey: new SecretValue(Fr.random().toBigInt()) }; }); - // Setup context with the given set of validators, no reorgs, mocked gossip sub network, and no anvil test watcher. 
test = await EpochsTestContext.setup({ numberOfAccounts: 0, initialValidators: validators, @@ -65,14 +92,17 @@ describe('e2e_epochs/epochs_high_tps_block_building', () => { aztecProofSubmissionEpochs: 1024, startProverNode: false, enforceTimeTable: true, - ethereumSlotDuration: 8, - l1PublishingTime: 8, - aztecSlotDuration: 24, + ethereumSlotDuration: L1_BLOCK_TIME_S, + l1PublishingTime: L1_BLOCK_TIME_S, + aztecSlotDuration: L2_SLOT_DURATION_S, + blockDurationMs: BLOCK_DURATION_MS, fakeProcessingDelayPerTxMs: TX_DURATION_MS, attestationPropagationTime: 1, minTxsPerBlock: 1, maxTxsPerBlock: 100, skipInitialSequencer: true, + enableProposerPipelining: true, + inboxLag: 2, }); ({ context, logger } = test); @@ -98,45 +128,98 @@ describe('e2e_epochs/epochs_high_tps_block_building', () => { }); it('builds blocks without any errors', async () => { - // Create and submit several txs + // Pre-prove and send all txs so the proposer has a full backlog ready in the pool when it starts building. const txs = await timesAsync(TX_COUNT, i => proveInteraction(context.wallet, contract.methods.spam(i, 1n, false), { from }), ); const txHashes = await Promise.all(txs.map(tx => tx.send({ wait: NO_WAIT }))); - logger.warn(`Sent ${txHashes.length} transactions`, { - txs: txHashes, - }); + logger.warn(`Sent ${txHashes.length} transactions`, { txs: txHashes }); const sequencers = nodes.map(node => node.getSequencer()!); const { failEvents } = test.watchSequencerEvents(sequencers, i => ({ validator: validators[i].attester })); - // Start the sequencers! + // Wait until `ethereumSlotDuration + blockDuration` seconds before the L2 target slot boundary before + // starting the sequencers. 
The sequencer's timetable treats the build window for slot N as starting at + // `slotStart(N) - ethereumSlotDuration` (see `getSlotStartBuildTimestamp` in `stdlib/src/epoch-helpers`), + // so we need at least one ethereum slot of lead on top of one blockDuration to guarantee that sub-slot 1 + // of the first build slot is reachable (and hence the first checkpoint is fully filled). + const leadSeconds = test.L1_BLOCK_TIME_IN_S + BLOCK_DURATION_MS / 1000; + const currentL1Block = await test.l1Client.getBlock({ blockTag: 'latest' }); + const currentSlot = getSlotAtTimestamp(currentL1Block.timestamp, test.constants); + let targetSlot = SlotNumber(currentSlot + 1); + let startSequencersAt = new Date( + Number(getTimestampForSlot(targetSlot, test.constants)) * 1000 - leadSeconds * 1000, + ); + if (startSequencersAt.getTime() <= context.dateProvider.now()) { + targetSlot = SlotNumber(targetSlot + 1); + startSequencersAt = new Date(Number(getTimestampForSlot(targetSlot, test.constants)) * 1000 - leadSeconds * 1000); + } + logger.warn( + `Waiting until ${startSequencersAt.toISOString()} (${leadSeconds}s before L2 slot ${targetSlot} starts)`, + ); + await sleepUntil(startSequencersAt, context.dateProvider.nowAsDate()); + await Promise.all(sequencers.map(sequencer => sequencer.start())); logger.warn(`Started all sequencers`); - // Wait until all txs are mined - const timeout = test.L2_SLOT_DURATION_IN_S * (TX_COUNT + 3); + // Wait until all txs are mined. + const timeout = test.L2_SLOT_DURATION_IN_S * (CHECKPOINTS_TO_CHECK * 2 + 8); await Promise.all(txHashes.map(txHash => waitForTx(context.aztecNode, txHash, { timeout }))); logger.warn(`All txs have been mined`); - // Check all blocks mined by the sequencers have under the expected max number of transactions. + // Fetch the blocks and group contiguous blocks by checkpoint number. 
For the first CHECKPOINTS_TO_CHECK + // checkpoints whose target slot is at or after the slot we waited for, assert every checkpoint is fully + // filled (BLOCKS_PER_CHECKPOINT blocks × TXS_PER_BLOCK txs each) and the checkpoint tx landed in the 1st + // or 2nd L1 block of the target slot. const blocks = await nodes[0].getCheckpointedBlocks(BlockNumber(1), 50); - for (const block of blocks) { + const ethereumSlotDuration = test.L1_BLOCK_TIME_IN_S; + const checkpoints = chunkBy(blocks, b => Number(b.checkpointNumber)); + let checkedFullCheckpoints = 0; + for (const checkpointBlocks of checkpoints) { + const first = checkpointBlocks[0]; + const slotStartTimestamp = getTimestampForSlot(first.block.slot, test.constants); + const l1OffsetInSlot = Number(first.l1.timestamp - slotStartTimestamp) / ethereumSlotDuration; logger.warn( - `Block ${block.block.number} was mined at L1 ${block.l1.blockNumber} with ${block.block.body.txEffects.length} transactions`, - { transactions: block.block.body.txEffects.map(tx => tx.txHash) }, + `Checkpoint ${first.checkpointNumber} (target slot ${first.block.slot}) mined at L1 block ${first.l1.blockNumber} ` + + `(offset ${l1OffsetInSlot} into L2 slot) with ${checkpointBlocks.length} blocks`, + { + blocks: checkpointBlocks.map(b => ({ number: b.block.number, txs: b.block.body.txEffects.length })), + }, ); - } - for (const block of blocks) { - expect(block.block.body.txEffects.length).toBeLessThanOrEqual(EXPECTED_MAX_TXS_PER_BLOCK); + if (first.block.slot < targetSlot || checkedFullCheckpoints >= CHECKPOINTS_TO_CHECK) { + continue; + } + + // We don't test for exactly BLOCKS_PER_CHECKPOINT since CI delays make this flakey + expect(checkpointBlocks.length).toBeGreaterThanOrEqual(BLOCKS_PER_CHECKPOINT - 1); + + for (const block of checkpointBlocks) { + // We don't test for exactly TXS_PER_BLOCK since CI delays make this flakey + const txCount = block.block.body.txEffects.length; + expect(txCount).toBeGreaterThanOrEqual(1); + 
expect(txCount).toBeLessThanOrEqual(TXS_PER_BLOCK); + } + expect([0, 1]).toContain(l1OffsetInSlot); + checkedFullCheckpoints++; } - // Expect no failures from sequencers during block building. - // The following error is marked as a flake on the test ignore patterns, - // so we can have this test run for a while before it breaks CI on a recoverable error. - if (failEvents.length > 0) { - logger.error(`Failed events from sequencers`, failEvents); + // Check that we've gone through all checkpoints, and at least one checkpoint reached + // expected number of blocks, and at least one block reached the expected number of txs. + expect(checkedFullCheckpoints).toBe(CHECKPOINTS_TO_CHECK); + expect(Math.max(...blocks.map(b => b.block.body.txEffects.length))).toEqual(TXS_PER_BLOCK); + expect(Math.max(...checkpoints.map(c => c.length))).toEqual(BLOCKS_PER_CHECKPOINT); + + // Expect no failures from sequencers during block building. Filter out the self-proposal 'Rollup contract + // check failed' spam: when a validator proposes two consecutive checkpoints, the archiver's sequentiality + // guard rejects persisting the second proposed checkpoint until the first is confirmed on L1, so the next + // pipelining cycle falls through without simulation overrides and canProposeAt reverts until state catches + // up. Tracked in A-910. 
+ const significantFailEvents = failEvents.filter( + e => !(e.type === 'proposer-rollup-check-failed' && e.reason === 'Rollup contract check failed'), + ); + if (significantFailEvents.length > 0) { + logger.error(`Failed events from sequencers`, significantFailEvents); } - expect(failEvents).toEqual([]); + expect(significantFailEvents).toEqual([]); }); }); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts index 91d410742e59..3edbccf56373 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_multi_proof.test.ts @@ -91,7 +91,7 @@ describe('e2e_epochs/epochs_multi_proof', () => { 120, ); - const provenBlockNumber = await context.aztecNode.getProvenBlockNumber(); + const provenBlockNumber = await context.aztecNode.getBlockNumber('proven'); expect(provenBlockNumber).toEqual(firstEpochLastBlockNum); logger.info(`Test succeeded`); diff --git a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts index 4635f226abd9..0d9b27000373 100644 --- a/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs/epochs_proof_public_cross_chain.test.ts @@ -78,7 +78,7 @@ describe('e2e_epochs/epochs_proof_public_cross_chain', () => { logger.warn(`Waiting for proof for tx ${txReceipt.txHash} mined at ${txReceipt.blockNumber!}`); await retryUntil( async () => { - const provenBlockNumber = await context.aztecNode.getProvenBlockNumber(); + const provenBlockNumber = await context.aztecNode.getBlockNumber('proven'); logger.info(`Proven block number is ${provenBlockNumber}`); return provenBlockNumber >= txReceipt.blockNumber!; }, @@ -86,7 +86,7 @@ describe('e2e_epochs/epochs_proof_public_cross_chain', () => { test.L2_SLOT_DURATION_IN_S * test.epochDuration 
* 3, ); - const provenBlockNumber = await context.aztecNode.getProvenBlockNumber(); + const provenBlockNumber = await context.aztecNode.getBlockNumber('proven'); expect(provenBlockNumber).toBeGreaterThanOrEqual(txReceipt.blockNumber!); // Should not be able to consume the message again. diff --git a/yarn-project/end-to-end/src/e2e_event_logs.test.ts b/yarn-project/end-to-end/src/e2e_event_logs.test.ts index 1fde8fe14556..35f8a50df2a0 100644 --- a/yarn-project/end-to-end/src/e2e_event_logs.test.ts +++ b/yarn-project/end-to-end/src/e2e_event_logs.test.ts @@ -222,7 +222,9 @@ describe('Logs', () => { .send({ from: account1Address }); // Fetch raw private logs for that block and check tag uniqueness - const logs = (await aztecNode.getBlock(tx.blockNumber!))!.getPrivateLogs().filter(l => !l.isEmpty()); + const logs = (await aztecNode.getBlock(tx.blockNumber!, { includeTransactions: true }))!.body.txEffects + .flatMap(t => t.privateLogs) + .filter(l => !l.isEmpty()); expect(logs.length).toBe(tx1NumLogs); @@ -243,7 +245,9 @@ describe('Logs', () => { const blockNumber = tx.blockNumber!; // Fetch raw private logs for that block and check tag uniqueness - const logs = (await aztecNode.getBlock(blockNumber))!.getPrivateLogs().filter(l => !l.isEmpty()); + const logs = (await aztecNode.getBlock(blockNumber, { includeTransactions: true }))!.body.txEffects + .flatMap(t => t.privateLogs) + .filter(l => !l.isEmpty()); expect(logs.length).toBe(tx2NumLogs); diff --git a/yarn-project/end-to-end/src/e2e_expiration_timestamp.test.ts b/yarn-project/end-to-end/src/e2e_expiration_timestamp.test.ts index 18240f671298..8ba0b26759bd 100644 --- a/yarn-project/end-to-end/src/e2e_expiration_timestamp.test.ts +++ b/yarn-project/end-to-end/src/e2e_expiration_timestamp.test.ts @@ -34,7 +34,7 @@ describe('e2e_expiration_timestamp', () => { let expirationTimestamp: bigint; beforeEach(async () => { - const header = await aztecNode.getBlockHeader(); + const header = await 
aztecNode.getBlockHeader('latest'); if (!header) { throw new Error('Block header not found in the setup of e2e_expiration_timestamp.test.ts'); } @@ -87,7 +87,7 @@ describe('e2e_expiration_timestamp', () => { let expirationTimestamp: bigint; beforeEach(async () => { - const header = await aztecNode.getBlockHeader(); + const header = await aztecNode.getBlockHeader('latest'); if (!header) { throw new Error('Block header not found in the setup of e2e_expiration_timestamp.test.ts'); } @@ -142,7 +142,7 @@ describe('e2e_expiration_timestamp', () => { let expirationTimestamp: bigint; beforeEach(async () => { - const header = await aztecNode.getBlockHeader(); + const header = await aztecNode.getBlockHeader('latest'); if (!header) { throw new Error('Block header not found in the setup of e2e_expiration_timestamp.test.ts'); } diff --git a/yarn-project/end-to-end/src/e2e_fees/fee_settings.test.ts b/yarn-project/end-to-end/src/e2e_fees/fee_settings.test.ts index c88940adcb67..2c025c2cb526 100644 --- a/yarn-project/end-to-end/src/e2e_fees/fee_settings.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/fee_settings.test.ts @@ -67,7 +67,7 @@ describe('e2e_fees fee settings', () => { const getCurrentMinFeesAfterCheckpoint = async (checkpointedBlock: BlockNumber) => { return await retryUntil( async () => { - const currentCheckpointedBlock = await aztecNode.getCheckpointedBlockNumber(); + const currentCheckpointedBlock = await aztecNode.getBlockNumber('checkpointed'); if (currentCheckpointedBlock < checkpointedBlock) { return undefined; } diff --git a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts index f9ea83a93fb3..478687371913 100644 --- a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts @@ -134,7 +134,7 @@ export class FeesTest { async catchUpProvenChain() { const bn = await this.aztecNode.getBlockNumber(); - while ((await this.aztecNode.getProvenBlockNumber()) < bn) { 
+ while ((await this.aztecNode.getBlockNumber('proven')) < bn) { await sleep(1000); } } diff --git a/yarn-project/end-to-end/src/e2e_keys.test.ts b/yarn-project/end-to-end/src/e2e_keys.test.ts index 9e56bb423b87..c566e6ef525f 100644 --- a/yarn-project/end-to-end/src/e2e_keys.test.ts +++ b/yarn-project/end-to-end/src/e2e_keys.test.ts @@ -7,7 +7,6 @@ import { DomainSeparator, INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { BlockNumber } from '@aztec/foundation/branded-types'; import { poseidon2HashWithSeparator } from '@aztec/foundation/crypto/poseidon'; import { TestContract } from '@aztec/noir-test-contracts.js/Test'; -import type { L2Block } from '@aztec/stdlib/block'; import { siloNullifier } from '@aztec/stdlib/hash'; import { computeAppNullifierHidingKey, @@ -90,14 +89,12 @@ describe('Keys', () => { const getNumNullifiedNotes = async (nhkApp: Fr, contractAddress: AztecAddress) => { // 1. Get all the note hashes - const blocks = await aztecNode.getBlocks(BlockNumber(INITIAL_L2_BLOCK_NUM), 1000); - const noteHashes = blocks.flatMap((block: L2Block) => - block.body.txEffects.flatMap(txEffect => txEffect.noteHashes), - ); + const blocks = await aztecNode.getBlocks(BlockNumber(INITIAL_L2_BLOCK_NUM), 1000, { + includeTransactions: true, + }); + const noteHashes = blocks.flatMap(block => block.body.txEffects.flatMap(txEffect => txEffect.noteHashes)); // 2. Get all the seen nullifiers - const nullifiers = blocks.flatMap((block: L2Block) => - block.body.txEffects.flatMap(txEffect => txEffect.nullifiers), - ); + const nullifiers = blocks.flatMap(block => block.body.txEffects.flatMap(txEffect => txEffect.nullifiers)); // 3. 
Derive all the possible nullifiers using nhkApp const derivedNullifiers = await Promise.all( noteHashes.map(async noteHash => { diff --git a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts index c16d89979900..e77df10cb097 100644 --- a/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_publisher/e2e_l1_publisher.test.ts @@ -54,14 +54,15 @@ import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/ import { type L1RollupConstants, getSlotStartBuildTimestamp } from '@aztec/stdlib/epoch-helpers'; import { GasFees, GasSettings } from '@aztec/stdlib/gas'; import { tryStop } from '@aztec/stdlib/interfaces/server'; -import { orderAttestations } from '@aztec/stdlib/p2p'; -import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { - fr, - makeAndSignCommitteeAttestationsAndSigners, - makeCheckpointAttestationFromCheckpoint, - mockProcessedTx, -} from '@aztec/stdlib/testing'; + CheckpointProposal, + ConsensusPayload, + CheckpointAttestation as P2PCheckpointAttestation, + getHashedSignaturePayloadTypedData, + orderAttestations, +} from '@aztec/stdlib/p2p'; +import { CheckpointHeader } from '@aztec/stdlib/rollup'; +import { fr, mockProcessedTx } from '@aztec/stdlib/testing'; import type { BlockHeader, CheckpointGlobalVariables, ProcessedTx } from '@aztec/stdlib/tx'; import { type MerkleTreeAdminDatabase, @@ -136,6 +137,28 @@ describe('L1Publisher integration', () => { let rpcUrl: string; let anvil: Anvil; + const getSignatureContext = () => ({ + chainId, + rollupAddress: l1ContractAddresses.rollupAddress, + }); + const makeCheckpointAttestationForCurrentContext = (checkpoint: Checkpoint, signer: Secp256k1Signer) => { + const signatureContext = getSignatureContext(); + const payload = ConsensusPayload.fromCheckpoint(checkpoint, signatureContext); + const attestationDigest = 
getHashedSignaturePayloadTypedData(payload); + const proposal = new CheckpointProposal( + checkpoint.header, + checkpoint.archive.root, + checkpoint.feeAssetPriceModifier, + Signature.empty(), + signatureContext, + ); + const proposalDigest = getHashedSignaturePayloadTypedData(proposal); + return new P2PCheckpointAttestation(payload, signer.sign(attestationDigest), signer.sign(proposalDigest)); + }; + const signAttestationsAndSigners = ( + attestationsAndSigners: CommitteeAttestationsAndSigners, + signer: Secp256k1Signer, + ) => signer.sign(getHashedSignaturePayloadTypedData(attestationsAndSigners)); const progressTimeBySlot = async (slotsToJump = 1) => { const currentTime = (await l1Client.getBlock()).timestamp; @@ -481,7 +504,7 @@ describe('L1Publisher integration', () => { await publisher.enqueueProposeCheckpoint( checkpoint, - CommitteeAttestationsAndSigners.empty(), + CommitteeAttestationsAndSigners.empty(getSignatureContext()), Signature.empty(), ); await publisher.sendRequests(); @@ -527,7 +550,7 @@ describe('L1Publisher integration', () => { feeAssetPriceModifier: 0n, }, }, - CommitteeAttestationsAndSigners.empty().getPackedAttestations(), + CommitteeAttestationsAndSigners.packAttestations([]), [], Signature.empty().toViemSignature(), getPrefixedEthBlobCommitments(blockBlobs), @@ -592,7 +615,7 @@ describe('L1Publisher integration', () => { ) => { await publisher.enqueueProposeCheckpoint( checkpoint, - new CommitteeAttestationsAndSigners(attestations), + new CommitteeAttestationsAndSigners(attestations, getSignatureContext()), signature, ); const result = await publisher.sendRequests(); @@ -604,7 +627,7 @@ describe('L1Publisher integration', () => { const { checkpoint } = await buildSingleCheckpoint(); const block = checkpoint.blocks[0]; - const checkpointAttestations = validators.map(v => makeCheckpointAttestationFromCheckpoint(checkpoint, v)); + const checkpointAttestations = validators.map(v => makeCheckpointAttestationForCurrentContext(checkpoint, v)); 
const attestations = orderAttestations(checkpointAttestations, committee!); const canPropose = await publisher.canProposeAt(new Fr(GENESIS_ARCHIVE_ROOT), proposer!); @@ -613,11 +636,8 @@ describe('L1Publisher integration', () => { const proposerSigner = validators.find(v => v.address.equals(proposer!)); - const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations); - const attestationsAndSignersSignature = makeAndSignCommitteeAttestationsAndSigners( - attestationsAndSigners, - proposerSigner!, - ); + const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations, getSignatureContext()); + const attestationsAndSignersSignature = signAttestationsAndSigners(attestationsAndSigners, proposerSigner!); await expectPublishCheckpoint(checkpoint, attestations, attestationsAndSignersSignature); }); @@ -625,11 +645,11 @@ describe('L1Publisher integration', () => { it('fails to publish a block without the proposer attestation', async () => { const { checkpoint } = await buildSingleCheckpoint(); const block = checkpoint.blocks[0]; - const checkpointAttestations = validators.map(v => makeCheckpointAttestationFromCheckpoint(checkpoint, v)); + const checkpointAttestations = validators.map(v => makeCheckpointAttestationForCurrentContext(checkpoint, v)); // Reverse attestations to break proposer attestation const attestations = orderAttestations(checkpointAttestations, committee!).reverse(); - const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations); + const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations, getSignatureContext()); const canPropose = await publisher.canProposeAt(new Fr(GENESIS_ARCHIVE_ROOT), proposer!); expect(canPropose?.slot).toEqual(block.header.getSlot()); @@ -643,15 +663,15 @@ describe('L1Publisher integration', () => { it('rejects flipped proposer signature', async () => { const { checkpoint } = await buildSingleCheckpoint(); const block = checkpoint.blocks[0]; - const 
checkpointAttestations = validators.map(v => makeCheckpointAttestationFromCheckpoint(checkpoint, v)); + const checkpointAttestations = validators.map(v => makeCheckpointAttestationForCurrentContext(checkpoint, v)); const attestations = orderAttestations(checkpointAttestations, committee!); const canPropose = await publisher.canProposeAt(new Fr(GENESIS_ARCHIVE_ROOT), proposer!); expect(canPropose?.slot).toEqual(block.header.getSlot()); await publisher.validateBlockHeader(checkpoint.header); - const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations); - const attestationsAndSignersSignature = makeAndSignCommitteeAttestationsAndSigners( + const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations, getSignatureContext()); + const attestationsAndSignersSignature = signAttestationsAndSigners( attestationsAndSigners, validators.find(v => v.address.equals(proposer!))!, ); @@ -668,15 +688,15 @@ describe('L1Publisher integration', () => { it('rejects signature with invalid recovery value', async () => { const { checkpoint } = await buildSingleCheckpoint(); const block = checkpoint.blocks[0]; - const checkpointAttestations = validators.map(v => makeCheckpointAttestationFromCheckpoint(checkpoint, v)); + const checkpointAttestations = validators.map(v => makeCheckpointAttestationForCurrentContext(checkpoint, v)); const attestations = orderAttestations(checkpointAttestations, committee!); const canPropose = await publisher.canProposeAt(new Fr(GENESIS_ARCHIVE_ROOT), proposer!); expect(canPropose?.slot).toEqual(block.header.getSlot()); await publisher.validateBlockHeader(checkpoint.header); - const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations); - const attestationsAndSignersSignature = makeAndSignCommitteeAttestationsAndSigners( + const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations, getSignatureContext()); + const attestationsAndSignersSignature = signAttestationsAndSigners( 
attestationsAndSigners, validators.find(v => v.address.equals(proposer!))!, ); @@ -699,11 +719,11 @@ describe('L1Publisher integration', () => { // Publish the first invalid block const badCheckpointAttestations = validators .filter(v => v.address.equals(proposer!)) - .map(v => makeCheckpointAttestationFromCheckpoint(badCheckpoint, v)); + .map(v => makeCheckpointAttestationForCurrentContext(badCheckpoint, v)); const badAttestations = orderAttestations(badCheckpointAttestations, committee!); - const badAttestationsAndSigners = new CommitteeAttestationsAndSigners(badAttestations); - const badAttestationsAndSignersSignature = makeAndSignCommitteeAttestationsAndSigners( + const badAttestationsAndSigners = new CommitteeAttestationsAndSigners(badAttestations, getSignatureContext()); + const badAttestationsAndSignersSignature = signAttestationsAndSigners( badAttestationsAndSigners, validators.find(v => v.address.equals(proposer!))!, ); @@ -721,7 +741,7 @@ describe('L1Publisher integration', () => { const { checkpoint } = await buildSingleCheckpoint({ blockNumber: BlockNumber(1) }); const block = checkpoint.blocks[0]; expect(block.number).toEqual(badBlock.number); - const checkpointAttestations = validators.map(v => makeCheckpointAttestationFromCheckpoint(checkpoint, v)); + const checkpointAttestations = validators.map(v => makeCheckpointAttestationForCurrentContext(checkpoint, v)); const attestations = orderAttestations(checkpointAttestations, committee!); // Check we can invalidate the checkpoint @@ -756,8 +776,8 @@ describe('L1Publisher integration', () => { await publisher.validateBlockHeader(checkpoint.header, invalidationSimulationOverridesPlan); // At this point I'm gonna need to propose the correct signature ye? So confused actually here. 
- const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations); - const attestationsAndSignersSignature = makeAndSignCommitteeAttestationsAndSigners( + const attestationsAndSigners = new CommitteeAttestationsAndSigners(attestations, getSignatureContext()); + const attestationsAndSignersSignature = signAttestationsAndSigners( attestationsAndSigners, validators.find(v => v.address.equals(proposer!))!, ); @@ -783,7 +803,11 @@ describe('L1Publisher integration', () => { const { checkpoint } = await buildSingleCheckpoint(); const block = checkpoint.blocks[0]; - await publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty()); + await publisher.enqueueProposeCheckpoint( + checkpoint, + CommitteeAttestationsAndSigners.empty(getSignatureContext()), + Signature.empty(), + ); await publisher.enqueueGovernanceCastSignal( l1ContractAddresses.rollupAddress, block.slot, @@ -807,7 +831,11 @@ describe('L1Publisher integration', () => { // Expect the simulation to fail const loggerErrorSpy = jest.spyOn((publisher as any).log, 'error'); await expect( - publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty()), + publisher.enqueueProposeCheckpoint( + checkpoint, + CommitteeAttestationsAndSigners.empty(getSignatureContext()), + Signature.empty(), + ), ).rejects.toThrow(/Rollup__InvalidInHash/); expect(loggerErrorSpy).toHaveBeenNthCalledWith( 2, @@ -855,9 +883,12 @@ describe('L1Publisher integration', () => { }; const enqueueProposeL2Checkpoint = async (checkpoint: Checkpoint) => { - await publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty(), { - txTimeoutAt: getProposeTxTimeoutAt(checkpoint), - }); + await publisher.enqueueProposeCheckpoint( + checkpoint, + CommitteeAttestationsAndSigners.empty(getSignatureContext()), + Signature.empty(), + { txTimeoutAt: getProposeTxTimeoutAt(checkpoint) }, + ); }; it(`cancels block 
proposal when the L2 slot ends`, async () => { diff --git a/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts b/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts index 2244713331af..2e1be2bb0450 100644 --- a/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts +++ b/yarn-project/end-to-end/src/e2e_multi_validator/e2e_multi_validator_node.test.ts @@ -125,7 +125,11 @@ describe('e2e_multi_validator_node', () => { const dataStore = (aztecNode as AztecNodeService).getBlockSource() as Archiver; const checkpointedBlock = await dataStore.getCheckpointedBlock(tx.blockNumber!); const [publishedCheckpoint] = await dataStore.getCheckpoints(checkpointedBlock!.checkpointNumber, 1); - const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint); + const signatureContext = { + chainId: config.l1ChainId, + rollupAddress: deployL1ContractsValues.l1ContractAddresses.rollupAddress, + }; + const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint, signatureContext); const attestations = publishedCheckpoint.attestations .filter(a => !a.signature.isEmpty()) .map(a => new CheckpointAttestation(payload, a.signature, Signature.empty())); @@ -183,7 +187,11 @@ describe('e2e_multi_validator_node', () => { const dataStore = (aztecNode as AztecNodeService).getBlockSource() as Archiver; const checkpointedBlock = await dataStore.getCheckpointedBlock(tx.blockNumber!); const [publishedCheckpoint] = await dataStore.getCheckpoints(checkpointedBlock!.checkpointNumber, 1); - const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint); + const signatureContext = { + chainId: config.l1ChainId, + rollupAddress: deployL1ContractsValues.l1ContractAddresses.rollupAddress, + }; + const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint, signatureContext); const attestations = publishedCheckpoint.attestations .filter(a => 
!a.signature.isEmpty()) .map(a => new CheckpointAttestation(payload, a.signature, Signature.empty())); diff --git a/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts b/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts index a766ed446312..316fd63a5bba 100644 --- a/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts +++ b/yarn-project/end-to-end/src/e2e_multiple_blobs.test.ts @@ -10,6 +10,7 @@ import { FIELDS_PER_BLOB } from '@aztec/constants'; import { AvmGadgetsTestContract } from '@aztec/noir-test-contracts.js/AvmGadgetsTest'; import { AvmTestContract } from '@aztec/noir-test-contracts.js/AvmTest'; import { TestContract } from '@aztec/noir-test-contracts.js/Test'; +import { L2Block } from '@aztec/stdlib/block'; import type { AztecNodeAdmin } from '@aztec/stdlib/interfaces/client'; import { setup } from './fixtures/utils.js'; @@ -80,7 +81,14 @@ describe('e2e_multiple_blobs', () => { const blockNumber = receipts[0].blockNumber!; expect(receipts.every(r => r.blockNumber === blockNumber)).toBe(true); - const block = (await aztecNode.getBlock(blockNumber))!; + const response = (await aztecNode.getBlock(blockNumber, { includeTransactions: true }))!; + const block = new L2Block( + response.archive, + response.header, + response.body, + response.checkpointNumber, + response.indexWithinCheckpoint, + ); const numBlobFields = encodeCheckpointBlobDataFromBlocks([block.toBlockBlobData()]).length; const numBlobs = Math.ceil(numBlobFields / FIELDS_PER_BLOB); diff --git a/yarn-project/end-to-end/src/e2e_p2p/fee_asset_price_oracle_gossip.test.ts b/yarn-project/end-to-end/src/e2e_p2p/fee_asset_price_oracle_gossip.test.ts index 45cb7db1756f..626b9f80da92 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/fee_asset_price_oracle_gossip.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/fee_asset_price_oracle_gossip.test.ts @@ -185,7 +185,11 @@ describe('e2e_p2p_network', () => { 120, 1, ); - const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint); + 
const signatureContext = { + chainId: t.ctx.aztecNodeConfig.l1ChainId, + rollupAddress: t.ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress, + }; + const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint, signatureContext); const attestations = publishedCheckpoint.attestations .filter(a => !a.signature.isEmpty()) .map(a => new CheckpointAttestation(payload, a.signature, Signature.empty())); diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index 7cc1cfb7effa..e499925375e8 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -188,7 +188,11 @@ describe('e2e_p2p_network', () => { const dataStore = (nodes[0] as AztecNodeService).getBlockSource() as Archiver; const checkpointedBlock = await dataStore.getCheckpointedBlock(blockNumber); const [publishedCheckpoint] = await dataStore.getCheckpoints(checkpointedBlock!.checkpointNumber, 1); - const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint); + const signatureContext = { + chainId: t.ctx.aztecNodeConfig.l1ChainId, + rollupAddress: t.ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress, + }; + const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint, signatureContext); const attestations = publishedCheckpoint.attestations .filter(a => !a.signature.isEmpty()) .map(a => new CheckpointAttestation(payload, a.signature, Signature.empty())); @@ -207,7 +211,7 @@ describe('e2e_p2p_network', () => { // Ensure prover node did its job and collected txs from p2p await retryUntil( async () => { - const provenBlock = await nodes[0].getProvenBlockNumber(); + const provenBlock = await nodes[0].getBlockNumber('proven'); return provenBlock > 0; }, 'proven block', diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts 
b/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts index 2534d4dfafb9..cc99e6283744 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network_no_cheat.test.ts @@ -206,7 +206,11 @@ describe('e2e_p2p_network', () => { // validator committee. If we submit txs before a checkpoint lands on L1, a failed checkpoint // publish can prune locally-proposed blocks, causing txs to reference pruned block headers. t.logger.info('Waiting for first checkpoint to be published'); - await retryUntil(async () => (await nodes[0].getCheckpointedBlockNumber()) > 0, 'first checkpoint published', 120); + await retryUntil( + async () => (await nodes[0].getBlockNumber('checkpointed')) > 0, + 'first checkpoint published', + 120, + ); t.logger.info('First checkpoint published'); // We need to `createNodes` before we setup account, because @@ -244,7 +248,11 @@ describe('e2e_p2p_network', () => { const dataStore = (nodes[0] as AztecNodeService).getBlockSource() as Archiver; const checkpointedBlock = await dataStore.getCheckpointedBlock(blockNumber); const [publishedCheckpoint] = await dataStore.getCheckpoints(checkpointedBlock!.checkpointNumber, 1); - const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint); + const signatureContext = { + chainId: t.ctx.aztecNodeConfig.l1ChainId, + rollupAddress: t.ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress, + }; + const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint, signatureContext); const attestations = publishedCheckpoint.attestations .filter(a => !a.signature.isEmpty()) .map(a => new CheckpointAttestation(payload, a.signature, Signature.empty())); diff --git a/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts index 49f68546e618..25c96a232ea2 100644 --- 
a/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/preferred_gossip_network.test.ts @@ -357,7 +357,11 @@ describe('e2e_p2p_preferred_network', () => { const dataStore = (nodes[0] as AztecNodeService).getBlockSource() as Archiver; const checkpointedBlock = await dataStore.getCheckpointedBlock(blockNumber); const [publishedCheckpoint] = await dataStore.getCheckpoints(checkpointedBlock!.checkpointNumber, 1); - const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint); + const signatureContext = { + chainId: t.ctx.aztecNodeConfig.l1ChainId, + rollupAddress: t.ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress, + }; + const payload = ConsensusPayload.fromCheckpoint(publishedCheckpoint.checkpoint, signatureContext); const attestations = publishedCheckpoint.attestations .filter(a => !a.signature.isEmpty()) .map(a => new CheckpointAttestation(payload, a.signature, Signature.empty())); diff --git a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts index b82db05ed56f..47f81edb57d4 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reex.test.ts @@ -129,6 +129,10 @@ describe('e2e_p2p_reex', () => { jest.spyOn(p2pClient, 'broadcastProposal').mockImplementation(async (...args: unknown[]) => { // We remove one of the transactions, therefore the block root will be different! 
const proposal = args[0] as BlockProposal; + const signatureContext = { + chainId: t.ctx.aztecNodeConfig.l1ChainId, + rollupAddress: t.ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress, + }; const proposerAddress = proposal.getSender(); const txHashes = proposal.txHashes; @@ -146,7 +150,8 @@ describe('e2e_p2p_reex', () => { proposal.archiveRoot, proposal.txHashes, undefined, - (payload, context) => signer.signMessageWithAddress(proposerAddress!, payload, context), + signatureContext, + (typedData, context) => signer.signTypedDataWithAddress(proposerAddress!, typedData, context), ); const p2pService = (p2pClient as any).p2pService as LibP2PService; diff --git a/yarn-project/end-to-end/src/e2e_p2p/reqresp/utils.ts b/yarn-project/end-to-end/src/e2e_p2p/reqresp/utils.ts index 8472e9bf2f31..71f440695add 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reqresp/utils.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reqresp/utils.ts @@ -194,8 +194,8 @@ export async function runReqrespTxTest(params: { // Wait for L1 checkpoint sync, which may lag behind P2P block propagation. const checkpoints = await retryUntil( async () => { - const cps = await nodes[0].getCheckpoints(CheckpointNumber(1), 50); - return cps.length > 0 && cps.some(cp => cp.checkpoint.blocks.length >= 2) ? cps : undefined; + const cps = await nodes[0].getCheckpoints(CheckpointNumber(1), 50, { includeBlocks: true }); + return cps.length > 0 && cps.some(cp => (cp.blocks?.length ?? 0) >= 2) ? 
cps : undefined; }, 'waiting for multi-block checkpoint to sync from L1', 30, @@ -203,16 +203,17 @@ export async function runReqrespTxTest(params: { ); let mbpsFound = false; - let expectedBlockNumber = checkpoints[0].checkpoint.blocks[0].number; + let expectedBlockNumber = checkpoints[0].blocks![0].number; for (const published of checkpoints) { - const blockCount = published.checkpoint.blocks.length; + const blocks = published.blocks!; + const blockCount = blocks.length; mbpsFound = mbpsFound || blockCount >= 2; for (let i = 0; i < blockCount; i++) { - const block = published.checkpoint.blocks[i]; + const block = blocks[i]; expect(block.indexWithinCheckpoint).toBe(i); - expect(block.checkpointNumber).toBe(published.checkpoint.number); + expect(block.checkpointNumber).toBe(published.number); expect(block.number).toBe(expectedBlockNumber); expectedBlockNumber++; } diff --git a/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts b/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts index e69a176d72e7..05e3086eccda 100644 --- a/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts @@ -35,7 +35,7 @@ describe('e2e_pending_note_hashes_contract', () => { const expectNoteHashesSquashedExcept = async (exceptFirstFew: number) => { const blockNum = await aztecNode.getBlockNumber(); - const block = (await aztecNode.getBlocks(blockNum, 1))[0]; + const block = (await aztecNode.getBlocks(blockNum, 1, { includeTransactions: true }))[0]; const noteHashes = block.body.txEffects.flatMap(txEffect => txEffect.noteHashes); @@ -51,7 +51,7 @@ describe('e2e_pending_note_hashes_contract', () => { const expectNullifiersSquashedExcept = async (exceptFirstFew: number) => { const blockNum = await aztecNode.getBlockNumber(); - const block = (await aztecNode.getBlocks(blockNum, 1))[0]; + const block = (await aztecNode.getBlocks(blockNum, 1, { includeTransactions: true 
}))[0]; const nullifierArray = block.body.txEffects.flatMap(txEffect => txEffect.nullifiers); @@ -67,7 +67,7 @@ describe('e2e_pending_note_hashes_contract', () => { const expectNoteLogsSquashedExcept = async (exceptFirstFew: number) => { const blockNum = await aztecNode.getBlockNumber(); - const block = (await aztecNode.getBlocks(blockNum, 1))[0]; + const block = (await aztecNode.getBlocks(blockNum, 1, { includeTransactions: true }))[0]; const privateLogs = block.body.txEffects.flatMap(txEffect => txEffect.privateLogs); expect(privateLogs.length).toBe(exceptFirstFew); diff --git a/yarn-project/end-to-end/src/e2e_prover/full.test.ts b/yarn-project/end-to-end/src/e2e_prover/full.test.ts index 7d70a5b443d2..0afaf59aac08 100644 --- a/yarn-project/end-to-end/src/e2e_prover/full.test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/full.test.ts @@ -161,10 +161,11 @@ describe('full_prover', () => { const publishedCheckpoints = await t.aztecNode.getCheckpoints( CheckpointNumber(Number(oldProvenCheckpointNumber) + 1), numCheckpointsProven, + { includeBlocks: true }, ); // Extract all blocks from all proven checkpoints - const allBlocks = publishedCheckpoints.flatMap(pc => pc.checkpoint.blocks); + const allBlocks = publishedCheckpoints.flatMap(pc => pc.blocks!); const fees = allBlocks.map(b => b.header.totalFees.toBigInt()); const totalRewards = fees.map(fee => fee + reward).reduce((acc, reward) => acc + reward, 0n); diff --git a/yarn-project/end-to-end/src/e2e_simple.test.ts b/yarn-project/end-to-end/src/e2e_simple.test.ts index afee9d9dbd92..fd070f124078 100644 --- a/yarn-project/end-to-end/src/e2e_simple.test.ts +++ b/yarn-project/end-to-end/src/e2e_simple.test.ts @@ -56,16 +56,14 @@ describe('e2e_simple', () => { const initialHeader = await aztecNode.getBlockHeader(BlockNumber.ZERO); expect(initialHeader).toBeDefined(); const initialHeaderHash = await initialHeader!.hash(); - const initialBlockByHash = await aztecNode.getBlock(initialHeaderHash); + const 
initialBlockByHash = await aztecNode.getBlock(initialHeaderHash, { includeTransactions: true }); expect(initialBlockByHash).toBeDefined(); - const initialBlockHash = await initialBlockByHash!.hash(); - expect(initialBlockHash.equals(initialHeaderHash)).toBeTrue(); - expect(initialBlockByHash?.body.txEffects.length).toBe(0); - const initialBlockByNumber = await aztecNode.getBlock(BlockNumber.ZERO); + expect(initialBlockByHash!.hash.equals(initialHeaderHash)).toBeTrue(); + expect(initialBlockByHash!.body.txEffects.length).toBe(0); + const initialBlockByNumber = await aztecNode.getBlock(BlockNumber.ZERO, { includeTransactions: true }); expect(initialBlockByNumber).toBeDefined(); - const initialBlockByNumberHash = await initialBlockByNumber!.hash(); - expect(initialBlockByNumberHash.equals(initialHeaderHash)).toBeTrue(); - expect(initialBlockByNumber?.body.txEffects.length).toBe(0); + expect(initialBlockByNumber!.hash.equals(initialHeaderHash)).toBeTrue(); + expect(initialBlockByNumber!.body.txEffects.length).toBe(0); }); it('deploys a contract', async () => { diff --git a/yarn-project/end-to-end/src/e2e_snapshot_sync.test.ts b/yarn-project/end-to-end/src/e2e_snapshot_sync.test.ts index 75a0597f34c7..818423f7b621 100644 --- a/yarn-project/end-to-end/src/e2e_snapshot_sync.test.ts +++ b/yarn-project/end-to-end/src/e2e_snapshot_sync.test.ts @@ -99,7 +99,7 @@ describe('e2e_snapshot_sync', () => { const block = await node.getBlock(BlockNumber(L2_TARGET_BLOCK_NUM)); expect(block).toBeDefined(); - const blockHash = await block!.hash(); + const blockHash = block!.hash; log.warn(`Checking for L2 block ${L2_TARGET_BLOCK_NUM} with hash ${blockHash} on both nodes`); const getBlockHashLeafIndex = (node: AztecNode) => @@ -205,7 +205,7 @@ describe('e2e_snapshot_sync', () => { const block = await node.getBlock(BlockNumber(L2_TARGET_BLOCK_NUM)); expect(block).toBeDefined(); - const blockHash = await block!.hash(); + const blockHash = block!.hash; log.warn(`Checking for L2 block 
${L2_TARGET_BLOCK_NUM} with hash ${blockHash} on both nodes`); const getBlockHashLeafIndex = (node: AztecNode) => diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index d18a36e7b742..fb140b65439a 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -57,7 +57,7 @@ import { TokenContract } from '@aztec/noir-contracts.js/Token'; import { SpamContract } from '@aztec/noir-test-contracts.js/Spam'; import { SequencerPublisher, SequencerPublisherMetrics } from '@aztec/sequencer-client'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { CommitteeAttestationsAndSigners } from '@aztec/stdlib/block'; +import { CommitteeAttestationsAndSigners, L2Block } from '@aztec/stdlib/block'; import { Checkpoint } from '@aztec/stdlib/checkpoint'; import { tryStop } from '@aztec/stdlib/interfaces/server'; import { createWorldStateSynchronizer } from '@aztec/world-state'; @@ -379,8 +379,20 @@ describe('e2e_synching', () => { } const blockNumber = await aztecNode.getBlockNumber(); - const publishedCheckpoints = await aztecNode.getCheckpoints(CheckpointNumber(1), blockNumber); - const checkpoints = publishedCheckpoints.map(pc => pc.checkpoint); + const checkpointResponses = await aztecNode.getCheckpoints(CheckpointNumber(1), blockNumber, { + includeBlocks: true, + includeTransactions: true, + }); + const checkpoints = checkpointResponses.map( + cr => + new Checkpoint( + cr.archive, + cr.header, + cr.blocks!.map(b => new L2Block(b.archive, b.header, b.body!, b.checkpointNumber, b.indexWithinCheckpoint)), + cr.number, + cr.feeAssetPriceModifier, + ), + ); await variant.writeCheckpoints(checkpoints); await teardown(); @@ -465,7 +477,14 @@ describe('e2e_synching', () => { await cheatCodes.eth.mine(); } // If it breaks here, first place you should look is the pruning. 
- await publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty()); + await publisher.enqueueProposeCheckpoint( + checkpoint, + CommitteeAttestationsAndSigners.empty({ + chainId: 31337, + rollupAddress: deployL1ContractsValues.l1ContractAddresses.rollupAddress, + }), + Signature.empty(), + ); await cheatCodes.rollup.markAsProven(CheckpointNumber(provenThrough)); } diff --git a/yarn-project/end-to-end/src/fixtures/setup.ts b/yarn-project/end-to-end/src/fixtures/setup.ts index 21535e00cf4d..9905f35df3de 100644 --- a/yarn-project/end-to-end/src/fixtures/setup.ts +++ b/yarn-project/end-to-end/src/fixtures/setup.ts @@ -734,7 +734,7 @@ export async function waitForProvenChain(node: AztecNode, targetBlock?: BlockNum targetBlock ??= await node.getBlockNumber(); await retryUntil( - async () => (await node.getProvenBlockNumber()) >= targetBlock, + async () => (await node.getBlockNumber('proven')) >= targetBlock, 'proven chain status', timeoutSec, intervalSec, diff --git a/yarn-project/end-to-end/src/spartan/block_capacity.test.ts b/yarn-project/end-to-end/src/spartan/block_capacity.test.ts index f4c6b9fabd55..41f99888907a 100644 --- a/yarn-project/end-to-end/src/spartan/block_capacity.test.ts +++ b/yarn-project/end-to-end/src/spartan/block_capacity.test.ts @@ -281,7 +281,7 @@ describe('block capacity benchmark', () => { if (blockTxCounts.some(b => b.blockNumber === bn)) { continue; } - const block = await aztecNode.getBlock(BlockNumber(bn)); + const block = await aztecNode.getBlock(BlockNumber(bn), { includeTransactions: true }); if (block) { const txCount = block.body.txEffects.length; blockTxCounts.push({ blockNumber: bn, txCount }); diff --git a/yarn-project/end-to-end/src/spartan/n_tps.test.ts b/yarn-project/end-to-end/src/spartan/n_tps.test.ts index 2840e92520e2..16bda90b2799 100644 --- a/yarn-project/end-to-end/src/spartan/n_tps.test.ts +++ b/yarn-project/end-to-end/src/spartan/n_tps.test.ts @@ -321,9 +321,18 @@ 
describe('sustained N TPS test', () => { salt, ); const deployMethod = await manager.getDeployMethod(); + // Explicit gas estimation: BaseWallet's fallback bakes + // APPROXIMATE_MAX_DA_GAS_PER_BLOCK=196_608 daGas into deploys, which exceeds + // the proposer's per-block fair-share daGas (~94k at 10 blocks/checkpoint + // with pipelining). Estimate first, send with the result. EmbeddedWallet + // does this automatically; TestWallet (used here via WorkerWallet) does not. + const deploySim = await deployMethod.simulate({ + from: NO_FROM, + fee: { paymentMethod: sponsor, estimateGas: true }, + }); await deployMethod.send({ from: NO_FROM, - fee: { paymentMethod: sponsor }, + fee: { paymentMethod: sponsor, gasSettings: deploySim.estimatedGas }, wait: { timeout: 2400 }, }); return address; @@ -541,18 +550,25 @@ describe('sustained N TPS test', () => { aztecNode: highValueTestWallets[i].aztecNode, address: highValueAddresses[i], })); + const startedAt = new Date().toISOString(); sendTxsAtTps(logger, abortController.signal, lowValueLanes, lowValueTps, lowValueSendTx); const sentTxHashes = sendTxsAtTps(logger, abortController.signal, highValueLanes, highValueTps, highValueSendTx); await sleep(TEST_DURATION_SECONDS * 1000); abortController.abort(); + const endedAt = new Date().toISOString(); logger.info('Stopped transaction senders', { lowValueTxs, highValueTxs, highValueSent: sentTxHashes.length, }); + // metadata about the test run for the scraper script + const metadataPath = '/tmp/n_tps_timing_data.json'; + await writeFile(metadataPath, JSON.stringify({ startedAt, endedAt, runId: process.env.BENCH_RUN_ID })); + logger.info('Wrote benchmark metadata', { path: metadataPath, startedAt, endedAt }); + const results: { success: boolean; txHash: string; error?: any }[] = []; const waitForTx = async (txHash: string, txName: string) => { try { diff --git a/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts b/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts index 
7d1f5bc9a2c3..3519bdce1092 100644 --- a/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts +++ b/yarn-project/end-to-end/src/spartan/n_tps_prove.test.ts @@ -575,14 +575,14 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { ); // Poll for proof completion while detecting reorgs let lastBlockNumber = await aztecNode.getBlockNumber(); - const currentProvenBlock = await aztecNode.getProvenBlockNumber(); + const currentProvenBlock = await aztecNode.getBlockNumber('proven'); logger.info(`Waiting for proven chain to advance ${currentProvenBlock} -> ${targetProvenBlock}...`); const PROOF_TIMEOUT_S = 2 * epochDurationSeconds; const proofTimer = new Timer(); while (true) { const [provenBlock, currentBlockNumber] = await Promise.all([ - aztecNode.getProvenBlockNumber(), + aztecNode.getBlockNumber('proven'), aztecNode.getBlockNumber(), ]); @@ -622,7 +622,7 @@ describe(`prove ${TARGET_TPS}TPS test`, () => { metrics.recordProofDuration(proofDurationSeconds); logger.info(`Epoch ${targetProofEpoch} proof completed in ${proofDurationSeconds.toFixed(1)}s`); - const finalProvenBlock = await aztecNode.getProvenBlockNumber(); + const finalProvenBlock = await aztecNode.getBlockNumber('proven'); expect(finalProvenBlock).toBeGreaterThanOrEqual(targetProvenBlock); logger.info('Test completed successfully'); diff --git a/yarn-project/end-to-end/src/spartan/proving.test.ts b/yarn-project/end-to-end/src/spartan/proving.test.ts index d03b13f21deb..0c3bd5aa94fa 100644 --- a/yarn-project/end-to-end/src/spartan/proving.test.ts +++ b/yarn-project/end-to-end/src/spartan/proving.test.ts @@ -31,7 +31,7 @@ describe('proving test', () => { it('advances the proven chain', async () => { let [provenBlockNumber, blockNumber] = await Promise.all([ - aztecNode.getProvenBlockNumber(), + aztecNode.getBlockNumber('proven'), aztecNode.getBlockNumber(), ]); let ok: boolean; @@ -41,7 +41,7 @@ describe('proving test', () => { while (true) { const [newProvenBlockNumber, newBlockNumber] = await Promise.all([ - 
aztecNode.getProvenBlockNumber(), + aztecNode.getBlockNumber('proven'), aztecNode.getBlockNumber(), ]); diff --git a/yarn-project/end-to-end/src/spartan/tx_metrics.ts b/yarn-project/end-to-end/src/spartan/tx_metrics.ts index 400aafdb5edd..e027926e0f72 100644 --- a/yarn-project/end-to-end/src/spartan/tx_metrics.ts +++ b/yarn-project/end-to-end/src/spartan/tx_metrics.ts @@ -1,6 +1,6 @@ import type { AztecNode } from '@aztec/aztec.js/node'; import type { Logger } from '@aztec/foundation/log'; -import type { L2Block } from '@aztec/stdlib/block'; +import type { BlockResponse } from '@aztec/stdlib/interfaces/client'; import type { TopicType } from '@aztec/stdlib/p2p'; import { Tx, type TxReceipt } from '@aztec/stdlib/tx'; @@ -147,7 +147,7 @@ export type TxInclusionData = { export class TxInclusionMetrics { private data = new Map(); private groups = new Set(); - private blocks = new Map>(); + private blocks = new Map | undefined>>(); private p2pGossipLatencyByTopic: Partial> = {}; @@ -198,7 +198,7 @@ export class TxInclusionMetrics { } if (!this.blocks.has(blockNumber)) { - this.blocks.set(blockNumber, this.aztecNode.getBlock(blockNumber)); + this.blocks.set(blockNumber, this.aztecNode.getBlock(blockNumber, { includeTransactions: true })); } const block = await this.blocks.get(blockNumber)!; diff --git a/yarn-project/ethereum/src/l1_reader.ts b/yarn-project/ethereum/src/l1_reader.ts index 4d31bbd78ac4..b23ed6f1e621 100644 --- a/yarn-project/ethereum/src/l1_reader.ts +++ b/yarn-project/ethereum/src/l1_reader.ts @@ -30,7 +30,7 @@ export const l1ReaderConfigMappings: ConfigMappingsType = { }, l1ChainId: { env: 'L1_CHAIN_ID', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), description: 'The chain ID of the ethereum host.', }, l1RpcUrls: { diff --git a/yarn-project/foundation/src/types/index.ts b/yarn-project/foundation/src/types/index.ts index dcf7cf926d70..d8c54ad47e79 100644 --- a/yarn-project/foundation/src/types/index.ts +++ 
b/yarn-project/foundation/src/types/index.ts @@ -34,6 +34,20 @@ export type Prettify = { [K in keyof T]: T[K]; } & {}; +/** Returns a type with fields conditionally required based on a flag */ +export type IfFlag< + OptsSchema, + Opts extends OptsSchema, + Key extends keyof OptsSchema, + Field extends object, +> = Opts extends { [K in Key]: true } + ? Field + : Opts extends { [K in Key]: false } + ? {} + : Opts extends { [K in Key]?: boolean } + ? Partial + : {}; + /** * Type-safe Event Emitter type * @example diff --git a/yarn-project/p2p/src/client/factory.ts b/yarn-project/p2p/src/client/factory.ts index 8ad423b3c1b1..8e702494697e 100644 --- a/yarn-project/p2p/src/client/factory.ts +++ b/yarn-project/p2p/src/client/factory.ts @@ -76,7 +76,7 @@ export async function createP2PClient( const store = deps.store ?? (await createStore(P2P_STORE_NAME, 2, config, bindings)); const archive = await createStore(P2P_ARCHIVE_STORE_NAME, 1, config, bindings); const peerStore = await createStore(P2P_PEER_STORE_NAME, 1, config, bindings); - const attestationStore = await createStore(P2P_ATTESTATION_STORE_NAME, 1, config, bindings); + const attestationStore = await createStore(P2P_ATTESTATION_STORE_NAME, 2, config, bindings); const l1Constants = await archiver.getL1Constants(); const rollupAddress = inputConfig.l1Contracts.rollupAddress.toString().toLowerCase().replace(/^0x/, ''); diff --git a/yarn-project/p2p/src/config.ts b/yarn-project/p2p/src/config.ts index 6cedf8c8131e..1199b6d701c6 100644 --- a/yarn-project/p2p/src/config.ts +++ b/yarn-project/p2p/src/config.ts @@ -6,6 +6,7 @@ import { getConfigFromMappings, getDefaultConfig, numberConfigHelper, + optionalNumberConfigHelper, percentageConfigHelper, pickConfigMappings, secretStringConfigHelper, @@ -216,6 +217,9 @@ export interface P2PConfig /** Minimum percentage fee increase required to replace an existing tx via RPC (0 = no bump). 
*/ priceBumpPercentage: bigint; + + /** Drop incoming block and checkpoint proposals at the libp2p dispatch layer (for testing only) */ + skipIncomingProposals?: boolean; } export const DEFAULT_P2P_PORT = 40400; @@ -225,23 +229,23 @@ export const p2pConfigMappings: ConfigMappingsType = { env: 'VALIDATOR_MAX_TX_PER_BLOCK', description: 'Maximum transactions per block for validation. Overrides maxTxsPerBlock for gossip validation when set.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, validateMaxTxsPerCheckpoint: { env: 'VALIDATOR_MAX_TX_PER_CHECKPOINT', description: 'Maximum transactions per checkpoint for validation. Used as fallback for maxTxsPerBlock when that is not set.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, validateMaxL2BlockGas: { env: 'VALIDATOR_MAX_L2_BLOCK_GAS', description: 'Maximum L2 gas per block for validation. When set, txs exceeding this limit are rejected.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, validateMaxDABlockGas: { env: 'VALIDATOR_MAX_DA_BLOCK_GAS', description: 'Maximum DA gas per block for validation. When set, txs exceeding this limit are rejected.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, p2pEnabled: { env: 'P2P_ENABLED', @@ -443,7 +447,7 @@ export const p2pConfigMappings: ConfigMappingsType = { }, p2pStoreMapSizeKb: { env: 'P2P_STORE_MAP_SIZE_KB', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), description: 'The maximum possible size of the P2P DB in KB. 
Overwrites the general dataStoreMapSizeKb.', }, txPublicSetupAllowListExtend: { @@ -505,7 +509,7 @@ export const p2pConfigMappings: ConfigMappingsType = { l1PublishingTime: { env: 'SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT', description: 'How much time (in seconds) we allow in the slot for publishing the L1 tx (defaults to 1 L1 slot).', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, fishermanMode: { env: 'FISHERMAN_MODE', @@ -518,6 +522,10 @@ export const p2pConfigMappings: ConfigMappingsType = { 'Broadcast block proposals even when a conflicting proposal for the same slot already exists in the pool (for testing purposes only).', ...booleanConfigHelper(false), }, + skipIncomingProposals: { + description: 'Drop incoming block and checkpoint proposals at the libp2p dispatch layer (for testing only)', + ...booleanConfigHelper(false), + }, minTxPoolAgeMs: { env: 'P2P_MIN_TX_POOL_AGE_MS', description: 'Minimum age (ms) a transaction must have been in the pool before it is eligible for block building.', diff --git a/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts b/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts index 419ed432028a..6cca1f3df4e9 100644 --- a/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts +++ b/yarn-project/p2p/src/mem_pools/attestation_pool/mocks.ts @@ -3,11 +3,12 @@ import type { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer' import { Fr } from '@aztec/foundation/curves/bn254'; import { CheckpointAttestation, + CheckpointProposal, ConsensusPayload, - SignatureDomainSeparator, - getHashedSignaturePayloadEthSignedMessage, + getHashedSignaturePayloadTypedData, } from '@aztec/stdlib/p2p'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; +import { TEST_COORDINATION_SIGNATURE_CONTEXT } from '@aztec/stdlib/testing'; import { type LocalAccount, generatePrivateKey, privateKeyToAccount } from 'viem/accounts'; @@ -37,15 +38,19 @@ export const mockCheckpointAttestation = ( 
feeAssetPriceModifier: bigint = 0n, ): CheckpointAttestation => { header = header ?? CheckpointHeader.random({ slotNumber: SlotNumber(slot) }); - const payload = new ConsensusPayload(header, archive, feeAssetPriceModifier); + const payload = new ConsensusPayload(header, archive, feeAssetPriceModifier, TEST_COORDINATION_SIGNATURE_CONTEXT); - const attestationHash = getHashedSignaturePayloadEthSignedMessage( - payload, - SignatureDomainSeparator.checkpointAttestation, - ); + const attestationHash = getHashedSignaturePayloadTypedData(payload); const attestationSignature = signer.sign(attestationHash); - const proposalHash = getHashedSignaturePayloadEthSignedMessage(payload, SignatureDomainSeparator.checkpointProposal); + const proposal = new CheckpointProposal( + header, + archive, + feeAssetPriceModifier, + attestationSignature, + TEST_COORDINATION_SIGNATURE_CONTEXT, + ); + const proposalHash = getHashedSignaturePayloadTypedData(proposal); const proposerSignature = signer.sign(proposalHash); return new CheckpointAttestation(payload, attestationSignature, proposerSignature); diff --git a/yarn-project/p2p/src/mem_pools/instrumentation.ts b/yarn-project/p2p/src/mem_pools/instrumentation.ts index d76d2c30ad4a..57acd5d57f76 100644 --- a/yarn-project/p2p/src/mem_pools/instrumentation.ts +++ b/yarn-project/p2p/src/mem_pools/instrumentation.ts @@ -100,7 +100,11 @@ export class PoolInstrumentation { this.addObjectCounter = createUpDownCounterWithDefault(this.meter, metricsLabels.itemsAdded); - this.minedDelay = this.meter.createHistogram(metricsLabels.itemMinedDelay); + this.minedDelay = this.meter.createHistogram(metricsLabels.itemMinedDelay, { + advice: { + explicitBucketBoundaries: [100, 500, 1000, 5000, 10000, 30000, 60000, 300000, 600000, 1800000, 3600000], + }, + }); this.meter.addBatchObservableCallback(this.observeStats, [this.objectsInMempool]); } diff --git a/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.test.ts 
b/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.test.ts index 1c1acd16e7f2..bdb09e22be7d 100644 --- a/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.test.ts +++ b/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.test.ts @@ -4,7 +4,7 @@ import { EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { PeerErrorSeverity } from '@aztec/stdlib/p2p'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; -import { makeCheckpointAttestation } from '@aztec/stdlib/testing'; +import { TEST_COORDINATION_SIGNATURE_CONTEXT, makeCheckpointAttestation } from '@aztec/stdlib/testing'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -22,11 +22,28 @@ describe('CheckpointAttestationValidator', () => { slotDuration: 72, ethereumSlotDuration: 12, } as any); - validator = new CheckpointAttestationValidator(epochCache, { l1PublishingTime: 12 }); + validator = new CheckpointAttestationValidator(epochCache, { + l1PublishingTime: 12, + signatureContext: TEST_COORDINATION_SIGNATURE_CONTEXT, + }); proposer = Secp256k1Signer.random(); attester = Secp256k1Signer.random(); }); + it('rejects foreign signature context with low tolerance error', async () => { + const mockAttestation = makeCheckpointAttestation({ + attesterSigner: attester, + proposerSigner: proposer, + signatureContext: { + ...TEST_COORDINATION_SIGNATURE_CONTEXT, + chainId: TEST_COORDINATION_SIGNATURE_CONTEXT.chainId + 1, + }, + }); + + const result = await validator.validate(mockAttestation); + expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.LowToleranceError }); + }); + it('returns high tolerance error if slot number is not current or next slot (outside clock tolerance)', async () => { const header = CheckpointHeader.random({ slotNumber: SlotNumber(97) }); const mockAttestation = 
makeCheckpointAttestation({ @@ -78,8 +95,9 @@ describe('CheckpointAttestationValidator', () => { expect(result).toEqual({ result: 'ignore' }); }); - it('accepts attestation for current slot until the target-slot publish cutoff', async () => { + it('accepts attestation for current slot inside the straggler window', async () => { // Attestation is for slot 98 (current wallclock slot), but targetSlot is 99 (pipelining). + // attestationWindowIntoTargetSlot = 2*p2p = 4s ⇒ straggler grace 4s+500ms disparity. const header = CheckpointHeader.random({ slotNumber: SlotNumber(98) }); const mockAttestation = makeCheckpointAttestation({ header, @@ -98,12 +116,11 @@ describe('CheckpointAttestationValidator', () => { ethereumSlotDuration: 12, } as any); - // Within attestation window: 59000ms elapsed < (slotDuration - l1PublishingTime) * 1000 = 60000ms epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: EpochNumber(1), slot: SlotNumber(98), ts: 1000n, - nowMs: 1059000n, // 59000ms elapsed + nowMs: 1003000n, // 3000ms elapsed, within 4500ms straggler grace }); epochCache.isInCommittee.mockResolvedValue(true); epochCache.getProposerAttesterAddressInSlot.mockResolvedValue(proposer.address); @@ -112,8 +129,7 @@ describe('CheckpointAttestationValidator', () => { expect(result).toEqual({ result: 'accept' }); }); - it('rejects attestation for current slot after the target-slot publish cutoff', async () => { - // Attestation is for slot 98 (one behind target slot 99), after the publish cutoff. 
+ it('rejects attestation for current slot past the straggler window', async () => { const header = CheckpointHeader.random({ slotNumber: SlotNumber(98) }); const mockAttestation = makeCheckpointAttestation({ header, @@ -133,12 +149,11 @@ describe('CheckpointAttestationValidator', () => { ethereumSlotDuration: 12, } as any); - // Outside attestation window AND outside clock tolerance: 61000ms elapsed > 60000ms cutoff epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: EpochNumber(1), slot: SlotNumber(99), ts: 1000n, - nowMs: 1061000n, // 61000ms elapsed + nowMs: 1005000n, // 5000ms elapsed, past 4500ms straggler cutoff }); epochCache.isInCommittee.mockResolvedValue(true); diff --git a/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.ts b/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.ts index 5d84604b592a..c4d30a69adc5 100644 --- a/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.ts +++ b/yarn-project/p2p/src/msg_validators/attestation_validator/attestation_validator.ts @@ -3,9 +3,11 @@ import { NoCommitteeError } from '@aztec/ethereum/contracts'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { type CheckpointAttestation, + type CoordinationSignatureContext, type P2PValidator, PeerErrorSeverity, type ValidationResult, + hasValidSignatureContext, } from '@aztec/stdlib/p2p'; import { PipeliningWindow, isWithinClockTolerance } from '../clock_tolerance.js'; @@ -14,15 +16,22 @@ export class CheckpointAttestationValidator implements P2PValidator { attestationPool = mock(); validator = new FishermanAttestationValidator(epochCache, attestationPool, getTelemetryClient(), { l1PublishingTime: 12, + signatureContext: TEST_COORDINATION_SIGNATURE_CONTEXT, }); proposer = Secp256k1Signer.random(); attester = Secp256k1Signer.random(); diff --git a/yarn-project/p2p/src/msg_validators/attestation_validator/fisherman_attestation_validator.ts 
b/yarn-project/p2p/src/msg_validators/attestation_validator/fisherman_attestation_validator.ts index 5a349757a3b0..3b83d030d9ea 100644 --- a/yarn-project/p2p/src/msg_validators/attestation_validator/fisherman_attestation_validator.ts +++ b/yarn-project/p2p/src/msg_validators/attestation_validator/fisherman_attestation_validator.ts @@ -1,5 +1,10 @@ import type { EpochCacheInterface } from '@aztec/epoch-cache'; -import { type CheckpointAttestation, PeerErrorSeverity, type ValidationResult } from '@aztec/stdlib/p2p'; +import { + type CheckpointAttestation, + type CoordinationSignatureContext, + PeerErrorSeverity, + type ValidationResult, +} from '@aztec/stdlib/p2p'; import { Attributes, Metrics, type TelemetryClient, createUpDownCounterWithDefault } from '@aztec/telemetry-client'; import type { AttestationPoolApi } from '../../mem_pools/attestation_pool/attestation_pool.js'; @@ -22,7 +27,9 @@ export class FishermanAttestationValidator extends CheckpointAttestationValidato telemetryClient: TelemetryClient, opts: { l1PublishingTime?: number; - } = {}, + p2pPropagationTime?: number; + signatureContext: CoordinationSignatureContext; + }, ) { super(epochCache, opts); this.logger = this.logger.createChild('[FISHERMAN]'); diff --git a/yarn-project/p2p/src/msg_validators/clock_tolerance.test.ts b/yarn-project/p2p/src/msg_validators/clock_tolerance.test.ts index f788ef041da5..57d7235c8ddc 100644 --- a/yarn-project/p2p/src/msg_validators/clock_tolerance.test.ts +++ b/yarn-project/p2p/src/msg_validators/clock_tolerance.test.ts @@ -206,6 +206,10 @@ describe('clock_tolerance', () => { }); describe('PipeliningWindow.acceptsProposal', () => { + // Config: 72s slot, 2s p2p. + // Under early pipelining, proposalWindowIntoTargetSlot = 0: only the 500ms + // clock-disparity grace keeps old-target-slot proposals acceptable after the + // receiver rolls into the next slot. 
let epochCache: ReturnType>; let pipeliningWindow: PipeliningWindow; @@ -220,118 +224,59 @@ describe('clock_tolerance', () => { pipeliningWindow = new PipeliningWindow(epochCache); }); - it('returns true when pipelining enabled, message is for current slot, and within grace period', () => { - // Grace period = DEFAULT_P2P_PROPAGATION_TIME * 1000 = 2000ms + it('accepts a current-slot proposal within clock-disparity grace', () => { epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: 1 as any, slot: SlotNumber(100), ts: 1000n, - nowMs: 1001000n, // 1000ms elapsed, within 2000ms grace period + nowMs: 1000400n, // 400ms elapsed, within 500ms grace }); expect(pipeliningWindow.acceptsProposal(SlotNumber(100))).toBe(true); }); - it('returns true at exactly 0ms elapsed', () => { + it('rejects a current-slot proposal past the clock-disparity grace', () => { epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: 1 as any, slot: SlotNumber(100), ts: 1000n, - nowMs: 1000000n, // 0ms elapsed - }); - - expect(pipeliningWindow.acceptsProposal(SlotNumber(100))).toBe(true); - }); - - it('returns false when elapsed time exceeds grace period', () => { - // 3000ms elapsed > 2000ms grace period - epochCache.getEpochAndSlotNow.mockReturnValue({ - epoch: 1 as any, - slot: SlotNumber(100), - ts: 1000n, - nowMs: 1003000n, // 3000ms elapsed + nowMs: 1001000n, // 1000ms elapsed, past 500ms grace }); expect(pipeliningWindow.acceptsProposal(SlotNumber(100))).toBe(false); }); - it('returns true at the propagation boundary when within clock disparity allowance', () => { - // 2000ms elapsed = DEFAULT_P2P_PROPAGATION_TIME * 1000, still within the extra 500ms allowance - epochCache.getEpochAndSlotNow.mockReturnValue({ - epoch: 1 as any, - slot: SlotNumber(100), - ts: 1000n, - nowMs: 1002000n, // 2000ms elapsed - }); - - expect(pipeliningWindow.acceptsProposal(SlotNumber(100))).toBe(true); - }); - - it('returns false at exactly the propagation boundary plus clock disparity allowance', () => { - // 
2500ms elapsed = 2000ms propagation window + 500ms disparity allowance (not strictly less than) + it('rejects proposals for other slots regardless of elapsed time', () => { epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: 1 as any, slot: SlotNumber(100), ts: 1000n, - nowMs: 1002500n, // 2500ms elapsed - }); - - expect(pipeliningWindow.acceptsProposal(SlotNumber(100))).toBe(false); - }); - - it('returns false when pipelining is disabled', () => { - epochCache.isProposerPipeliningEnabled.mockReturnValue(false); - - epochCache.getEpochAndSlotNow.mockReturnValue({ - epoch: 1 as any, - slot: SlotNumber(100), - ts: 1000n, - nowMs: 1001000n, // 1000ms elapsed, within grace period - }); - - expect(pipeliningWindow.acceptsProposal(SlotNumber(100))).toBe(false); - }); - - it('returns false when message is not for current slot', () => { - epochCache.getEpochAndSlotNow.mockReturnValue({ - epoch: 1 as any, - slot: SlotNumber(100), - ts: 1000n, - nowMs: 1001000n, + nowMs: 1000000n, }); - // Message for slot 99, current slot is 100 expect(pipeliningWindow.acceptsProposal(SlotNumber(99))).toBe(false); - }); - - it('returns false when message is for a future slot', () => { - epochCache.getEpochAndSlotNow.mockReturnValue({ - epoch: 1 as any, - slot: SlotNumber(100), - ts: 1000n, - nowMs: 1001000n, - }); - - // Message for slot 101, current slot is 100 expect(pipeliningWindow.acceptsProposal(SlotNumber(101))).toBe(false); + expect(pipeliningWindow.acceptsProposal(SlotNumber(102))).toBe(false); }); - it('uses the provided propagation time instead of the default', () => { + it('rejects when pipelining is disabled', () => { + epochCache.isProposerPipeliningEnabled.mockReturnValue(false); epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: 1 as any, slot: SlotNumber(100), ts: 1000n, - nowMs: 1003000n, // 3000ms elapsed + nowMs: 1000000n, }); - const longerWindow = new PipeliningWindow(epochCache, { p2pPropagationTime: 4 }); - - 
expect(longerWindow.acceptsProposal(SlotNumber(100))).toBe(true); expect(pipeliningWindow.acceptsProposal(SlotNumber(100))).toBe(false); }); }); describe('PipeliningWindow.acceptsAttestation', () => { + // Config: 72s slot, 2s p2p. + // Under early pipelining, attestationWindowIntoTargetSlot = 2*p2p = 4s, + // giving straggler attestations 4s+500ms grace after the receiver rolls + // into the next slot. let epochCache: ReturnType>; let pipeliningWindow: PipeliningWindow; @@ -346,36 +291,39 @@ describe('clock_tolerance', () => { pipeliningWindow = new PipeliningWindow(epochCache, { l1PublishingTime: 12 }); }); - it('returns true while still before the target-slot publish cutoff', () => { + it('accepts a current-slot straggler attestation within the target-slot window', () => { epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: 1 as any, slot: SlotNumber(100), ts: 1000n, - nowMs: 1059000n, // 59000ms elapsed + nowMs: 1003000n, // 3000ms elapsed, within 4500ms straggler grace }); expect(pipeliningWindow.acceptsAttestation(SlotNumber(100))).toBe(true); }); - it('returns true at the target-slot publish cutoff when within clock disparity allowance', () => { + it('rejects a current-slot attestation past the straggler window', () => { epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: 1 as any, slot: SlotNumber(100), ts: 1000n, - nowMs: 1060000n, // 60000ms elapsed + nowMs: 1005000n, // 5000ms elapsed, past 4500ms cutoff }); - expect(pipeliningWindow.acceptsAttestation(SlotNumber(100))).toBe(true); + expect(pipeliningWindow.acceptsAttestation(SlotNumber(100))).toBe(false); }); - it('returns false at the target-slot publish cutoff plus clock disparity allowance', () => { + it('scales the straggler window with p2pPropagationTime', () => { + // With p2pPropagationTime = 4, straggler window = 8s + 500ms disparity. 
+ const longer = new PipeliningWindow(epochCache, { l1PublishingTime: 12, p2pPropagationTime: 4 }); epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: 1 as any, slot: SlotNumber(100), ts: 1000n, - nowMs: 1060500n, // 60500ms elapsed + nowMs: 1007000n, }); + expect(longer.acceptsAttestation(SlotNumber(100))).toBe(true); expect(pipeliningWindow.acceptsAttestation(SlotNumber(100))).toBe(false); }); }); diff --git a/yarn-project/p2p/src/msg_validators/clock_tolerance.ts b/yarn-project/p2p/src/msg_validators/clock_tolerance.ts index 6eee1f56e72e..4200d9d64449 100644 --- a/yarn-project/p2p/src/msg_validators/clock_tolerance.ts +++ b/yarn-project/p2p/src/msg_validators/clock_tolerance.ts @@ -53,17 +53,24 @@ export function isWithinClockTolerance( } /** - * Checks if a message should be accepted under the pipelining grace period. + * Checks if a straggler message for the previous target slot should be accepted. * - * When pipelining is enabled, `targetSlot = slotNow + 1`. A proposal built in slot N-1 - * for slot N arrives when validators are in slot N, so their `targetSlot = N+1`. - * This function accepts proposals for the current wallclock slot if we're within the - * first `windowSeconds` seconds of the slot (the pipelining grace period). - see stdlib/timetable/index.ts + * Under pipelining, proposals and attestations carry the target slot N. Most of the + * time the receiver is either still in the build slot N-1 (accepted via the main + * `slotNumber === targetSlot` match) or in the target slot N (accepted via + * `slotNumber === nextSlot` when pipelining is disabled, or again via `targetSlot` + * when the receiver itself is pipelining). Stragglers that arrive after the receiver + * has rolled past the target slot fall to this check: accept `messageSlot === slotNow` + * while we're still within the first `windowSeconds + clock-disparity` of the slot. 
+ * + * Under the early-pipelining schedule `windowSeconds` is small (0 for proposals, + * `2*p2pPropagationTime` for attestations) since the proposer collects everything + * before the slot boundary. * * @param messageSlot - The slot number from the received message * @param epochCache - EpochCache to get timing and pipelining state - * @param windowSeconds - The window grace period allowed for attestations into the next slot - * @returns true if pipelining is enabled, the message is for the current slot, and we're within the grace period + * @param windowSeconds - How far into the current slot we still accept previous-target messages + * @returns true if pipelining is enabled, the message is for the current wallclock slot, and we're within the grace period */ function isWithinPipeliningWindow( messageSlot: SlotNumber, diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.ts index acd0e5bb079c..625f87ebcbd4 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.ts +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/block_proposal_validator.ts @@ -1,5 +1,5 @@ import type { EpochCacheInterface } from '@aztec/epoch-cache'; -import type { BlockProposal, P2PValidator, ValidationResult } from '@aztec/stdlib/p2p'; +import type { BlockProposal, CoordinationSignatureContext, P2PValidator, ValidationResult } from '@aztec/stdlib/p2p'; import { ProposalValidator } from '../proposal_validator/proposal_validator.js'; @@ -8,7 +8,12 @@ export class BlockProposalValidator implements P2PValidator { constructor( epochCache: EpochCacheInterface, - opts: { txsPermitted: boolean; maxTxsPerBlock?: number; p2pPropagationTime?: number }, + opts: { + txsPermitted: boolean; + maxTxsPerBlock?: number; + p2pPropagationTime?: number; + signatureContext: CoordinationSignatureContext; + }, ) { this.proposalValidator = new 
ProposalValidator(epochCache, opts, 'p2p:block_proposal_validator'); } diff --git a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.ts index 21261084defa..fae7fe64de0f 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.ts +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/checkpoint_proposal_validator.ts @@ -1,5 +1,10 @@ import type { EpochCacheInterface } from '@aztec/epoch-cache'; -import type { CheckpointProposal, P2PValidator, ValidationResult } from '@aztec/stdlib/p2p'; +import type { + CheckpointProposal, + CoordinationSignatureContext, + P2PValidator, + ValidationResult, +} from '@aztec/stdlib/p2p'; import { ProposalValidator } from '../proposal_validator/proposal_validator.js'; @@ -8,7 +13,12 @@ export class CheckpointProposalValidator implements P2PValidator { const currentSlot = SlotNumber(100); const nextSlot = SlotNumber(101); const previousSlot = SlotNumber(99); + const foreignSignatureContext = { + ...TEST_COORDINATION_SIGNATURE_CONTEXT, + chainId: TEST_COORDINATION_SIGNATURE_CONTEXT.chainId + 1, + }; let epochCache: MockProxy; let validator: ProposalValidator; @@ -47,7 +52,12 @@ describe('ProposalValidator', () => { } as any); validator = new ProposalValidator( epochCache, - { txsPermitted: true, maxTxsPerBlock: undefined, p2pPropagationTime: 2 }, + { + txsPermitted: true, + maxTxsPerBlock: undefined, + p2pPropagationTime: 2, + signatureContext: TEST_COORDINATION_SIGNATURE_CONTEXT, + }, 'test', ); epochCache.getEpochAndSlotNow.mockReturnValue({ @@ -68,15 +78,38 @@ describe('ProposalValidator', () => { describe.each([ { name: 'block proposal', - factory: (slotNumber: SlotNumber, signer: Secp256k1Signer) => - makeBlockProposal({ blockHeader: makeBlockHeader(0, { slotNumber }), signer }), + factory: ( + slotNumber: SlotNumber, + signer: Secp256k1Signer, + signatureContext 
= TEST_COORDINATION_SIGNATURE_CONTEXT, + ) => + makeBlockProposal({ + blockHeader: makeBlockHeader(0, { slotNumber }), + signer, + signatureContext, + }), }, { name: 'checkpoint proposal', - factory: (slotNumber: SlotNumber, signer: Secp256k1Signer) => - makeCheckpointProposal({ checkpointHeader: makeCheckpointHeader(0, { slotNumber }), signer }), + factory: ( + slotNumber: SlotNumber, + signer: Secp256k1Signer, + signatureContext = TEST_COORDINATION_SIGNATURE_CONTEXT, + ) => + makeCheckpointProposal({ + checkpointHeader: makeCheckpointHeader(0, { slotNumber }), + signer, + signatureContext, + }), }, ])('validate with $name', ({ factory }) => { + it('rejects foreign signature context with low tolerance error', async () => { + const proposal = await factory(currentSlot, Secp256k1Signer.random(), foreignSignatureContext); + + const result = await validator.validate(proposal); + expect(result).toEqual({ result: 'reject', severity: PeerErrorSeverity.LowToleranceError }); + }); + it('rejects with high tolerance error if slot is outside clock tolerance', async () => { const proposal = await factory(previousSlot, Secp256k1Signer.random()); @@ -180,8 +213,9 @@ describe('ProposalValidator', () => { expect(result).toEqual({ result: 'accept' }); }); - it('accepts proposal for current slot within pipelining grace period', async () => { - // Simulate pipelining: targetSlot = 101, but proposal is for slot 100 (current wallclock slot) + it('accepts proposal for current slot within pipelining clock-disparity grace', async () => { + // Simulate pipelining: targetSlot = 101, but proposal is for slot 100 (current wallclock slot). + // Under early pipelining, proposalWindowIntoTargetSlot = 0, so only the 500ms clock-disparity grace applies. 
epochCache.getTargetAndNextSlot.mockReturnValue({ targetSlot: SlotNumber(101), nextSlot: SlotNumber(102), @@ -189,12 +223,11 @@ describe('ProposalValidator', () => { epochCache.getSlotNow.mockReturnValue(currentSlot); // slot 100 epochCache.isProposerPipeliningEnabled.mockReturnValue(true); - // Within grace period: 1000ms elapsed < configured propagation window 2000ms epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: EpochNumber(1), slot: currentSlot, ts: 1000n, - nowMs: 1001000n, // 1000ms elapsed + nowMs: 1000400n, // 400ms elapsed, within 500ms grace }); const signer = Secp256k1Signer.random(); @@ -205,8 +238,7 @@ describe('ProposalValidator', () => { expect(result).toEqual({ result: 'accept' }); }); - it('rejects proposal for current slot outside pipelining grace period', async () => { - // Simulate pipelining: targetSlot = 101, but proposal is for slot 100 (current wallclock slot) + it('rejects proposal for current slot outside pipelining clock-disparity grace', async () => { epochCache.getTargetAndNextSlot.mockReturnValue({ targetSlot: SlotNumber(101), nextSlot: SlotNumber(102), @@ -215,12 +247,11 @@ describe('ProposalValidator', () => { epochCache.getSlotNow.mockReturnValue(currentSlot); // slot 100 epochCache.isProposerPipeliningEnabled.mockReturnValue(true); - // Outside grace period: 7000ms elapsed > configured propagation window 2000ms epochCache.getEpochAndSlotNow.mockReturnValue({ epoch: EpochNumber(1), slot: currentSlot, ts: 1000n, - nowMs: 1007000n, // 7000ms elapsed + nowMs: 1007000n, // 7000ms elapsed, well past 500ms grace }); const signer = Secp256k1Signer.random(); @@ -235,7 +266,15 @@ describe('ProposalValidator', () => { describe('validateTxs', () => { describe('txsPermitted', () => { it('rejects proposal with txHashes when txs not permitted', async () => { - validator = new ProposalValidator(epochCache, { txsPermitted: false, maxTxsPerBlock: undefined }, 'test'); + validator = new ProposalValidator( + epochCache, + { + txsPermitted: 
false, + maxTxsPerBlock: undefined, + signatureContext: TEST_COORDINATION_SIGNATURE_CONTEXT, + }, + 'test', + ); const proposal = await makeBlockProposal({ txHashes: [TxHash.random(), TxHash.random()] }); const result = await validator.validateTxs(proposal); @@ -243,7 +282,15 @@ describe('ProposalValidator', () => { }); it('accepts proposal with no txHashes when txs not permitted', async () => { - validator = new ProposalValidator(epochCache, { txsPermitted: false, maxTxsPerBlock: undefined }, 'test'); + validator = new ProposalValidator( + epochCache, + { + txsPermitted: false, + maxTxsPerBlock: undefined, + signatureContext: TEST_COORDINATION_SIGNATURE_CONTEXT, + }, + 'test', + ); const proposal = await makeBlockProposal({ txHashes: [] }); const result = await validator.validateTxs(proposal); @@ -283,7 +330,15 @@ describe('ProposalValidator', () => { describe('maxTxsPerBlock', () => { it('rejects when txHashes exceed maxTxsPerBlock', async () => { - validator = new ProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 2 }, 'test'); + validator = new ProposalValidator( + epochCache, + { + txsPermitted: true, + maxTxsPerBlock: 2, + signatureContext: TEST_COORDINATION_SIGNATURE_CONTEXT, + }, + 'test', + ); const proposal = await makeBlockProposal({ txHashes: Array.from({ length: 3 }, () => TxHash.random()) }); const result = await validator.validateTxs(proposal); @@ -291,7 +346,15 @@ describe('ProposalValidator', () => { }); it('accepts when txHashes count equals maxTxsPerBlock', async () => { - validator = new ProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: 2 }, 'test'); + validator = new ProposalValidator( + epochCache, + { + txsPermitted: true, + maxTxsPerBlock: 2, + signatureContext: TEST_COORDINATION_SIGNATURE_CONTEXT, + }, + 'test', + ); const proposal = await makeBlockProposal({ txHashes: Array.from({ length: 2 }, () => TxHash.random()) }); const result = await validator.validateTxs(proposal); diff --git 
a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.ts b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.ts index 2997a71a0a12..d04c6f01c7a2 100644 --- a/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.ts +++ b/yarn-project/p2p/src/msg_validators/proposal_validator/proposal_validator.ts @@ -4,8 +4,10 @@ import { type Logger, createLogger } from '@aztec/foundation/log'; import { type BlockProposal, type CheckpointProposalCore, + type CoordinationSignatureContext, PeerErrorSeverity, type ValidationResult, + hasValidSignatureContext, } from '@aztec/stdlib/p2p'; import { PipeliningWindow, isWithinClockTolerance } from '../clock_tolerance.js'; @@ -17,22 +19,40 @@ export class ProposalValidator { private txsPermitted: boolean; private maxTxsPerBlock?: number; private pipeliningWindow: PipeliningWindow; + private signatureContext: CoordinationSignatureContext; constructor( epochCache: EpochCacheInterface, - opts: { txsPermitted: boolean; maxTxsPerBlock?: number; p2pPropagationTime?: number }, + opts: { + txsPermitted: boolean; + maxTxsPerBlock?: number; + p2pPropagationTime?: number; + signatureContext: CoordinationSignatureContext; + }, loggerName: string, ) { this.epochCache = epochCache; this.txsPermitted = opts.txsPermitted; this.maxTxsPerBlock = opts.maxTxsPerBlock; this.pipeliningWindow = new PipeliningWindow(epochCache, { p2pPropagationTime: opts.p2pPropagationTime }); + this.signatureContext = opts.signatureContext; this.logger = createLogger(loggerName); } /** Validates header-level fields: slot, signature, and proposer. */ public async validate(proposal: BlockProposal | CheckpointProposalCore): Promise<ValidationResult> { try {
+ if (!hasValidSignatureContext(proposal, this.signatureContext)) { + this.logger.warn(`Penalizing peer for proposal with foreign signature context`, { + chainId: proposal.signatureContext.chainId, + rollupAddress: proposal.signatureContext.rollupAddress.toString(), + expectedChainId: this.signatureContext.chainId, + expectedRollupAddress: this.signatureContext.rollupAddress.toString(), + }); + return { result: 'reject', severity: PeerErrorSeverity.LowToleranceError }; + } + // Slot check: use target slots since proposals target pipeline slots (slot + 1 when pipelining). const { targetSlot, nextSlot } = this.epochCache.getTargetAndNextSlot(); diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.test.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.test.ts index 987bf9d85a36..be2ce9f19994 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.test.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.test.ts @@ -3,7 +3,6 @@ import { BlockNumber, IndexWithinCheckpoint, SlotNumber } from '@aztec/foundatio import { getDefaultConfig } from '@aztec/foundation/config'; import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { Fr } from '@aztec/foundation/curves/bn254'; -import { EthAddress } from '@aztec/foundation/eth-address'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/lmdb'; import type { L2BlockSource } from '@aztec/stdlib/block'; @@ -12,6 +11,7 @@ import { GasFees } from '@aztec/stdlib/gas'; import type { ClientProtocolCircuitVerifier } from '@aztec/stdlib/interfaces/server'; import { BlockProposal, PeerErrorSeverity } from '@aztec/stdlib/p2p'; import { + TEST_COORDINATION_SIGNATURE_CONTEXT, makeBlockHeader, makeBlockProposal, makeCheckpointHeader, @@ -1058,10 +1058,10 @@ class TestLibP2PService extends LibP2PService { seenMessageCacheSize: 1000, debugP2PInstrumentMessages: false, disableTransactions: false, - l1ChainId: 1, + 
l1ChainId: TEST_COORDINATION_SIGNATURE_CONTEXT.chainId, rollupVersion: 1, l1Contracts: { - rollupAddress: EthAddress.random(), + rollupAddress: TEST_COORDINATION_SIGNATURE_CONTEXT.rollupAddress, }, }; diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index 99679c8c730a..c93f53e89494 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -1,6 +1,6 @@ import type { EpochCacheInterface } from '@aztec/epoch-cache'; import { BlockNumber, type SlotNumber } from '@aztec/foundation/branded-types'; -import { maxBy } from '@aztec/foundation/collection'; +import { maxBy, merge } from '@aztec/foundation/collection'; import { type Logger, createLibp2pComponentLogger, createLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; import { Timer } from '@aztec/foundation/timer'; @@ -232,14 +232,21 @@ export class LibP2PService extends WithTracer implements P2PService { txsPermitted: !config.disableTransactions, maxTxsPerBlock: config.validateMaxTxsPerBlock ?? config.validateMaxTxsPerCheckpoint, p2pPropagationTime, + signatureContext: { + chainId: config.l1ChainId, + rollupAddress: config.l1Contracts.rollupAddress, + }, }; this.blockProposalValidator = new BlockProposalValidator(epochCache, proposalValidatorOpts); this.checkpointProposalValidator = new CheckpointProposalValidator(epochCache, proposalValidatorOpts); + const attestationValidatorOpts = { + l1PublishingTime: config.l1PublishingTime, + p2pPropagationTime, + signatureContext: proposalValidatorOpts.signatureContext, + }; this.checkpointAttestationValidator = config.fishermanMode - ? new FishermanAttestationValidator(epochCache, mempools.attestationPool, telemetry, { - l1PublishingTime: config.l1PublishingTime, - }) - : new CheckpointAttestationValidator(epochCache, { l1PublishingTime: config.l1PublishingTime }); + ? 
new FishermanAttestationValidator(epochCache, mempools.attestationPool, telemetry, attestationValidatorOpts) + : new CheckpointAttestationValidator(epochCache, attestationValidatorOpts); this.gossipSubEventHandler = this.handleGossipSubEvent.bind(this); @@ -264,8 +271,9 @@ export class LibP2PService extends WithTracer implements P2PService { }; } - public updateConfig(config: Partial) { + public updateConfig(config: Partial>) { this.reqresp.updateConfig(config); + this.config = merge(this.config, config); } /** @@ -842,6 +850,15 @@ export class LibP2PService extends WithTracer implements P2PService { // Process the message, optionally within a linked span for trace propagation const processMessage = async () => { + if ( + this.config.skipIncomingProposals && + (msg.topic === this.topicStrings[TopicType.block_proposal] || + msg.topic === this.topicStrings[TopicType.checkpoint_proposal]) + ) { + this.logger.warn(`Ignoring incoming proposal (skipIncomingProposals is set)`, { topic: msg.topic }); + this.node.services.pubsub.reportMessageValidationResult(msgId, source.toString(), TopicValidatorResult.Ignore); + return; + } if (msg.topic === this.topicStrings[TopicType.tx]) { await this.handleGossipedTx(p2pMessage.payload, msgId, source); } else if (msg.topic === this.topicStrings[TopicType.checkpoint_attestation]) { diff --git a/yarn-project/prover-client/src/proving_broker/config.ts b/yarn-project/prover-client/src/proving_broker/config.ts index a3f06628a89b..7b5ab7114aab 100644 --- a/yarn-project/prover-client/src/proving_broker/config.ts +++ b/yarn-project/prover-client/src/proving_broker/config.ts @@ -4,6 +4,7 @@ import { booleanConfigHelper, getDefaultConfig, numberConfigHelper, + optionalNumberConfigHelper, } from '@aztec/foundation/config'; import { pickConfigMappings } from '@aztec/foundation/config'; import { type ChainConfig, chainConfigMappings } from '@aztec/stdlib/config'; @@ -73,7 +74,7 @@ export const proverBrokerConfigMappings: ConfigMappingsType }, 
proverBrokerStoreMapSizeKb: { env: 'PROVER_BROKER_STORE_MAP_SIZE_KB', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), description: "The size of the prover broker's database. Will override the dataStoreMapSizeKb if set.", }, proverBrokerDebugReplayEnabled: { diff --git a/yarn-project/prover-node/src/prover-node-publisher.ts b/yarn-project/prover-node/src/prover-node-publisher.ts index ac0c0c7d14ff..b045e7cd912b 100644 --- a/yarn-project/prover-node/src/prover-node-publisher.ts +++ b/yarn-project/prover-node/src/prover-node-publisher.ts @@ -364,9 +364,9 @@ export class ProverNodePublisher { end: argsArray[1], args: argsArray[2], fees: argsArray[3], - attestations: new CommitteeAttestationsAndSigners( + attestations: CommitteeAttestationsAndSigners.packAttestations( args.attestations.map(a => CommitteeAttestation.fromViem(a)), - ).getPackedAttestations(), + ), blobInputs: argsArray[4], proof: proofHex, }; diff --git a/yarn-project/pxe/src/block_synchronizer/block_stream_source.ts b/yarn-project/pxe/src/block_synchronizer/block_stream_source.ts new file mode 100644 index 000000000000..507eb02699db --- /dev/null +++ b/yarn-project/pxe/src/block_synchronizer/block_stream_source.ts @@ -0,0 +1,52 @@ +import type { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { L2Block, type L2BlockSource } from '@aztec/stdlib/block'; +import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint'; +import type { AztecNode } from '@aztec/stdlib/interfaces/client'; + +// TODO(spl/new-rpc-api): delete once `L2BlockStream` is refactored to consume the new +// `BlockResponse` / `CheckpointResponse` shapes. For now the stream requires concrete `L2Block` +// and `PublishedCheckpoint` instances, so we rehydrate them from RPC responses. +/** + * Lifts an {@link AztecNode} RPC client into the shape {@link L2BlockStream} expects. 
`getBlocks` + * requests transaction bodies so that real `L2Block` instances can be constructed; + * `getCheckpoints` requests blocks + L1 info + attestations so that `PublishedCheckpoint` + * instances are fully populated. + */ +export function blockStreamSourceFromAztecNode( + node: AztecNode, +): Pick<L2BlockSource, 'getL2Tips' | 'getBlockHeader' | 'getCheckpointedBlocks' | 'getBlocks' | 'getCheckpoints'> { + return { + getL2Tips: () => node.getL2Tips(), + getBlockHeader: number => node.getBlockHeader(number), + getCheckpointedBlocks: (from: BlockNumber, limit: number) => node.getCheckpointedBlocks(from, limit), + + async getBlocks(from: BlockNumber, limit: number): Promise<L2Block[]> { + const responses = await node.getBlocks(from, limit, { includeTransactions: true }); + return responses.map(r => new L2Block(r.archive, r.header, r.body!, r.checkpointNumber, r.indexWithinCheckpoint)); + }, + + async getCheckpoints(from: CheckpointNumber, limit: number): Promise<PublishedCheckpoint[]> { + const responses = await node.getCheckpoints(from, limit, { + includeBlocks: true, + includeTransactions: true, + includeL1PublishInfo: true, + includeAttestations: true, + }); + return responses.map(r => { + const checkpoint = new Checkpoint( + r.archive, + r.header, + r.blocks!.map(b => new L2Block(b.archive, b.header, b.body!, b.checkpointNumber, b.indexWithinCheckpoint)), + r.number, + r.feeAssetPriceModifier, + ); + const l1 = + r.l1?.published === true + ? new L1PublishedData(r.l1.blockNumber, r.l1.timestamp, r.l1.blockHash) + : new L1PublishedData(0n, 0n, Fr.ZERO.toString()); + return new PublishedCheckpoint(checkpoint, l1, r.attestations ??
[]); + }); + }, + }; +} diff --git a/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts b/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts index c052adf3e163..fc82fb6dcb53 100644 --- a/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts +++ b/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts @@ -71,10 +71,17 @@ describe('BlockSynchronizer', () => { it('removes notes from db on a reorg', async () => { const rollback = jest.spyOn(noteStore, 'rollback').mockImplementation(() => Promise.resolve()); const block3Hash = Fr.fromString('0x3'); - aztecNode.getBlockHeader.mockImplementation(async block => { - // For the test, when block hash matches block 3, return block header for block 3 + aztecNode.getBlock.mockImplementation(async (block: any) => { if (block instanceof BlockHash && block.equals(block3Hash)) { - return (await L2Block.random(BlockNumber(3))).header; + const b = await L2Block.random(BlockNumber(3)); + return { + header: b.header, + archive: b.archive, + hash: await b.hash(), + checkpointNumber: b.checkpointNumber, + indexWithinCheckpoint: b.indexWithinCheckpoint, + number: b.number, + } as any; } return undefined; }); @@ -95,10 +102,17 @@ describe('BlockSynchronizer', () => { it('removes private events from db on a reorg', async () => { const rollback = jest.spyOn(privateEventStore, 'rollback').mockImplementation(() => Promise.resolve()); const block3Hash = Fr.fromString('0x3'); - aztecNode.getBlockHeader.mockImplementation(async block => { - // For the test, when block hash matches block 3, return block header for block 3 + aztecNode.getBlock.mockImplementation(async (block: any) => { if (block instanceof BlockHash && block.equals(block3Hash)) { - return (await L2Block.random(BlockNumber(3))).header; + const b = await L2Block.random(BlockNumber(3)); + return { + header: b.header, + archive: b.archive, + hash: await b.hash(), + checkpointNumber: b.checkpointNumber, + indexWithinCheckpoint: 
b.indexWithinCheckpoint, + number: b.number, + } as any; } return undefined; }); @@ -128,7 +142,15 @@ describe('BlockSynchronizer', () => { resolveSync = resolve; }); blockStream.sync.mockReturnValue(syncBlocker); - aztecNode.getBlockHeader.mockResolvedValue((await L2Block.random(BlockNumber(0))).header); + const genesisBlock = await L2Block.random(BlockNumber(0)); + aztecNode.getBlock.mockResolvedValue({ + header: genesisBlock.header, + archive: genesisBlock.archive, + hash: await genesisBlock.hash(), + checkpointNumber: genesisBlock.checkpointNumber, + indexWithinCheckpoint: genesisBlock.indexWithinCheckpoint, + number: genesisBlock.number, + } as any); // Start a sync (don't await) const syncPromise = synchronizer.sync(); @@ -234,7 +256,14 @@ describe('BlockSynchronizer', () => { // Mock node to return block header const provenBlock = await L2Block.random(BlockNumber(5)); - aztecNode.getBlockHeader.mockResolvedValue(provenBlock.header); + aztecNode.getBlock.mockResolvedValue({ + header: provenBlock.header, + archive: provenBlock.archive, + hash: await provenBlock.hash(), + checkpointNumber: provenBlock.checkpointNumber, + indexWithinCheckpoint: provenBlock.indexWithinCheckpoint, + number: provenBlock.number, + } as any); await synchronizer.handleBlockStreamEvent({ type: 'chain-proven', @@ -254,7 +283,14 @@ describe('BlockSynchronizer', () => { // Mock node to return block header const finalizedBlock = await L2Block.random(BlockNumber(10)); - aztecNode.getBlockHeader.mockResolvedValue(finalizedBlock.header); + aztecNode.getBlock.mockResolvedValue({ + header: finalizedBlock.header, + archive: finalizedBlock.archive, + hash: await finalizedBlock.hash(), + checkpointNumber: finalizedBlock.checkpointNumber, + indexWithinCheckpoint: finalizedBlock.indexWithinCheckpoint, + number: finalizedBlock.number, + } as any); await synchronizer.handleBlockStreamEvent({ type: 'chain-finalized', diff --git a/yarn-project/pxe/src/block_synchronizer/block_synchronizer.ts 
b/yarn-project/pxe/src/block_synchronizer/block_synchronizer.ts index ea2f889d529e..4a5d4fc15871 100644 --- a/yarn-project/pxe/src/block_synchronizer/block_synchronizer.ts +++ b/yarn-project/pxe/src/block_synchronizer/block_synchronizer.ts @@ -12,6 +12,7 @@ import type { ContractSyncService } from '../contract_sync/contract_sync_service import type { AnchorBlockStore } from '../storage/anchor_block_store/anchor_block_store.js'; import type { NoteStore } from '../storage/note_store/note_store.js'; import type { PrivateEventStore } from '../storage/private_event_store/private_event_store.js'; +import { blockStreamSourceFromAztecNode } from './block_stream_source.js'; /** * The BlockSynchronizer class orchestrates synchronization between PXE and Aztec node, maintaining an up-to-date @@ -42,7 +43,7 @@ export class BlockSynchronizer implements L2BlockStreamEventHandler { protected createBlockStream(config: Partial): L2BlockStream { return new L2BlockStream( - this.node, + blockStreamSourceFromAztecNode(this.node), this.l2TipsStore, this, createLogger('pxe:block_stream', this.log.getBindings()), @@ -81,9 +82,9 @@ export class BlockSynchronizer implements L2BlockStreamEventHandler { } case 'chain-proven': { if (this.config.syncChainTip === 'proven') { - const blockHeader = await this.node.getBlockHeader(BlockNumber(event.block.number)); - if (blockHeader) { - await this.updateAnchorBlockHeader(blockHeader); + const block = await this.node.getBlock(BlockNumber(event.block.number)); + if (block) { + await this.updateAnchorBlockHeader(block.header); } else { this.log.warn(`Block header not found for proven block ${event.block.number}, skipping anchor update`); } @@ -92,9 +93,9 @@ export class BlockSynchronizer implements L2BlockStreamEventHandler { } case 'chain-finalized': { if (this.config.syncChainTip === 'finalized') { - const blockHeader = await this.node.getBlockHeader(BlockNumber(event.block.number)); - if (blockHeader) { - await 
this.updateAnchorBlockHeader(blockHeader); + const block = await this.node.getBlock(BlockNumber(event.block.number)); + if (block) { + await this.updateAnchorBlockHeader(block.header); } else { this.log.warn(`Block header not found for finalized block ${event.block.number}, skipping anchor update`); } @@ -117,7 +118,8 @@ export class BlockSynchronizer implements L2BlockStreamEventHandler { // Note that the following is not necessarily the anchor block that will be used in the transaction - if // the chain has already moved past the reorg, we'll also see blocks-added events that will push the anchor // forward. - const newAnchorBlockHeader = await this.node.getBlockHeader(BlockHash.fromString(event.block.hash)); + const newAnchorBlock = await this.node.getBlock(BlockHash.fromString(event.block.hash)); + const newAnchorBlockHeader = newAnchorBlock?.header; if (!newAnchorBlockHeader) { throw new Error( @@ -191,7 +193,7 @@ export class BlockSynchronizer implements L2BlockStreamEventHandler { } if (!currentHeader) { // REFACTOR: We should know the header of the genesis block without having to request it from the node. 
- await this.anchorBlockStore.setHeader((await this.node.getBlockHeader(BlockNumber.ZERO))!); + await this.anchorBlockStore.setHeader((await this.node.getBlock(BlockNumber.ZERO))!.header); } await this.blockStream.sync(); } diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts index 450903a01f5f..b2424f43d61d 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts @@ -904,7 +904,8 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra const [response] = await Promise.all([ query(), (async () => { - const header = await this.aztecNode.getBlockHeader(blockHash); + const block = await this.aztecNode.getBlock(blockHash); + const header = block?.header; if (!header) { throw new Error(`Could not find block header for block hash ${blockHash}`); } diff --git a/yarn-project/pxe/src/pxe.test.ts b/yarn-project/pxe/src/pxe.test.ts index b9d62d25b4bb..6cecfeb2cc39 100644 --- a/yarn-project/pxe/src/pxe.test.ts +++ b/yarn-project/pxe/src/pxe.test.ts @@ -1,6 +1,6 @@ import { BBBundlePrivateKernelProver } from '@aztec/bb-prover/client/bundle'; import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses'; -import { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; +import { BlockNumber, CheckpointNumber, IndexWithinCheckpoint } from '@aztec/foundation/branded-types'; import { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; import { AztecLMDBStoreV2, openTmpStore } from '@aztec/kv-store/lmdb-v2'; @@ -18,6 +18,7 @@ import { randomContractInstanceWithAddress, randomDeployedContract, } from '@aztec/stdlib/testing'; +import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; import { BlockHeader, 
GlobalVariables, TxHash } from '@aztec/stdlib/tx'; import { mock } from 'jest-mock-extended'; @@ -183,6 +184,14 @@ describe('PXE', () => { globalVariables, }); node.getBlockHeader.mockResolvedValue(blockHeader); + node.getBlock.mockResolvedValue({ + header: blockHeader, + archive: AppendOnlyTreeSnapshot.empty(), + hash: GENESIS_BLOCK_HEADER_HASH, + checkpointNumber: CheckpointNumber.fromBlockNumber(lastKnownBlockNumber), + indexWithinCheckpoint: IndexWithinCheckpoint.ZERO, + number: lastKnownBlockNumber, + } as any); // Mock getL2Tips which is needed for syncing tagged logs const tipId = { diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 6626b8e6f324..d503e2621070 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -5,6 +5,7 @@ import { booleanConfigHelper, getConfigFromMappings, numberConfigHelper, + optionalNumberConfigHelper, pickConfigMappings, } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -83,7 +84,7 @@ export const sequencerConfigMappings: ConfigMappingsType = { maxTxsPerCheckpoint: { env: 'SEQ_MAX_TX_PER_CHECKPOINT', description: 'The maximum number of txs across all blocks in a checkpoint.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, minTxsPerBlock: { env: 'SEQ_MIN_TX_PER_BLOCK', @@ -102,12 +103,12 @@ export const sequencerConfigMappings: ConfigMappingsType = { maxL2BlockGas: { env: 'SEQ_MAX_L2_BLOCK_GAS', description: 'The maximum L2 block gas.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, maxDABlockGas: { env: 'SEQ_MAX_DA_BLOCK_GAS', description: 'The maximum DA block gas.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, perBlockAllocationMultiplier: { env: 'SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER', @@ -153,7 +154,7 @@ export const sequencerConfigMappings: 
ConfigMappingsType = { l1PublishingTime: { env: 'SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT', description: 'How much time (in seconds) we allow in the slot for publishing the L1 tx (defaults to 1 L1 slot).', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, fakeProcessingDelayPerTxMs: { description: 'Used for testing to introduce a fake delay after processing each tx', @@ -228,6 +229,10 @@ export const sequencerConfigMappings: ConfigMappingsType = { description: 'Percent probability (0 - 100) of sequencer skipping checkpoint publishing (testing only)', ...numberConfigHelper(DefaultSequencerConfig.skipPublishingCheckpointsPercent), }, + skipBroadcastProposals: { + description: 'Skip broadcasting checkpoint and block proposals via gossipsub when proposer (for testing only)', + ...booleanConfigHelper(false), + }, ...pickConfigMappings(p2pConfigMappings, ['txPublicSetupAllowListExtend']), }; diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts index f492813341b4..c565a79244c9 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.test.ts @@ -56,6 +56,10 @@ describe('compareActions sorting', () => { const mockRollupAddress = EthAddress.random().toString(); const mockGovernanceProposerAddress = EthAddress.random().toString(); const mockForwarderAddress = EthAddress.random().toString(); +const testSignatureContext = { + chainId: 1, + rollupAddress: EthAddress.fromString(mockRollupAddress), +}; describe('SequencerPublisher', () => { let rollup: MockProxy; @@ -207,7 +211,11 @@ describe('SequencerPublisher', () => { it('bundles propose and vote tx to l1', async () => { const checkpoint = new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber); const expectedBlobs = await getBlobsPerL1Block(checkpoint.toBlobFields()); - 
await publisher.enqueueProposeCheckpoint(checkpoint, CommitteeAttestationsAndSigners.empty(), Signature.empty()); + await publisher.enqueueProposeCheckpoint( + checkpoint, + CommitteeAttestationsAndSigners.empty(testSignatureContext), + Signature.empty(), + ); const { govPayload, voteSig } = mockGovernancePayload(); @@ -239,7 +247,7 @@ describe('SequencerPublisher', () => { feeAssetPriceModifier: 0n, }, }, - CommitteeAttestationsAndSigners.empty().getPackedAttestations(), + CommitteeAttestationsAndSigners.packAttestations([]), [], Signature.empty().toViemSignature(), blobInput, @@ -290,7 +298,7 @@ describe('SequencerPublisher', () => { await publisher.enqueueProposeCheckpoint( new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), - CommitteeAttestationsAndSigners.empty(), + CommitteeAttestationsAndSigners.empty(testSignatureContext), Signature.empty(), ); const result = await publisher.sendRequests(); @@ -351,7 +359,7 @@ describe('SequencerPublisher', () => { await rotatingPublisher.enqueueProposeCheckpoint( new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), - CommitteeAttestationsAndSigners.empty(), + CommitteeAttestationsAndSigners.empty(testSignatureContext), Signature.empty(), ); const result = await rotatingPublisher.sendRequests(); @@ -389,7 +397,7 @@ describe('SequencerPublisher', () => { await rotatingPublisher.enqueueProposeCheckpoint( new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), - CommitteeAttestationsAndSigners.empty(), + CommitteeAttestationsAndSigners.empty(testSignatureContext), Signature.empty(), ); // TimeoutError propagates to the outer catch in sendRequests which returns undefined @@ -408,7 +416,7 @@ describe('SequencerPublisher', () => { await rotatingPublisher.enqueueProposeCheckpoint( new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), - CommitteeAttestationsAndSigners.empty(), + 
CommitteeAttestationsAndSigners.empty(testSignatureContext), Signature.empty(), ); const result = await rotatingPublisher.sendRequests(); @@ -423,7 +431,7 @@ describe('SequencerPublisher', () => { await rotatingPublisher.enqueueProposeCheckpoint( new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), - CommitteeAttestationsAndSigners.empty(), + CommitteeAttestationsAndSigners.empty(testSignatureContext), Signature.empty(), ); const result = await rotatingPublisher.sendRequests(); @@ -441,7 +449,7 @@ describe('SequencerPublisher', () => { await expect( publisher.enqueueProposeCheckpoint( new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), - CommitteeAttestationsAndSigners.empty(), + CommitteeAttestationsAndSigners.empty(testSignatureContext), Signature.empty(), ), ).rejects.toThrow(); @@ -461,7 +469,7 @@ describe('SequencerPublisher', () => { await publisher.enqueueProposeCheckpoint( new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), - CommitteeAttestationsAndSigners.empty(), + CommitteeAttestationsAndSigners.empty(testSignatureContext), Signature.empty(), ); const result = await publisher.sendRequests(); @@ -485,7 +493,7 @@ describe('SequencerPublisher', () => { ); await publisher.enqueueProposeCheckpoint( new Checkpoint(l2Block.archive, header, [l2Block], l2Block.checkpointNumber), - CommitteeAttestationsAndSigners.empty(), + CommitteeAttestationsAndSigners.empty(testSignatureContext), Signature.empty(), ); publisher.interrupt(); diff --git a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts index 29ce86960587..b82a078112c0 100644 --- a/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/sequencer-publisher.ts @@ -709,7 +709,7 @@ export class SequencerPublisher { const args = [ header.toViem(), - 
CommitteeAttestationsAndSigners.empty().getPackedAttestations(), + CommitteeAttestationsAndSigners.packAttestations([]), [], // no signers Signature.empty().toViemSignature(), `0x${'0'.repeat(64)}`, // 32 empty bytes @@ -850,9 +850,7 @@ export class SequencerPublisher { const logData = { ...checkpoint, reason }; this.log.debug(`Building invalidate checkpoint ${checkpoint.checkpointNumber} request`, logData); - const attestationsAndSigners = new CommitteeAttestationsAndSigners( - validationResult.attestations, - ).getPackedAttestations(); + const attestationsAndSigners = CommitteeAttestationsAndSigners.packAttestations(validationResult.attestations); if (reason === 'invalid-attestation') { return this.rollupContract.buildInvalidateBadAttestationRequest( diff --git a/yarn-project/sequencer-client/src/sequencer/README.md b/yarn-project/sequencer-client/src/sequencer/README.md index cd45b846babd..324eafa13fff 100644 --- a/yarn-project/sequencer-client/src/sequencer/README.md +++ b/yarn-project/sequencer-client/src/sequencer/README.md @@ -48,7 +48,7 @@ In a typical configuration without pipelining, a 72-second slot contains: - 1 last validator re-execution sub-slot (8 seconds) - 1 attestation and publishing period (17 seconds) -With proposer pipelining enabled, the last validator re-execution sub-slot is still reserved, but the checkpoint finalization and L1 publishing budget is no longer subtracted when deciding how many block-building sub-slots fit in the slot. +With proposer pipelining enabled, the last validator re-execution sub-slot is still reserved, but L1 publishing is deferred to the target slot and removed from the current slot budget. Attestation collection is completed inside the build slot itself, so the proposer can send the L1 transaction immediately at the target-slot boundary. 
### The Fixed Sub-Slot Model @@ -89,7 +89,8 @@ timeReservedAtEnd (normal mode) = blockDuration (last sub-slot for + checkpointFinalizationTime timeReservedAtEnd (pipelining) = assembleTime - + propagationTime (proposal must reach validators before the slot flips) + + 2 * propagationTime (proposal out + attestations back) + + blockDuration (last-block re-execution) timeAvailableForBlocks = slotDuration - initializationOffset - timeReservedAtEnd @@ -110,47 +111,45 @@ This means: **The same slot with proposer pipelining enabled:** ``` -timeReservedAtEnd = 1s + 2s = 3s -timeAvailableForBlocks = 72s - 2s - 3s = 67s -numberOfBlocks = floor(67s / 8s) = 8 blocks +timeReservedAtEnd = 1s + 2*2s + 8s = 13s +timeAvailableForBlocks = 72s - 2s - 13s = 57s +numberOfBlocks = floor(57s / 8s) = 7 blocks ``` -The extra two block opportunities come from not charging the current slot for checkpoint finalization and L1 publishing. +The extra two block opportunities come from not charging the current slot for L1 publishing. The proposal broadcast, attestation round-trip, and last-block re-execution are now all reserved inside the build slot so that attestations are in hand at the slot boundary. ### Pipelining Mode -When proposer pipelining is enabled, the sequencer uses the current wall-clock slot to build the checkpoint for the **next target slot**. +When proposer pipelining is enabled, the sequencer uses the current wall-clock slot to build the checkpoint for the **next target slot**, and finishes collecting attestations before the slot boundary so that L1 publishing can happen immediately at the target-slot boundary. 
It helps to think in terms of two different slots: -- **Wall-clock slot N-1**: The sequencer initializes checkpoint `N`, builds its blocks, and validators re-execute the last block -- **Target slot N**: Checkpoint `N` is proposed, attestations are gathered, and the L1 transaction is submitted +- **Wall-clock slot N-1**: The sequencer initializes checkpoint `N`, builds its blocks, validators re-execute the last block, and attestations are gathered +- **Target slot N**: The checkpoint is submitted to L1 So the work is split like this: -- **During slot N-1**: Initialization, block building, and last-block re-execution -- **Near the end of slot N-1**: The checkpoint proposal is broadcast so validators can start the last re-execution as slot `N` begins. -- **During slot N**: Validators finish re-executing, send attestations, the proposer collects them, and the checkpoint is submitted to L1 before slot `N` reaches its publish cutoff +- **During slot N-1**: Initialization, block building, last-block re-execution, proposal broadcast, and attestation collection +- **At the start of slot N**: The L1 transaction is submitted — attestations are already in hand -In other words, pipelining does not mean "do everything for slot N earlier". It specifically moves **block production and block re-execution** earlier, while **checkpoint proposal, attestation gathering, and L1 submission** remain aligned with slot `N`. +In other words, pipelining moves **block production, block re-execution, proposal broadcast, and attestation collection** into the build slot, while **L1 submission** happens aligned with slot `N`. With default values (72s slot, 6s block, 2s p2p, 1s assemble), the last build-slot block finishes at `T = slotDuration - timeReservedAtEnd = 61s`, the proposer broadcasts the checkpoint at `T=62s` after `assembleTime=1s`, and attestations are in hand by `T=72s` (the slot boundary). 
**Example: building checkpoint 12 while wall-clock time is in slot 11** ``` Slot 11 (wall clock): - Build blocks that will make up checkpoint 12 -- Validators re-execute the last block of checkpoint 12 - Broadcast checkpoint 12 proposal -- Collect checkpoint 12 attestations +- Validators re-execute the last block of checkpoint 12 +- Collect checkpoint 12 attestations (all complete before slot 11 ends) Slot 12 (target/submission slot): -- Collect attestations for checkpoint 12 until slot 12 reaches its L1 publish cutoff -- Submit checkpoint 12 to L1 +- Submit checkpoint 12 to L1 at the slot boundary ``` -For timetable purposes, this changes two things: +For timetable purposes: -- `maxNumberOfBlocks` is computed by reserving only the final validator re-execution sub-slot -- `initializeDeadline` no longer subtracts checkpoint finalization time; it only requires enough time for initialization, execution, and validator re-execution +- `maxNumberOfBlocks` is computed by reserving assembly + round-trip p2p + last-block re-execution at the end of the slot +- `initializeDeadline` no longer subtracts checkpoint finalization time; it only requires enough time for initialization and two execution windows In code, that means: @@ -162,7 +161,7 @@ initializeDeadline (pipelining) = slotDuration - initializationOffset - 2 * minExecutionTime ``` -The fixed sub-slot deadlines themselves do not change. Pipelining only changes how much of the slot is considered available for block building. +The fixed sub-slot deadlines themselves do not change. Pipelining only changes how much of the slot is considered available for block building, and when the broadcast and attestation windows close. 
## The Sequencer's Work diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts index 317ee9069c9a..edc24ecb5840 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.test.ts @@ -41,7 +41,7 @@ import { type WorldStateSynchronizer, } from '@aztec/stdlib/interfaces/server'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; -import { BlockProposal, CheckpointProposal } from '@aztec/stdlib/p2p'; +import { BlockProposal, CheckpointProposal, type CoordinationSignatureContext } from '@aztec/stdlib/p2p'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; import { type FailedTx, GlobalVariables, type Tx } from '@aztec/stdlib/tx'; @@ -121,6 +121,10 @@ describe('CheckpointProposalJob', () => { const committee = [signer.address]; const attestorAddress = EthAddress.random(); const proposer = EthAddress.random(); + const signatureContext: CoordinationSignatureContext = { + chainId: chainId.toNumber(), + rollupAddress: EthAddress.random(), + }; const getSignatures = () => [mockedAttestation]; @@ -247,22 +251,36 @@ describe('CheckpointProposalJob', () => { archiveRoot, txHashes, mockedSig, + signatureContext, ); }, ); validatorClient.createCheckpointProposal.mockImplementation( async (checkpointHeader, archiveRoot, _checkpointNumber, feeAssetPriceModifier, lastBlockInfo) => { if (!lastBlockInfo) { - return new CheckpointProposal(checkpointHeader, archiveRoot, feeAssetPriceModifier, mockedSig); + return new CheckpointProposal( + checkpointHeader, + archiveRoot, + feeAssetPriceModifier, + mockedSig, + signatureContext, + ); } const txHashes = await Promise.all((lastBlockInfo.txs ?? 
[]).map((tx: Tx) => tx.getTxHash())); - return new CheckpointProposal(checkpointHeader, archiveRoot, feeAssetPriceModifier, mockedSig, { - blockHeader: lastBlockInfo.blockHeader, - indexWithinCheckpoint: lastBlockInfo.indexWithinCheckpoint, - txHashes, - signature: mockedSig, - // Note: signedTxs omitted since publishTxsWithProposals is false in tests - }); + return new CheckpointProposal( + checkpointHeader, + archiveRoot, + feeAssetPriceModifier, + mockedSig, + signatureContext, + { + blockHeader: lastBlockInfo.blockHeader, + indexWithinCheckpoint: lastBlockInfo.indexWithinCheckpoint, + txHashes, + signature: mockedSig, + // Note: signedTxs omitted since publishTxsWithProposals is false in tests + }, + ); }, ); validatorClient.signAttestationsAndSigners.mockImplementation(() => Promise.resolve(getSignatures()[0].signature)); @@ -635,6 +653,7 @@ describe('CheckpointProposalJob', () => { checkpointsBuilder as unknown as FullNodeCheckpointsBuilder, blockSink, l1Constants, + signatureContext, config, timetable, slasherClient, @@ -1602,6 +1621,7 @@ function toCheckpointData(checkpoint: Checkpoint): CheckpointData { checkpointOutHash: checkpoint.getCheckpointOutHash(), startBlock: BlockNumber(checkpoint.blocks[0]?.number ?? 
1), blockCount: checkpoint.blocks.length, + feeAssetPriceModifier: checkpoint.feeAssetPriceModifier, attestations: [], l1: L1PublishedData.random(), }; diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts index b08790baff3d..90f98e39cd02 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.timing.test.ts @@ -20,6 +20,7 @@ import type { WorldStateSynchronizer, } from '@aztec/stdlib/interfaces/server'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; +import type { CoordinationSignatureContext } from '@aztec/stdlib/p2p'; import { type CheckpointGlobalVariables, GlobalVariables, type Tx } from '@aztec/stdlib/tx'; import { getTelemetryClient } from '@aztec/telemetry-client'; import type { @@ -223,6 +224,10 @@ describe('CheckpointProposalJob Timing Tests', () => { const version = Fr.ZERO; const coinbase = EthAddress.random(); const gasFees = GasFees.empty(); + const signatureContext: CoordinationSignatureContext = { + chainId: chainId.toNumber(), + rollupAddress: EthAddress.random(), + }; const signer = Secp256k1Signer.random(); const mockedSig = Signature.random(); const committee = [signer.address]; @@ -309,6 +314,7 @@ describe('CheckpointProposalJob Timing Tests', () => { checkpointsBuilder as unknown as FullNodeCheckpointsBuilder, blockSink, l1Constants, + signatureContext, config, timetable, slasherClient, @@ -1054,6 +1060,7 @@ describe('CheckpointProposalJob Timing Tests', () => { checkpointsBuilder as unknown as FullNodeCheckpointsBuilder, blockSink, l1Constants, + signatureContext, config, pipeliningTimetable, slasherClient, diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts index 
d880246c0024..c6a3e61484cc 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_proposal_job.ts @@ -46,8 +46,10 @@ import { type L1ToL2MessageSource, computeInHashFromL1ToL2Messages } from '@azte import type { BlockProposal, BlockProposalOptions, + CheckpointAttestation, CheckpointProposal, CheckpointProposalOptions, + CoordinationSignatureContext, } from '@aztec/stdlib/p2p'; import { orderAttestations, trimAttestations } from '@aztec/stdlib/p2p'; import type { L2BlockBuiltStats } from '@aztec/stdlib/stats'; @@ -97,6 +99,7 @@ type CheckpointProposalResult = { */ export class CheckpointProposalJob implements Traceable { protected readonly log: Logger; + private readonly checkpointEventLog: Logger; /** Tracks the fire-and-forget L1 submission promise so it can be awaited during shutdown. */ private pendingL1Submission: Promise | undefined; @@ -104,6 +107,10 @@ export class CheckpointProposalJob implements Traceable { /** Pipelined parent chain state used while building and later submitting this checkpoint. 
*/ private pipelinedParentSimulationOverridesPlan?: SimulationOverridesPlan; + private getSignatureContext(): CoordinationSignatureContext { + return this.signatureContext; + } + constructor( private readonly slotNow: SlotNumber, private readonly targetSlot: SlotNumber, @@ -124,6 +131,7 @@ export class CheckpointProposalJob implements Traceable { private readonly checkpointsBuilder: FullNodeCheckpointsBuilder, private readonly blockSink: L2BlockSink, private readonly l1Constants: SequencerRollupConstants, + private readonly signatureContext: CoordinationSignatureContext, protected config: ResolvedSequencerConfig, protected timetable: SequencerTimetable, private readonly slasherClient: SlasherClientInterface | undefined, @@ -141,6 +149,10 @@ export class CheckpointProposalJob implements Traceable { ...bindings, instanceId: `slot-${this.slotNow}`, }); + this.checkpointEventLog = createLogger('sequencer:checkpoint-events', { + ...bindings, + instanceId: `slot-${this.slotNow}`, + }); } /** Awaits the pending L1 submission if one is in progress. Call during shutdown. */ @@ -149,6 +161,13 @@ export class CheckpointProposalJob implements Traceable { await this.pendingL1Submission; } + private logCheckpointEvent(eventName: string, message: string, fields: Record): void { + this.checkpointEventLog.debug(message, { + eventName: `sequencer-checkpoint-${eventName}`, + ...fields, + }); + } + /** * Executes the checkpoint proposal job. * Builds blocks, assembles checkpoint, and broadcasts the proposal (blocking). 
@@ -248,10 +267,34 @@ export class CheckpointProposalJob implements Traceable { const l1Response = await this.publisher.sendRequestsAt(submitAfter); const proposedAction = l1Response?.successfulActions.find(a => a === 'propose'); if (proposedAction) { + this.logCheckpointEvent('published', `Checkpoint published for slot ${this.targetSlot}`, { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + successfulActions: l1Response?.successfulActions, + sentActions: l1Response?.sentActions, + }); this.eventEmitter.emit('checkpoint-published', { checkpoint: this.checkpointNumber, slot: this.targetSlot }); const coinbase = checkpoint.header.coinbase; await this.metrics.incFilledSlot(this.publisher.getSenderAddress().toString(), coinbase); } else { + this.logCheckpointEvent('publish-failed', `Checkpoint publish failed for slot ${this.targetSlot}`, { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + successfulActions: l1Response?.successfulActions, + failedActions: l1Response?.failedActions, + sentActions: l1Response?.sentActions, + expiredActions: l1Response?.expiredActions, + reason: 'propose_action_not_successful', + }); + this.log.warn(`Checkpoint publish failed for slot ${this.targetSlot}`, { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + successfulActions: l1Response?.successfulActions, + failedActions: l1Response?.failedActions, + sentActions: l1Response?.sentActions, + expiredActions: l1Response?.expiredActions, + reason: 'propose_action_not_successful', + }); this.eventEmitter.emit('checkpoint-publish-failed', { ...l1Response, slot: this.targetSlot }); if (isPipelining) { this.metrics.recordPipelineDiscard(); @@ -261,7 +304,16 @@ export class CheckpointProposalJob implements Traceable { if (err instanceof SequencerInterruptedError) { return; } - this.log.error(`Background attestation/L1 pipeline failed for slot ${this.targetSlot}`, err); + this.logCheckpointEvent('publish-failed', `Checkpoint publish failed 
for slot ${this.targetSlot}`, { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + reason: err instanceof Error ? err.message : String(err), + }); + this.log.error(`Background attestation/L1 pipeline failed for slot ${this.targetSlot}`, err, { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + reason: err instanceof Error ? err.message : String(err), + }); this.eventEmitter.emit('checkpoint-publish-failed', { slot: this.targetSlot }); if (isPipelining) { this.metrics.recordPipelineDiscard(); @@ -466,11 +518,15 @@ export class CheckpointProposalJob implements Traceable { // Start the checkpoint this.setStateFn(SequencerState.INITIALIZING_CHECKPOINT, this.targetSlot); - this.log.info(`Starting checkpoint proposal`, { + this.logCheckpointEvent('slot-started', `Starting checkpoint proposal for slot ${this.targetSlot}`, { buildSlot: this.slotNow, submissionSlot: this.targetSlot, + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, pipelining: this.epochCache.isProposerPipeliningEnabled(), proposer: this.proposer?.toString(), + attestorAddress: this.attestorAddress.toString(), + publisherAddress: this.publisher.getSenderAddress().toString(), coinbase: coinbase.toString(), }); this.metrics.incOpenSlot(this.targetSlot, this.proposer?.toString() ?? 
'unknown'); @@ -562,16 +618,38 @@ export class CheckpointProposalJob implements Traceable { } if (blocksInCheckpoint.length === 0) { - this.log.warn(`No blocks were built for slot ${this.targetSlot}`, { slot: this.targetSlot }); + this.logCheckpointEvent('build-failed', `Checkpoint build failed for slot ${this.targetSlot}`, { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + reason: 'no_blocks_built', + }); + this.log.warn(`No blocks were built for slot ${this.targetSlot}`, { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + reason: 'no_blocks_built', + }); this.eventEmitter.emit('checkpoint-empty', { slot: this.targetSlot }); return undefined; } const minBlocksForCheckpoint = this.config.minBlocksForCheckpoint; if (minBlocksForCheckpoint !== undefined && blocksInCheckpoint.length < minBlocksForCheckpoint) { + this.logCheckpointEvent('build-failed', `Checkpoint build failed for slot ${this.targetSlot}`, { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + blocksBuilt: blocksInCheckpoint.length, + minBlocksForCheckpoint, + reason: 'min_blocks_not_met', + }); this.log.warn( `Checkpoint has fewer blocks than minimum (${blocksInCheckpoint.length} < ${minBlocksForCheckpoint}), skipping proposal`, - { slot: this.targetSlot, blocksBuilt: blocksInCheckpoint.length, minBlocksForCheckpoint }, + { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + blocksBuilt: blocksInCheckpoint.length, + minBlocksForCheckpoint, + reason: 'min_blocks_not_met', + }, ); return undefined; } @@ -592,7 +670,18 @@ export class CheckpointProposalJob implements Traceable { maxTxsPerCheckpoint: this.config.maxTxsPerCheckpoint, }); } catch (err) { + this.logCheckpointEvent('build-failed', `Checkpoint build failed for slot ${this.targetSlot}`, { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + blocksBuilt: blocksInCheckpoint.length, + reason: 'invalid_checkpoint', + checkpoint: 
checkpoint.header.toInspect(), + }); this.log.error(`Built an invalid checkpoint at slot ${this.slotNow} (skipping proposal)`, err, { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + blocksBuilt: blocksInCheckpoint.length, + reason: 'invalid_checkpoint', checkpoint: checkpoint.header.toInspect(), }); return undefined; @@ -605,6 +694,17 @@ export class CheckpointProposalJob implements Traceable { checkpoint.getStats().txCount, Number(checkpoint.header.totalManaUsed.toBigInt()), ); + this.logCheckpointEvent('built', `Checkpoint built for slot ${this.targetSlot}`, { + slot: this.targetSlot, + buildSlot: this.slotNow, + checkpointNumber: this.checkpointNumber, + proposer: this.proposer?.toString(), + attestorAddress: this.attestorAddress.toString(), + publisherAddress: this.publisher.getSenderAddress().toString(), + blocksBuilt: blocksInCheckpoint.length, + txCount: checkpoint.getStats().txCount, + totalMana: Number(checkpoint.header.totalManaUsed.toBigInt()), + }); // In fisherman mode, return the checkpoint without broadcasting or collecting attestations if (this.config.fishermanMode) { @@ -634,8 +734,10 @@ export class CheckpointProposalJob implements Traceable { ); const blockProposedAt = this.dateProvider.now(); - await this.p2pClient.broadcastCheckpointProposal(proposal); - this.checkpointMetrics.noteCheckpointBroadcast(this.dateProvider.now()); + if (!this.config.skipBroadcastProposals) { + await this.p2pClient.broadcastCheckpointProposal(proposal); + this.checkpointMetrics.noteCheckpointBroadcast(this.dateProvider.now()); + } // Return immediately after broadcast — attestation collection happens in the background return { checkpoint, proposal, blockProposedAt }; @@ -756,7 +858,9 @@ export class CheckpointProposalJob implements Traceable { } // Once we have a signed proposal and the archiver agreed with our proposed block, then we broadcast it. 
- proposal && (await this.p2pClient.broadcastProposal(proposal)); + if (proposal && !this.config.skipBroadcastProposals) { + await this.p2pClient.broadcastProposal(proposal); + } // Wait until the next block's start time await this.waitUntilNextSubslot(timingInfo.deadline); @@ -830,9 +934,26 @@ export class CheckpointProposalJob implements Traceable { // Wait until we have enough txs to build the block const { availableTxs, canStartBuilding, minTxs } = await this.waitForMinTxs(opts); if (!canStartBuilding) { + this.logCheckpointEvent('block-build-failed', `Block build failed for slot ${this.targetSlot}`, { + reason: 'insufficient_txs', + blockNumber, + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + indexWithinCheckpoint, + availableTxs, + minTxs, + }); this.log.warn( `Not enough txs to build block ${blockNumber} at index ${indexWithinCheckpoint} in slot ${this.targetSlot} (got ${availableTxs} txs but needs ${minTxs})`, - { blockNumber, slot: this.targetSlot, indexWithinCheckpoint }, + { + reason: 'insufficient_txs', + blockNumber, + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + indexWithinCheckpoint, + availableTxs, + minTxs, + }, ); this.eventEmitter.emit('block-tx-count-check-failed', { minTxs, availableTxs, slot: this.targetSlot }); this.metrics.recordBlockProposalFailed('insufficient_txs'); @@ -884,10 +1005,21 @@ export class CheckpointProposalJob implements Traceable { await this.dropFailedTxsFromP2P(buildResult.failedTxs); if (buildResult.status === 'insufficient-valid-txs') { + this.logCheckpointEvent('block-build-failed', `Block build failed for slot ${this.targetSlot}`, { + reason: 'insufficient_valid_txs', + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + blockNumber, + numTxs: buildResult.processedCount, + indexWithinCheckpoint, + minValidTxs, + }); this.log.warn( `Block ${blockNumber} at index ${indexWithinCheckpoint} on slot ${this.targetSlot} has too few valid txs to be proposed`, { + 
reason: 'insufficient_valid_txs', slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, blockNumber, numTxs: buildResult.processedCount, indexWithinCheckpoint, @@ -936,7 +1068,18 @@ export class CheckpointProposalJob implements Traceable { reason: err.message, slot: this.targetSlot, }); - this.log.error(`Error building block`, err, { blockNumber, slot: this.targetSlot }); + this.logCheckpointEvent('block-build-failed', `Block build failed for slot ${this.targetSlot}`, { + reason: err instanceof Error ? err.message : String(err), + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + blockNumber, + }); + this.log.error(`Error building block`, err, { + reason: err instanceof Error ? err.message : String(err), + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + blockNumber, + }); this.metrics.recordBlockProposalFailed(err.name || 'unknown_error'); this.metrics.recordFailedBlock(); return { error: err }; @@ -1048,7 +1191,7 @@ export class CheckpointProposalJob implements Traceable { ): Promise { if (this.config.fishermanMode) { this.log.debug('Skipping attestation collection in fisherman mode'); - return CommitteeAttestationsAndSigners.empty(); + return CommitteeAttestationsAndSigners.empty(this.getSignatureContext()); } const slotNumber = proposal.slotNumber; @@ -1058,7 +1201,7 @@ export class CheckpointProposalJob implements Traceable { throw new Error('No committee when collecting attestations'); } else if (committee.length === 0) { this.log.verbose(`Attesting committee is empty`); - return CommitteeAttestationsAndSigners.empty(); + return CommitteeAttestationsAndSigners.empty(this.getSignatureContext()); } else { this.log.debug(`Attesting committee length is ${committee.length}`, { committee }); } @@ -1068,7 +1211,13 @@ export class CheckpointProposalJob implements Traceable { if (this.config.skipCollectingAttestations) { this.log.warn('Skipping attestation collection as per config (attesting with own keys only)'); const 
attestations = await this.validatorClient?.collectOwnAttestations(proposal, this.checkpointNumber); - return new CommitteeAttestationsAndSigners(orderAttestations(attestations ?? [], committee)); + this.logCheckpointAttestations('collected', committee, attestations ?? [], numberOfRequiredAttestations, { + reason: 'collect_own_only', + }); + return new CommitteeAttestationsAndSigners( + orderAttestations(attestations ?? [], committee), + this.getSignatureContext(), + ); } const attestationTimeAllowed = this.config.enforceTimeTable @@ -1104,6 +1253,9 @@ export class CheckpointProposalJob implements Traceable { // Rollup contract requires that the signatures are provided in the order of the committee const sorted = orderAttestations(trimmed, committee); + this.logCheckpointAttestations('collected', committee, attestations, numberOfRequiredAttestations, { + submittedCount: trimmed.length, + }); // Manipulate the attestations if we've been configured to do so if ( @@ -1115,15 +1267,23 @@ export class CheckpointProposalJob implements Traceable { return this.manipulateAttestations(proposal.slotNumber, epoch, seed, committee, sorted); } - return new CommitteeAttestationsAndSigners(sorted); + return new CommitteeAttestationsAndSigners(sorted, this.getSignatureContext()); } catch (err) { if (err && err instanceof AttestationTimeoutError) { collectedAttestationsCount = err.collectedCount; + this.logCheckpointAttestations('failed', committee, undefined, numberOfRequiredAttestations, { + collectedCount: collectedAttestationsCount, + reason: 'timeout', + }); this.log.error( `Timeout while waiting for attestations for checkpoint proposal at slot ${proposal.slotNumber} (collected ${collectedAttestationsCount}/${numberOfRequiredAttestations})`, err, ); } else { + this.logCheckpointAttestations('failed', committee, undefined, numberOfRequiredAttestations, { + collectedCount: collectedAttestationsCount, + reason: err instanceof Error ? 
err.message : String(err), + }); this.log.error(`Error collecting attestations for checkpoint proposal at slot ${proposal.slotNumber}`, err); } return undefined; @@ -1132,6 +1292,31 @@ export class CheckpointProposalJob implements Traceable { } } + private logCheckpointAttestations( + status: 'collected' | 'failed', + committee: EthAddress[], + attestations: CheckpointAttestation[] | undefined, + requiredAttestations: number, + opts: { collectedCount?: number; submittedCount?: number; reason?: string } = {}, + ) { + const signedValidators = + attestations + ?.map(attestation => attestation.getSender()?.toString()) + .filter((address): address is `0x${string}` => address !== undefined) ?? []; + const collectedCount = opts.collectedCount ?? new Set(signedValidators).size; + const missingValidatorCount = status === 'failed' ? Math.max(0, requiredAttestations - collectedCount) : undefined; + this.logCheckpointEvent(`attestations-${status}`, `Checkpoint attestations ${status} for slot ${this.targetSlot}`, { + slot: this.targetSlot, + checkpointNumber: this.checkpointNumber, + committeeSize: committee.length, + requiredAttestations, + collectedAttestations: collectedCount, + ...(opts.submittedCount !== undefined && { submittedAttestations: opts.submittedCount }), + ...(missingValidatorCount !== undefined && { missingValidatorCount }), + ...(opts.reason !== undefined && { reason: opts.reason }), + }); + } + /** Breaks the attestations before publishing based on attack configs */ private manipulateAttestations( slotNumber: SlotNumber, @@ -1175,7 +1360,7 @@ export class CheckpointProposalJob implements Traceable { unfreeze(attestations[targetIndex]).signature = generateRecoverableSignature(); } } - return new CommitteeAttestationsAndSigners(attestations); + return new CommitteeAttestationsAndSigners(attestations, this.getSignatureContext()); } if (this.config.shuffleAttestationOrdering) { @@ -1197,11 +1382,11 @@ export class CheckpointProposalJob implements Traceable { 
[shuffled[i], shuffled[j]] = [shuffled[j], shuffled[i]]; } - const signers = new CommitteeAttestationsAndSigners(attestations).getSigners(); - return new MaliciousCommitteeAttestationsAndSigners(shuffled, signers); + const signers = new CommitteeAttestationsAndSigners(attestations, this.getSignatureContext()).getSigners(); + return new MaliciousCommitteeAttestationsAndSigners(shuffled, signers, this.getSignatureContext()); } - return new CommitteeAttestationsAndSigners(attestations); + return new CommitteeAttestationsAndSigners(attestations, this.getSignatureContext()); } private async dropFailedTxsFromP2P(failedTxs: FailedTx[]) { diff --git a/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts index f4ba1f24e434..720e699d3d96 100644 --- a/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/checkpoint_voter.ha.integration.test.ts @@ -235,6 +235,7 @@ describe('CheckpointVoter HA Integration', () => { disableValidator: false, disabledValidators: [], haSigningEnabled: true, + l1ChainId: 1, l1Contracts: { rollupAddress: EthAddress.fromString(rollupContract.address.toString()) }, nodeId: config.nodeId || 'ha-node-1', pollingIntervalMs: 100, diff --git a/yarn-project/sequencer-client/src/sequencer/metrics.ts b/yarn-project/sequencer-client/src/sequencer/metrics.ts index b1d8e185faef..677f147034bf 100644 --- a/yarn-project/sequencer-client/src/sequencer/metrics.ts +++ b/yarn-project/sequencer-client/src/sequencer/metrics.ts @@ -26,6 +26,7 @@ export class SequencerMetrics { private blockBuildDuration: Histogram; private blockBuildManaPerSecond: Gauge; private stateTransitionBufferDuration: Histogram; + private stateDuration: Histogram; // these are gauges because for individual sequencers building a block is not something that happens often enough to warrant a histogram 
private timeToCollectAttestations: Gauge; @@ -90,6 +91,8 @@ export class SequencerMetrics { this.stateTransitionBufferDuration = this.meter.createHistogram(Metrics.SEQUENCER_STATE_TRANSITION_BUFFER_DURATION); + this.stateDuration = this.meter.createHistogram(Metrics.SEQUENCER_STATE_DURATION); + this.rewards = this.meter.createGauge(Metrics.SEQUENCER_CURRENT_SLOT_REWARDS); this.slots = createUpDownCounterWithDefault(this.meter, Metrics.SEQUENCER_SLOT_COUNT); @@ -252,6 +255,12 @@ export class SequencerMetrics { }); } + recordStateDuration(durationMs: number, state: SequencerState) { + this.stateDuration.record(Math.ceil(durationMs), { + [Attributes.SEQUENCER_STATE]: state, + }); + } + recordPipelineDepth(depth: number) { this.pipelineDepth.record(depth); } diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 68fbe922d398..e6ac31a7583d 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -29,6 +29,7 @@ import { type ValidateCheckpointNegativeResult, } from '@aztec/stdlib/block'; import { Checkpoint } from '@aztec/stdlib/checkpoint'; +import type { ChainConfig } from '@aztec/stdlib/config'; import type { L1RollupConstants } from '@aztec/stdlib/epoch-helpers'; import { GasFees } from '@aztec/stdlib/gas'; import { @@ -76,6 +77,7 @@ describe('sequencer', () => { let newBlockNumber: BlockNumber; let newSlotNumber: number; let hash: string; + let signatureContext: { chainId: number; rollupAddress: EthAddress }; let block: L2Block; let globalVariables: GlobalVariables; @@ -153,7 +155,7 @@ describe('sequencer', () => { }; const expectPublisherProposeL2Block = () => { - const attestationsAndSigners = new CommitteeAttestationsAndSigners(getSignatures()); + const attestationsAndSigners = new CommitteeAttestationsAndSigners(getSignatures(), signatureContext); 
expect(publisher.enqueueProposeCheckpoint).toHaveBeenCalledTimes(1); expect(publisher.enqueueProposeCheckpoint).toHaveBeenCalledWith( expect.any(Checkpoint), @@ -315,7 +317,7 @@ describe('sequencer', () => { getCheckpointsForEpoch: mockFn().mockResolvedValue([]), getCheckpointsDataForEpoch: mockFn().mockResolvedValue([]), getSyncedL2SlotNumber: mockFn().mockResolvedValue(SlotNumber(Number.MAX_SAFE_INTEGER)), - getProposedCheckpoint: mockFn().mockResolvedValue(undefined), + getLastCheckpoint: mockFn().mockResolvedValue(undefined), }); l1ToL2MessageSource = mock({ @@ -350,7 +352,13 @@ describe('sequencer', () => { dateProvider = new TestDateProvider(); - const config: SequencerConfig = { enforceTimeTable: true, maxTxsPerBlock: 4 }; + signatureContext = { chainId: chainId.toNumber(), rollupAddress: EthAddress.random() }; + const config: SequencerConfig & Pick = { + enforceTimeTable: true, + maxTxsPerBlock: 4, + l1ChainId: signatureContext.chainId, + l1Contracts: { rollupAddress: signatureContext.rollupAddress }, + }; sequencer = new TestSequencer( publisherFactory, validatorClient, @@ -586,7 +594,7 @@ describe('sequencer', () => { await sequencer.work(); await sequencer.awaitLastProposalSubmission(); - const attestationsAndSigners = new CommitteeAttestationsAndSigners(getSignatures()); + const attestationsAndSigners = new CommitteeAttestationsAndSigners(getSignatures(), signatureContext); expect(publishers[i].enqueueProposeCheckpoint).toHaveBeenCalledTimes(1); expect(publishers[i].enqueueProposeCheckpoint).toHaveBeenCalledWith( expect.any(Checkpoint), @@ -1060,7 +1068,7 @@ describe('sequencer', () => { checkpointNumber: CheckpointNumber(1), indexWithinCheckpoint: IndexWithinCheckpoint(0), } satisfies BlockData); - l2BlockSource.getProposedCheckpoint.mockResolvedValue({ + l2BlockSource.getLastCheckpoint.mockResolvedValue({ checkpointNumber: CheckpointNumber(1), } as any); @@ -1121,7 +1129,7 @@ describe('sequencer', () => { checkpointNumber: CheckpointNumber(3), 
indexWithinCheckpoint: IndexWithinCheckpoint(0), } satisfies BlockData); - l2BlockSource.getProposedCheckpoint.mockResolvedValue({ + l2BlockSource.getLastCheckpoint.mockResolvedValue({ checkpointNumber: CheckpointNumber(2), } as any); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 3ea6547592e2..4fc638ceeae1 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -6,7 +6,7 @@ import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/f import { merge, omit, pick } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; -import { createLogger } from '@aztec/foundation/log'; +import { type Logger, createLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; import type { DateProvider } from '@aztec/foundation/timer'; import type { TypedEventEmitter } from '@aztec/foundation/types'; @@ -14,6 +14,7 @@ import type { P2P } from '@aztec/p2p'; import type { SlasherClientInterface } from '@aztec/slasher'; import type { BlockData, L2BlockSink, L2BlockSource, ValidateCheckpointResult } from '@aztec/stdlib/block'; import type { Checkpoint, ProposedCheckpointData } from '@aztec/stdlib/checkpoint'; +import type { ChainConfig } from '@aztec/stdlib/config'; import { getSlotStartBuildTimestamp } from '@aztec/stdlib/epoch-helpers'; import { type ResolvedSequencerConfig, @@ -22,6 +23,7 @@ import { type WorldStateSynchronizer, } from '@aztec/stdlib/interfaces/server'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; +import type { CoordinationSignatureContext } from '@aztec/stdlib/p2p'; import { pickFromSchema } from '@aztec/stdlib/schemas'; import { MerkleTreeId } from '@aztec/stdlib/trees'; import { Attributes, type TelemetryClient, type 
Tracer, getTelemetryClient, trackSpan } from '@aztec/telemetry-client'; @@ -56,8 +58,11 @@ export { SequencerState }; export class Sequencer extends (EventEmitter as new () => TypedEventEmitter) { private runningPromise?: RunningPromise; private state = SequencerState.STOPPED; + private stateSlotNumber: SlotNumber | undefined; + private stateEnteredAtMs = performance.now(); private metrics: SequencerMetrics; private checkpointProposalJobMetrics: CheckpointProposalJobMetrics; + private readonly stateLog: Logger; /** The last slot for which we attempted to perform our voting duties with degraded block production */ private lastSlotForFallbackVote: SlotNumber | undefined; @@ -82,6 +87,7 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter, protected telemetry: TelemetryClient = getTelemetryClient(), protected log = createLogger('sequencer'), ) { super(); + this.stateLog = log.createChild('state'); // Add [FISHERMAN] prefix to logger if in fisherman mode if (config.fishermanMode) { this.log = log.createChild('[FISHERMAN]'); } + this.signatureContext = { + chainId: config.l1ChainId, + rollupAddress: config.l1Contracts.rollupAddress, + }; this.metrics = new SequencerMetrics(telemetry, this.rollupContract, 'Sequencer'); this.checkpointProposalJobMetrics = new CheckpointProposalJobMetrics(telemetry); this.updateConfig(config); @@ -377,7 +388,7 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter TypedEventEmitter TypedEventEmitter p2p.syncedToL2Block), this.l1ToL2MessageSource.getL2Tips().then(t => ({ proposed: t.proposed, checkpointed: t.checkpointed })), this.l2BlockSource.getPendingChainValidationStatus(), - this.l2BlockSource.getProposedCheckpointOnly(), + this.l2BlockSource.getLastProposedCheckpoint(), ] as const); const [worldState, l2Tips, p2p, l1ToL2MessageSourceTips, pendingChainValidationStatus, proposedCheckpointData] = diff --git 
a/yarn-project/sequencer-client/src/sequencer/timetable.test.ts b/yarn-project/sequencer-client/src/sequencer/timetable.test.ts index a11e1653798f..eb2dbc9c1697 100644 --- a/yarn-project/sequencer-client/src/sequencer/timetable.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/timetable.test.ts @@ -483,7 +483,7 @@ describe('sequencer-timetable', () => { expect((tt.maxNumberOfBlocks + 1) * blockDuration).toBeGreaterThan(timeAvailableForBlocks); }); - it('fits 12 pipelined blocks in a 72s slot with 5.5s cadence', () => { + it('fits 11 pipelined blocks in a 72s slot with 5.5s cadence', () => { const tt = new SequencerTimetable({ ethereumSlotDuration: ETHEREUM_SLOT_DURATION, aztecSlotDuration: AZTEC_SLOT_DURATION, @@ -493,8 +493,9 @@ describe('sequencer-timetable', () => { pipelining: true, }); - expect(tt.maxNumberOfBlocks).toBe(12); - expect(tt.pipeliningAttestationGracePeriod).toBe(BLOCK_DURATION_MS / 1000 + tt.p2pPropagationTime); + // reserved = assemble(1) + 2*p2p(2) + block(5.5) = 10.5; available = 72 - 1 - 10.5 = 60.5; floor(60.5/5.5) = 11 + expect(tt.maxNumberOfBlocks).toBe(11); + expect(tt.pipeliningAttestationGracePeriod).toBe(0); }); }); @@ -516,7 +517,7 @@ describe('sequencer-timetable', () => { expect(withPipelining.maxNumberOfBlocks).toBeGreaterThan(withoutPipelining.maxNumberOfBlocks); }); - it('reserves time for assembly and one-way broadcast at end of slot', () => { + it('reserves time for assembly, round-trip broadcast, and re-execution at end of slot', () => { const tt = new SequencerTimetable({ ethereumSlotDuration: ETHEREUM_SLOT_DURATION, aztecSlotDuration: AZTEC_SLOT_DURATION, @@ -527,8 +528,8 @@ describe('sequencer-timetable', () => { }); const blockDuration = BLOCK_DURATION_MS / 1000; - // Reserves assembleTime + p2pPropagation (one-way broadcast) at end - const timeReservedAtEnd = tt.checkpointAssembleTime + tt.p2pPropagationTime; + // Reserves assembleTime + round-trip p2p + last-block re-execution at end + const timeReservedAtEnd = 
tt.checkpointAssembleTime + 2 * tt.p2pPropagationTime + blockDuration; const availableTime = AZTEC_SLOT_DURATION - tt.initializationOffset - timeReservedAtEnd; expect(tt.maxNumberOfBlocks).toBe(Math.floor(availableTime / blockDuration)); }); @@ -561,11 +562,11 @@ describe('sequencer-timetable', () => { }); // With pipelining and test config (ethereumSlotDuration < 8): - // init=0.5, reservedAtEnd = 0.5 + 0 = 0.5, available = 36 - 0.5 - 0.5 = 35, floor(35/8) = 4 - expect(tt.maxNumberOfBlocks).toBe(4); + // init=0.5, assemble=0.5, p2p=0.5, reservedAtEnd = 0.5 + 2*0.5 + 8 = 9.5, available = 36 - 0.5 - 9.5 = 26, floor(26/8) = 3 + expect(tt.maxNumberOfBlocks).toBe(3); }); - it('sets pipeliningAttestationGracePeriod to blockDuration + p2pPropagationTime', () => { + it('sets pipeliningAttestationGracePeriod to zero under early pipelining', () => { const tt = new SequencerTimetable({ ethereumSlotDuration: ETHEREUM_SLOT_DURATION, aztecSlotDuration: AZTEC_SLOT_DURATION, @@ -575,7 +576,7 @@ describe('sequencer-timetable', () => { pipelining: true, }); - expect(tt.pipeliningAttestationGracePeriod).toBe(tt.blockDuration! 
+ tt.p2pPropagationTime); + expect(tt.pipeliningAttestationGracePeriod).toBe(0); }); it('uses separate pipelined deadlines for attestation start vs publish cutoff', () => { diff --git a/yarn-project/sequencer-client/src/test/utils.ts b/yarn-project/sequencer-client/src/test/utils.ts index 4da8e08e3a6a..20343e0b1608 100644 --- a/yarn-project/sequencer-client/src/test/utils.ts +++ b/yarn-project/sequencer-client/src/test/utils.ts @@ -10,7 +10,11 @@ import { PublicDataWrite } from '@aztec/stdlib/avm'; import { CommitteeAttestation, L2Block } from '@aztec/stdlib/block'; import { BlockProposal, CheckpointAttestation, CheckpointProposal, ConsensusPayload } from '@aztec/stdlib/p2p'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; -import { makeAppendOnlyTreeSnapshot, mockTxForRollup } from '@aztec/stdlib/testing'; +import { + TEST_COORDINATION_SIGNATURE_CONTEXT, + makeAppendOnlyTreeSnapshot, + mockTxForRollup, +} from '@aztec/stdlib/testing'; import { BlockHeader, GlobalVariables, type Tx, makeProcessedTxFromPrivateOnlyTx } from '@aztec/stdlib/tx'; import type { MockProxy } from 'jest-mock-extended'; @@ -109,6 +113,7 @@ export function createBlockProposal(block: L2Block, signature: Signature): Block block.archive.root, txHashes, signature, + TEST_COORDINATION_SIGNATURE_CONTEXT, ); } @@ -123,12 +128,19 @@ export function createCheckpointProposal( ): CheckpointProposal { const txHashes = block.body.txEffects.map(tx => tx.txHash); const checkpointHeader = createCheckpointHeaderFromBlock(block); - return new CheckpointProposal(checkpointHeader, block.archive.root, feeAssetPriceModifier, checkpointSignature, { - blockHeader: block.header, - indexWithinCheckpoint: block.indexWithinCheckpoint, - txHashes, - signature: blockSignature ?? 
checkpointSignature, // Use checkpoint signature as block signature if not provided - }); + return new CheckpointProposal( + checkpointHeader, + block.archive.root, + feeAssetPriceModifier, + checkpointSignature, + TEST_COORDINATION_SIGNATURE_CONTEXT, + { + blockHeader: block.header, + indexWithinCheckpoint: block.indexWithinCheckpoint, + txHashes, + signature: blockSignature ?? checkpointSignature, // Use checkpoint signature as block signature if not provided + }, + ); } /** @@ -143,10 +155,16 @@ export function createCheckpointAttestation( feeAssetPriceModifier: bigint = 0n, ): CheckpointAttestation { const checkpointHeader = createCheckpointHeaderFromBlock(block); - const payload = new ConsensusPayload(checkpointHeader, block.archive.root, feeAssetPriceModifier); + const payload = new ConsensusPayload( + checkpointHeader, + block.archive.root, + feeAssetPriceModifier, + TEST_COORDINATION_SIGNATURE_CONTEXT, + ); const attestation = new CheckpointAttestation(payload, signature, signature); - // Set sender directly for testing (bypasses signature recovery) - (attestation as any).sender = sender; + // Bypass signature recovery for testing since we use random signatures + (attestation as any).getSender = () => sender; + (attestation as any).getProposer = () => sender; return attestation; } diff --git a/yarn-project/stdlib/src/block/attestation_info.ts b/yarn-project/stdlib/src/block/attestation_info.ts index 01b4bb51b270..64657db48b89 100644 --- a/yarn-project/stdlib/src/block/attestation_info.ts +++ b/yarn-project/stdlib/src/block/attestation_info.ts @@ -3,7 +3,7 @@ import type { EthAddress } from '@aztec/foundation/eth-address'; import { Checkpoint } from '../checkpoint/checkpoint.js'; import { ConsensusPayload } from '../p2p/consensus_payload.js'; -import { SignatureDomainSeparator, getHashedSignaturePayloadEthSignedMessage } from '../p2p/signature_utils.js'; +import { type CoordinationSignatureContext, getHashedSignaturePayloadTypedData } from 
'../p2p/signature_utils.js'; import type { CommitteeAttestation } from './proposal/committee_attestation.js'; /** @@ -32,11 +32,14 @@ export type AttestationInfo = * Extracts attestation information from a published checkpoint. * Returns info for each attestation, preserving array indices. */ -export function getAttestationInfoFromPublishedCheckpoint(block: { - attestations: CommitteeAttestation[]; - checkpoint: Checkpoint; -}): AttestationInfo[] { - const payload = ConsensusPayload.fromCheckpoint(block.checkpoint); +export function getAttestationInfoFromPublishedCheckpoint( + block: { + attestations: CommitteeAttestation[]; + checkpoint: Checkpoint; + }, + signatureContext: CoordinationSignatureContext, +): AttestationInfo[] { + const payload = ConsensusPayload.fromCheckpoint(block.checkpoint, signatureContext); return getAttestationInfoFromPayload(payload, block.attestations); } @@ -44,10 +47,7 @@ export function getAttestationInfoFromPayload( payload: ConsensusPayload, attestations: CommitteeAttestation[], ): AttestationInfo[] { - const hashedPayload = getHashedSignaturePayloadEthSignedMessage( - payload, - SignatureDomainSeparator.checkpointAttestation, - ); + const hashedPayload = getHashedSignaturePayloadTypedData(payload); return attestations.map(attestation => { // If signature is empty, check if we have an address directly @@ -62,7 +62,7 @@ export function getAttestationInfoFromPayload( // Try to recover address from signature try { - const recoveredAddress = recoverAddress(hashedPayload, attestation.signature); + const recoveredAddress = recoverAddress(hashedPayload, attestation.signature, { allowYParityAsV: true }); return { address: recoveredAddress, status: 'recovered-from-signature' as const }; } catch { // Signature present but recovery failed diff --git a/yarn-project/stdlib/src/block/block_data.ts b/yarn-project/stdlib/src/block/block_data.ts index 584fc68915b8..3f7aeebf7863 100644 --- a/yarn-project/stdlib/src/block/block_data.ts +++ 
b/yarn-project/stdlib/src/block/block_data.ts @@ -3,9 +3,12 @@ import type { CheckpointNumber, IndexWithinCheckpoint } from '@aztec/foundation/ import { z } from 'zod'; +import { CheckpointDataSchema } from '../checkpoint/checkpoint_data.js'; +import { L1PublishedData } from '../checkpoint/published_checkpoint.js'; import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js'; import { BlockHeader } from '../tx/block_header.js'; import { BlockHash } from './block_hash.js'; +import { CommitteeAttestation } from './proposal/committee_attestation.js'; /** L2Block metadata. Equivalent to L2Block but without block body containing tx data. */ export type BlockData = { @@ -23,3 +26,17 @@ export const BlockDataSchema = z.object({ checkpointNumber: CheckpointNumberSchema, indexWithinCheckpoint: IndexWithinCheckpointSchema, }); + +export const BlockDataWithCheckpointContextSchema = z + .object({ + data: BlockDataSchema, + checkpoint: CheckpointDataSchema.optional(), + l1: L1PublishedData.schema.optional(), + attestations: z.array(CommitteeAttestation.schema), + }) + .transform(obj => ({ + data: obj.data, + checkpoint: obj.checkpoint, + l1: obj.l1, + attestations: obj.attestations, + })); diff --git a/yarn-project/stdlib/src/block/block_parameter.ts b/yarn-project/stdlib/src/block/block_parameter.ts index a97695d639fa..686fd3abeaa4 100644 --- a/yarn-project/stdlib/src/block/block_parameter.ts +++ b/yarn-project/stdlib/src/block/block_parameter.ts @@ -1,10 +1,43 @@ import { BlockNumberSchema } from '@aztec/foundation/branded-types'; +import { jsonStringify } from '@aztec/foundation/json-rpc'; +import { schemas } from '@aztec/foundation/schemas'; import { z } from 'zod'; +import { ChainTipSchema } from '../interfaces/chain_tips.js'; import { BlockHash } from './block_hash.js'; -export const BlockParameterSchema = z.union([BlockHash.schema, BlockNumberSchema, z.literal('latest')]); +/** + * Selector for a block in RPC calls. 
+ * + * Accepts a block number, a {@link BlockHash}, a chain-tip name (e.g. `'proven'`, `'checkpointed'`), + * `'latest'` (alias for `'proposed'`), or the explicit object variants `{ number }`, `{ hash }`, + * and `{ archive }`. + */ +export const BlockParameterSchema = z.union([ + BlockHash.schema, + BlockNumberSchema, + ChainTipSchema, + z.literal('latest'), + z.object({ number: BlockNumberSchema }), + z.object({ hash: BlockHash.schema }), + z.object({ archive: schemas.Fr }), +]); -/** Block parameter - either a specific BlockNumber, block hash (BlockHash), or 'latest' */ export type BlockParameter = z.infer; + +export function inspectBlockParameter(param: BlockParameter) { + if (typeof param === 'number') { + return param.toString(); + } else if (typeof param === 'string') { + return param; + } else if ('number' in param) { + return `number=${param.number.toString()}`; + } else if ('hash' in param) { + return `hash=${param.hash.toString()}`; + } else if ('archive' in param) { + return `archive=${param.archive.toString()}`; + } else { + return jsonStringify(param); + } +} diff --git a/yarn-project/stdlib/src/block/l2_block_source.ts b/yarn-project/stdlib/src/block/l2_block_source.ts index 880ffb15332a..72cbf27e2c5c 100644 --- a/yarn-project/stdlib/src/block/l2_block_source.ts +++ b/yarn-project/stdlib/src/block/l2_block_source.ts @@ -14,7 +14,7 @@ import { z } from 'zod'; import type { Checkpoint } from '../checkpoint/checkpoint.js'; import type { CheckpointData, CommonCheckpointData, ProposedCheckpointData } from '../checkpoint/checkpoint_data.js'; -import type { PublishedCheckpoint } from '../checkpoint/published_checkpoint.js'; +import type { L1PublishedData, PublishedCheckpoint } from '../checkpoint/published_checkpoint.js'; import type { L1RollupConstants } from '../epoch-helpers/index.js'; import { CheckpointHeader } from '../rollup/checkpoint_header.js'; import type { BlockHeader } from '../tx/block_header.js'; @@ -25,8 +25,17 @@ import type { BlockData } 
from './block_data.js'; import type { BlockHash } from './block_hash.js'; import type { CheckpointedL2Block } from './checkpointed_l2_block.js'; import type { L2Block } from './l2_block.js'; +import type { CommitteeAttestation } from './proposal/committee_attestation.js'; import type { ValidateCheckpointNegativeResult, ValidateCheckpointResult } from './validate_block_result.js'; +/** Block metadata plus checkpoint-derived context (L1 publish info, attestations). */ +export type BlockDataWithCheckpointContext = { + data: BlockData; + checkpoint?: CheckpointData; + l1?: L1PublishedData; + attestations: CommitteeAttestation[]; +}; + /** * Interface of classes allowing for the retrieval of L2 blocks. */ @@ -111,6 +120,37 @@ export interface L2BlockSource { */ getCheckpointsDataForEpoch(epochNumber: EpochNumber): Promise; + /** + * Gets lightweight checkpoint metadata for a single checkpoint. + * Cheap passthrough for metadata-only queries (no block body reads). + * @param checkpointNumber - The checkpoint number to retrieve. + * @returns The requested checkpoint data (or undefined if not found). + */ + getCheckpointData(checkpointNumber: CheckpointNumber): Promise; + + /** + * Gets up to `limit` amount of checkpoint metadata entries starting from `from`. + * Cheap passthrough for metadata-only queries (no block body reads). + * @param from - The first checkpoint number to return (inclusive). + * @param limit - The maximum number of checkpoints to return. + */ + getCheckpointDataRange(from: CheckpointNumber, limit: number): Promise; + + /** + * Looks up the checkpoint number that contains the given slot. + * @param slot - The slot number to look up. + * @returns The checkpoint number (or undefined if not found). + */ + getCheckpointNumberBySlot(slot: SlotNumber): Promise; + + /** + * Gets block metadata plus checkpoint-derived context (L1 publish info, attestations) + * without deserializing tx bodies. 
Uses checkpoint-level values when the block is + * checkpointed; otherwise returns `l1: undefined` and empty attestations. + * @param number - The block number to retrieve. + */ + getBlockDataWithCheckpointContext(number: BlockNumber): Promise; + /** * Gets a block header by its hash. * @param blockHash - The block hash to retrieve. @@ -230,10 +270,10 @@ export interface L2BlockSource { getPendingChainValidationStatus(): Promise; /** Returns the checkpoint at the proposed chain tip. */ - getProposedCheckpoint(): Promise; + getLastCheckpoint(): Promise; /** Returns proposed checkpoint, if set, undefined if not*/ - getProposedCheckpointOnly(): Promise; + getLastProposedCheckpoint(): Promise; /** Force a sync. */ syncImmediate(): Promise; diff --git a/yarn-project/stdlib/src/block/proposal/attestations_and_signers.ts b/yarn-project/stdlib/src/block/proposal/attestations_and_signers.ts index c5c5a90c6426..4213fc6abfb5 100644 --- a/yarn-project/stdlib/src/block/proposal/attestations_and_signers.ts +++ b/yarn-project/stdlib/src/block/proposal/attestations_and_signers.ts @@ -4,26 +4,37 @@ import { hexToBuffer } from '@aztec/foundation/string'; import { encodeAbiParameters, parseAbiParameters } from 'viem'; import { z } from 'zod'; -import type { Signable, SignatureDomainSeparator } from '../../p2p/signature_utils.js'; +import { + type CoordinationSignatureContext, + type CoordinationSignatureType, + type Signable, + coordinationSignatureContextSchema, +} from '../../p2p/signature_utils.js'; import { CommitteeAttestation, EthAddress } from './committee_attestation.js'; export class CommitteeAttestationsAndSigners implements Signable { - constructor(public attestations: CommitteeAttestation[]) {} + readonly primaryType: CoordinationSignatureType = 'AttestationsAndSigners'; + + constructor( + public attestations: CommitteeAttestation[], + public readonly signatureContext: CoordinationSignatureContext, + ) {} static get schema() { return z .object({ attestations: 
CommitteeAttestation.schema.array(), + signatureContext: coordinationSignatureContextSchema, }) - .transform(obj => new CommitteeAttestationsAndSigners(obj.attestations)); + .transform(obj => new CommitteeAttestationsAndSigners(obj.attestations, obj.signatureContext)); } - getPayloadToSign(domainSeparator: SignatureDomainSeparator): Buffer { - const abi = parseAbiParameters('uint8,(bytes,bytes),address[]'); + getPayloadToSign(): Buffer { + // Matches the L1 abi.encode(attestations, signers) in AttestationLib.sol#getAttestationsAndSignersDigest. + const abi = parseAbiParameters('(bytes,bytes),address[]'); const packed = this.getPackedAttestations(); const encodedData = encodeAbiParameters(abi, [ - domainSeparator, [packed.signatureIndices, packed.signaturesOrAddresses], this.getSigners().map(s => s.toString()), ]); @@ -31,8 +42,8 @@ export class CommitteeAttestationsAndSigners implements Signable { return hexToBuffer(encodedData); } - static empty(): CommitteeAttestationsAndSigners { - return new CommitteeAttestationsAndSigners([]); + static empty(signatureContext: CoordinationSignatureContext): CommitteeAttestationsAndSigners { + return new CommitteeAttestationsAndSigners([], signatureContext); } toString() { @@ -53,9 +64,9 @@ export class CommitteeAttestationsAndSigners implements Signable { * @param attestations - Array of committee attestations with addresses and signatures * @returns Packed attestations with bitmap and tightly packed signature/address data */ - getPackedAttestations(): ViemCommitteeAttestations { - const length = this.attestations.length; - const attestations = this.attestations.map(a => a.toViem()); + static packAttestations(attestations: CommitteeAttestation[]): ViemCommitteeAttestations { + const length = attestations.length; + const viemAttestations = attestations.map(a => a.toViem()); // Calculate bitmap size (1 bit per attestation, rounded up to nearest byte) const bitmapSize = Math.ceil(length / 8); @@ -63,8 +74,8 @@ export class 
CommitteeAttestationsAndSigners implements Signable { // Calculate total data size needed let totalDataSize = 0; - for (let i = 0; i < length; i++) { - const signature = attestations[i].signature; + for (const attestation of viemAttestations) { + const signature = attestation.signature; // Check if signature is empty (v = 0) const isEmpty = signature.v === 0; @@ -79,8 +90,7 @@ export class CommitteeAttestationsAndSigners implements Signable { let dataIndex = 0; // Pack the data - for (let i = 0; i < length; i++) { - const attestation = attestations[i]; + for (const [i, attestation] of viemAttestations.entries()) { const signature = attestation.signature; // Check if signature is empty @@ -90,7 +100,7 @@ export class CommitteeAttestationsAndSigners implements Signable { // Set bit in bitmap (bit 7-0 in each byte, left to right) const byteIndex = Math.floor(i / 8); const bitIndex = 7 - (i % 8); - signatureIndices[byteIndex] |= 1 << bitIndex; + signatureIndices[byteIndex] = (signatureIndices[byteIndex] ?? 
0) | (1 << bitIndex); // Pack signature: v + r + s signaturesOrAddresses[dataIndex] = signature.v; @@ -118,6 +128,10 @@ export class CommitteeAttestationsAndSigners implements Signable { signaturesOrAddresses: `0x${Buffer.from(signaturesOrAddresses).toString('hex')}`, }; } + + getPackedAttestations(): ViemCommitteeAttestations { + return CommitteeAttestationsAndSigners.packAttestations(this.attestations); + } } /** @@ -130,8 +144,9 @@ export class MaliciousCommitteeAttestationsAndSigners extends CommitteeAttestati constructor( attestations: CommitteeAttestation[], private signers: EthAddress[], + signatureContext: CoordinationSignatureContext, ) { - super(attestations); + super(attestations, signatureContext); } override getSigners(): EthAddress[] { diff --git a/yarn-project/stdlib/src/checkpoint/checkpoint_data.ts b/yarn-project/stdlib/src/checkpoint/checkpoint_data.ts index 38cd062e8b50..b8ce95fd1231 100644 --- a/yarn-project/stdlib/src/checkpoint/checkpoint_data.ts +++ b/yarn-project/stdlib/src/checkpoint/checkpoint_data.ts @@ -32,6 +32,8 @@ export type L1EnrichedCheckpointData = { export type StorageEnrichedCheckpointData = { archive: AppendOnlyTreeSnapshot; checkpointOutHash: Fr; + /** Fee asset price modifier in basis points. Defaults to 0 (no change) when not explicitly set. 
*/ + feeAssetPriceModifier: bigint; }; /** Data stored only with proposed checkpoint data */ @@ -70,6 +72,7 @@ export const CheckpointDataSchema = z checkpointOutHash: schemas.Fr, startBlock: BlockNumberSchema, blockCount: schemas.Integer, + feeAssetPriceModifier: schemas.BigInt, attestations: z.array(CommitteeAttestation.schema), l1: L1PublishedData.schema, }) @@ -81,6 +84,7 @@ export const CheckpointDataSchema = z checkpointOutHash: obj.checkpointOutHash, startBlock: obj.startBlock, blockCount: obj.blockCount, + feeAssetPriceModifier: obj.feeAssetPriceModifier, attestations: obj.attestations, l1: obj.l1, }), diff --git a/yarn-project/stdlib/src/config/chain-config.ts b/yarn-project/stdlib/src/config/chain-config.ts index 533a1e0f798b..d512d49588a3 100644 --- a/yarn-project/stdlib/src/config/chain-config.ts +++ b/yarn-project/stdlib/src/config/chain-config.ts @@ -1,5 +1,5 @@ import { l1ContractAddressesMapping } from '@aztec/ethereum/l1-contract-addresses'; -import type { ConfigMappingsType } from '@aztec/foundation/config'; +import { type ConfigMappingsType, numberConfigHelper } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; export { type SequencerConfig, SequencerConfigSchema } from '../interfaces/configs.js'; @@ -14,8 +14,7 @@ export const emptyChainConfig: ChainConfig = { export const chainConfigMappings: ConfigMappingsType = { l1ChainId: { env: 'L1_CHAIN_ID', - parseEnv: (val: string) => +val, - defaultValue: 31337, + ...numberConfigHelper(31337), description: 'The chain ID of the ethereum host.', }, rollupVersion: { diff --git a/yarn-project/stdlib/src/config/sequencer-config.ts b/yarn-project/stdlib/src/config/sequencer-config.ts index 7108011f0d04..cd201d4d8ff9 100644 --- a/yarn-project/stdlib/src/config/sequencer-config.ts +++ b/yarn-project/stdlib/src/config/sequencer-config.ts @@ -1,4 +1,9 @@ -import type { ConfigMappingsType } from '@aztec/foundation/config'; +import { + type ConfigMappingsType, + 
floatConfigHelper, + numberConfigHelper, + optionalNumberConfigHelper, +} from '@aztec/foundation/config'; import type { SequencerConfig } from '../interfaces/configs.js'; import { DEFAULT_P2P_PROPAGATION_TIME } from '../timetable/index.js'; @@ -23,25 +28,24 @@ export const sharedSequencerConfigMappings: ConfigMappingsType< description: 'Duration per block in milliseconds when building multiple blocks per slot. ' + 'If undefined (default), builds a single block per slot using the full slot duration.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, expectedBlockProposalsPerSlot: { env: 'SEQ_EXPECTED_BLOCK_PROPOSALS_PER_SLOT', description: 'Expected number of block proposals per slot for P2P peer scoring. ' + '0 (default) disables block proposal scoring. Set to a positive value to enable.', - parseEnv: (val: string) => parseInt(val, 10), - defaultValue: 0, + ...numberConfigHelper(0), }, maxTxsPerBlock: { env: 'SEQ_MAX_TX_PER_BLOCK', description: 'The maximum number of txs to include in a block.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, attestationPropagationTime: { env: 'SEQ_ATTESTATION_PROPAGATION_TIME', description: 'How many seconds it takes for proposals and attestations to travel across the p2p layer (one-way).', - parseEnv: (val: string) => parseFloat(val), defaultValue: DEFAULT_P2P_PROPAGATION_TIME, + ...floatConfigHelper(DEFAULT_P2P_PROPAGATION_TIME), }, }; diff --git a/yarn-project/stdlib/src/ha-signing/local_config.ts b/yarn-project/stdlib/src/ha-signing/local_config.ts index b28b386cc4c6..479ab4e7199e 100644 --- a/yarn-project/stdlib/src/ha-signing/local_config.ts +++ b/yarn-project/stdlib/src/ha-signing/local_config.ts @@ -1,4 +1,4 @@ -import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundation/config'; +import { type ConfigMappingsType, getConfigFromMappings, optionalNumberConfigHelper } from '@aztec/foundation/config'; import { zodFor } from 
'@aztec/foundation/schemas'; import { type DataStoreConfig, dataConfigMappings } from '@aztec/stdlib/kv-store'; @@ -26,7 +26,7 @@ export const localSignerConfigMappings: ConfigMappingsType = env: 'SIGNING_PROTECTION_MAP_SIZE_KB', description: 'Maximum size of the local signing-protection LMDB store in KB. Overwrites the general dataStoreMapSizeKb.', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), }, }; diff --git a/yarn-project/stdlib/src/interfaces/archiver.test.ts b/yarn-project/stdlib/src/interfaces/archiver.test.ts index 961ce6d79e2d..be161a9cfdd4 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.test.ts +++ b/yarn-project/stdlib/src/interfaces/archiver.test.ts @@ -128,6 +128,26 @@ describe('ArchiverApiSchema', () => { expect(result).toBeUndefined(); }); + it('getBlockDataWithCheckpointContext', async () => { + const result = await context.client.getBlockDataWithCheckpointContext(BlockNumber(1)); + expect(result).toBeUndefined(); + }); + + it('getCheckpointData', async () => { + const result = await context.client.getCheckpointData(CheckpointNumber(1)); + expect(result).toBeUndefined(); + }); + + it('getCheckpointDataRange', async () => { + const result = await context.client.getCheckpointDataRange(CheckpointNumber(1), 1); + expect(result).toEqual([]); + }); + + it('getCheckpointNumberBySlot', async () => { + const result = await context.client.getCheckpointNumberBySlot(SlotNumber(1)); + expect(result).toBeUndefined(); + }); + it('getBlockHeaderByHash', async () => { const result = await context.client.getBlockHeaderByHash(BlockHash.random()); expect(result).toBeInstanceOf(BlockHeader); @@ -358,8 +378,8 @@ describe('ArchiverApiSchema', () => { expect(result).toBe(1n); }); - it('getProposedCheckpoint', async () => { - const result = await context.client.getProposedCheckpoint(); + it('getLastCheckpoint', async () => { + const result = await context.client.getLastCheckpoint(); expect(result).toEqual({ checkpointNumber: 1, header: 
expect.any(CheckpointHeader), @@ -372,8 +392,8 @@ describe('ArchiverApiSchema', () => { }); }); - it('getProposedCheckpointOnly', async () => { - const result = await context.client.getProposedCheckpointOnly(); + it('getLastProposedCheckpoint', async () => { + const result = await context.client.getLastProposedCheckpoint(); expect(result).toEqual({ checkpointNumber: 1, header: expect.any(CheckpointHeader), @@ -419,10 +439,10 @@ class MockArchiver implements ArchiverApi { getPendingChainValidationStatus(): Promise { return Promise.resolve({ valid: true }); } - getProposedCheckpoint(): Promise { - return this.getProposedCheckpointOnly(); + getLastCheckpoint(): Promise { + return this.getLastProposedCheckpoint(); } - getProposedCheckpointOnly(): Promise { + getLastProposedCheckpoint(): Promise { return Promise.resolve({ checkpointNumber: CheckpointNumber(1), header: CheckpointHeader.random(), @@ -571,11 +591,24 @@ class MockArchiver implements ArchiverApi { checkpointOutHash: checkpoint.getCheckpointOutHash(), startBlock: BlockNumber(1), blockCount: checkpoint.blocks.length, + feeAssetPriceModifier: 0n, attestations: [CommitteeAttestation.random()], l1: L1PublishedData.random(), }, ]; } + getCheckpointData(_n: CheckpointNumber): Promise { + return Promise.resolve(undefined); + } + getCheckpointDataRange(_from: CheckpointNumber, _limit: number): Promise { + return Promise.resolve([]); + } + getCheckpointNumberBySlot(_slot: SlotNumber): Promise { + return Promise.resolve(undefined); + } + getBlockDataWithCheckpointContext(_n: BlockNumber) { + return Promise.resolve(undefined); + } async getCheckpointedBlocksForEpoch(epochNumber: EpochNumber): Promise { expect(epochNumber).toEqual(EpochNumber(1)); const block = await L2Block.random(BlockNumber(Number(epochNumber))); diff --git a/yarn-project/stdlib/src/interfaces/archiver.ts b/yarn-project/stdlib/src/interfaces/archiver.ts index 43af8f6f341e..2fb4049ca4da 100644 --- a/yarn-project/stdlib/src/interfaces/archiver.ts +++ 
b/yarn-project/stdlib/src/interfaces/archiver.ts @@ -4,7 +4,7 @@ import type { ApiSchemaFor } from '@aztec/foundation/schemas'; import { z } from 'zod'; -import { BlockDataSchema } from '../block/block_data.js'; +import { BlockDataSchema, BlockDataWithCheckpointContextSchema } from '../block/block_data.js'; import { BlockHash } from '../block/block_hash.js'; import { CheckpointedL2Block } from '../block/checkpointed_l2_block.js'; import { L2Block } from '../block/l2_block.js'; @@ -117,6 +117,16 @@ export const ArchiverApiSchema: ApiSchemaFor = { getBlockHeaderByArchive: z.function().args(schemas.Fr).returns(BlockHeader.schema.optional()), getBlockData: z.function().args(BlockNumberSchema).returns(BlockDataSchema.optional()), getBlockDataByArchive: z.function().args(schemas.Fr).returns(BlockDataSchema.optional()), + getBlockDataWithCheckpointContext: z + .function() + .args(BlockNumberSchema) + .returns(BlockDataWithCheckpointContextSchema.optional()), + getCheckpointData: z.function().args(CheckpointNumberSchema).returns(CheckpointDataSchema.optional()), + getCheckpointDataRange: z + .function() + .args(CheckpointNumberSchema, schemas.Integer) + .returns(z.array(CheckpointDataSchema)), + getCheckpointNumberBySlot: z.function().args(schemas.SlotNumber).returns(CheckpointNumberSchema.optional()), getL2Block: z.function().args(BlockNumberSchema).returns(L2Block.schema.optional()), getL2BlockByHash: z.function().args(BlockHash.schema).returns(L2Block.schema.optional()), getL2BlockByArchive: z.function().args(schemas.Fr).returns(L2Block.schema.optional()), @@ -158,8 +168,8 @@ export const ArchiverApiSchema: ApiSchemaFor = { .args() .returns(z.object({ genesisArchiveRoot: schemas.Fr })), getL1Timestamp: z.function().args().returns(schemas.BigInt.optional()), - getProposedCheckpoint: z.function().args().returns(ProposedCheckpointDataSchema.optional()), - getProposedCheckpointOnly: z.function().args().returns(ProposedCheckpointDataSchema.optional()), + getLastCheckpoint: 
z.function().args().returns(ProposedCheckpointDataSchema.optional()), + getLastProposedCheckpoint: z.function().args().returns(ProposedCheckpointDataSchema.optional()), syncImmediate: z.function().args().returns(z.void()), isPendingChainInvalid: z.function().args().returns(z.boolean()), getPendingChainValidationStatus: z.function().args().returns(ValidateCheckpointResultSchema), diff --git a/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts b/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts index e8533579caad..8b42f9ba73dd 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node-admin.test.ts @@ -135,6 +135,7 @@ class MockAztecNodeAdmin implements AztecNodeAdmin { signingTimeoutMs: 3000, maxStuckDutiesAgeMs: 72000, dataStoreMapSizeKb: 128 * 1024 * 1024, + l1ChainId: 1, l1Contracts: { rollupAddress: EthAddress.random(), }, diff --git a/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts b/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts index 45d098d2b265..fb4e062682b9 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node-admin.ts @@ -81,6 +81,8 @@ export type AztecNodeAdminConfig = Omit & { maxPendingTxCount: number; + // Keep in sync with P2PConfig.skipIncomingProposals (circular dep prevents Pick here) + skipIncomingProposals?: boolean; }; export const AztecNodeAdminConfigSchema = SequencerConfigSchema.merge(ProverConfigSchema) @@ -93,7 +95,7 @@ export const AztecNodeAdminConfigSchema = SequencerConfigSchema.merge(ProverConf skipValidateCheckpointAttestations: true, }), ) - .merge(z.object({ maxPendingTxCount: z.number() })); + .merge(z.object({ maxPendingTxCount: z.number(), skipIncomingProposals: z.boolean().optional() })); export const AztecNodeAdminApiSchema: ApiSchemaFor = { getConfig: z.function().returns(AztecNodeAdminConfigSchema), diff --git 
a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts index 26dfaef2d577..f648ae991c47 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.test.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node.test.ts @@ -1,8 +1,6 @@ import { ARCHIVE_HEIGHT, L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT } from '@aztec/constants'; import { type L1ContractAddresses, L1ContractsNames } from '@aztec/ethereum/l1-contract-addresses'; import { BlockNumber, CheckpointNumber, EpochNumber, SlotNumber } from '@aztec/foundation/branded-types'; -import { Buffer32 } from '@aztec/foundation/buffer'; -import { timesAsync } from '@aztec/foundation/collection'; import { randomInt } from '@aztec/foundation/crypto/random'; import { Fr } from '@aztec/foundation/curves/bn254'; import { memoize } from '@aztec/foundation/decorators'; @@ -16,10 +14,8 @@ import times from 'lodash.times'; import type { ContractArtifact } from '../abi/abi.js'; import { AztecAddress } from '../aztec-address/index.js'; import type { DataInBlock } from '../block/in_block.js'; -import { type BlockData, BlockHash, type BlockParameter, CommitteeAttestation, L2Block } from '../block/index.js'; +import { BlockHash, type BlockParameter, type CheckpointedL2Block } from '../block/index.js'; import type { L2Tips } from '../block/l2_block_source.js'; -import { Checkpoint } from '../checkpoint/checkpoint.js'; -import { L1PublishedData, PublishedCheckpoint } from '../checkpoint/published_checkpoint.js'; import { type ContractClassPublic, type ContractInstanceWithAddress, @@ -53,6 +49,10 @@ import type { SingleValidatorStats, ValidatorsStats } from '../validators/types. 
import type { AllowedElement } from './allowed_element.js'; import { MAX_RPC_LEN } from './api_limit.js'; import { type AztecNode, AztecNodeApiSchema } from './aztec-node.js'; +import type { BlockIncludeOptions, BlockResponse } from './block_response.js'; +import type { ChainTip, ChainTips } from './chain_tips.js'; +import type { CheckpointParameter } from './checkpoint_parameter.js'; +import type { CheckpointIncludeOptions, CheckpointResponse } from './checkpoint_response.js'; import type { SequencerConfig } from './configs.js'; import type { GetContractClassLogsResponse, GetPublicLogsResponse } from './get_logs_response.js'; import type { ProverConfig } from './prover-client.js'; @@ -84,8 +84,8 @@ describe('AztecNodeApiSchema', () => { expect([...tested].sort()).toEqual(all.sort()); }); - it('getL2Tips', async () => { - const result = await context.client.getL2Tips(); + it('getChainTips', async () => { + const result = await context.client.getChainTips(); const expectedTipId = { block: { number: 1, hash: `0x01` }, checkpoint: { number: 1, hash: `0x01` }, @@ -93,7 +93,6 @@ describe('AztecNodeApiSchema', () => { expect(result).toEqual({ proposed: { number: 1, hash: `0x01` }, checkpointed: expectedTipId, - proposedCheckpoint: expectedTipId, proven: expectedTipId, finalized: expectedTipId, }); @@ -162,27 +161,37 @@ describe('AztecNodeApiSchema', () => { it('getBlock', async () => { const response = await context.client.getBlock(BlockNumber(1)); - expect(response).toBeInstanceOf(L2Block); + expect(response).toBeUndefined(); }); - it('getBlockByHash', async () => { - const response = await context.client.getBlockByHash(BlockHash.random()); - expect(response).toBeInstanceOf(L2Block); + it('getBlockHeader', async () => { + const response = await context.client.getBlockHeader(BlockNumber(1)); + expect(response).toEqual(BlockHeader.empty()); }); - it('getBlockByArchive', async () => { - const response = await context.client.getBlockByArchive(Fr.random()); - 
expect(response).toBeInstanceOf(L2Block); + it('getCheckpointedBlocks', async () => { + const response = await context.client.getCheckpointedBlocks(BlockNumber(1), 1); + expect(response).toEqual([]); }); - it('getBlockHeader', async () => { - const response = await context.client.getBlockHeader(BlockHash.random()); - expect(response).toBeInstanceOf(BlockHeader); + it('getL2Tips', async () => { + const response = await context.client.getL2Tips(); + const tipId = { + block: { number: 1, hash: `0x01` }, + checkpoint: { number: 1, hash: `0x01` }, + }; + expect(response).toEqual({ + proposed: { number: 1, hash: `0x01` }, + checkpointed: tipId, + proposedCheckpoint: tipId, + proven: tipId, + finalized: tipId, + }); }); - it('getBlockHeaderByArchive', async () => { - const response = await context.client.getBlockHeaderByArchive(Fr.random()); - expect(response).toBeInstanceOf(BlockHeader); + it('getCheckpoint', async () => { + const response = await context.client.getCheckpoint(CheckpointNumber(1)); + expect(response).toBeUndefined(); }); it('getCurrentMinFees', async () => { @@ -201,23 +210,14 @@ describe('AztecNodeApiSchema', () => { }); it('getBlockNumber', async () => { - const response = await context.client.getBlockNumber(); - expect(response).toBe(BlockNumber(1)); - }); - - it('getProvenBlockNumber', async () => { - const response = await context.client.getProvenBlockNumber(); - expect(response).toBe(BlockNumber(1)); - }); - - it('getCheckpointedBlockNumber', async () => { - const response = await context.client.getCheckpointedBlockNumber(); - expect(response).toBe(BlockNumber(1)); + expect(await context.client.getBlockNumber()).toBe(BlockNumber(1)); + expect(await context.client.getBlockNumber('proven')).toBe(BlockNumber(1)); + expect(await context.client.getBlockNumber('checkpointed')).toBe(BlockNumber(1)); }); it('getCheckpointNumber', async () => { - const response = await context.client.getCheckpointNumber(); - expect(response).toBe(CheckpointNumber(1)); + 
expect(await context.client.getCheckpointNumber()).toBe(CheckpointNumber(1)); + expect(await context.client.getCheckpointNumber('proven')).toBe(CheckpointNumber(1)); }); it('isReady', async () => { @@ -239,9 +239,8 @@ describe('AztecNodeApiSchema', () => { }); it('getBlocks', async () => { - const response = await context.client.getBlocks(BlockNumber(1), BlockNumber(1)); - expect(response).toHaveLength(1); - expect(response[0]).toBeInstanceOf(L2Block); + const response = await context.client.getBlocks(BlockNumber(1), 1); + expect(response).toEqual([]); await expect(context.client.getBlocks(-1 as BlockNumber, BlockNumber(1))).rejects.toThrow(); await expect(context.client.getBlocks(BlockNumber.ZERO, BlockNumber(1))).rejects.toThrow(); @@ -251,12 +250,6 @@ describe('AztecNodeApiSchema', () => { it('getCheckpoints', async () => { const response = await context.client.getCheckpoints(CheckpointNumber(1), 1); - expect(response).toHaveLength(1); - expect(response[0]).toBeInstanceOf(PublishedCheckpoint); - }); - - it('getCheckpointedBlocks', async () => { - const response = await context.client.getCheckpointedBlocks(BlockNumber(1), 1); expect(response).toEqual([]); }); @@ -354,11 +347,6 @@ describe('AztecNodeApiSchema', () => { expect(response).toBeInstanceOf(Fr); }); - it('getBlockHeader', async () => { - const response = await context.client.getBlockHeader(); - expect(response).toBeInstanceOf(BlockHeader); - }); - it('getValidatorsStats', async () => { handler.validatorStats = { stats: { @@ -527,7 +515,7 @@ class MockAztecNode implements AztecNode { }); } - getL2Tips(): Promise { + getChainTips(): Promise { const tipId = { block: { number: BlockNumber(1), hash: `0x01` }, checkpoint: { number: CheckpointNumber(1), hash: `0x01` }, @@ -535,13 +523,38 @@ class MockAztecNode implements AztecNode { return Promise.resolve({ proposed: { number: BlockNumber(1), hash: `0x01` }, checkpointed: tipId, - proposedCheckpoint: tipId, proven: tipId, finalized: tipId, }); } - 
getCheckpointedBlocks(_from: BlockNumber, _limit: number) { + getBlock( + _param: BlockParameter, + _options?: Opts, + ): Promise | undefined> { + return Promise.resolve(undefined); + } + + getBlocks( + _from: BlockNumber, + _limit: number, + _options?: Opts, + ): Promise[]> { + return Promise.resolve([]); + } + + getCheckpoint( + _param: CheckpointParameter, + _options?: Opts, + ): Promise | undefined> { + return Promise.resolve(undefined); + } + + getCheckpoints( + _from: CheckpointNumber, + _limit: number, + _options?: Opts, + ): Promise[]> { return Promise.resolve([]); } @@ -549,6 +562,28 @@ class MockAztecNode implements AztecNode { return Promise.resolve([]); } + getL2Tips(): Promise { + const tipId = { + block: { number: BlockNumber(1), hash: `0x01` }, + checkpoint: { number: CheckpointNumber(1), hash: `0x01` }, + }; + return Promise.resolve({ + proposed: { number: BlockNumber(1), hash: `0x01` }, + checkpointed: tipId, + proposedCheckpoint: tipId, + proven: tipId, + finalized: tipId, + }); + } + + getBlockHeader(_number: BlockNumber | 'latest'): Promise { + return Promise.resolve(BlockHeader.empty()); + } + + getCheckpointedBlocks(_from: BlockNumber, _limit: number): Promise { + return Promise.resolve([]); + } + findLeavesIndexes( referenceBlock: BlockParameter, treeId: MerkleTreeId, @@ -641,25 +676,6 @@ class MockAztecNode implements AztecNode { expect(leafSlot).toBeInstanceOf(Fr); return Promise.resolve(PublicDataWitness.random()); } - getBlock(blockParameter: BlockParameter): Promise { - const blockNum = blockParameter === 'latest' ? 
BlockNumber(1) : (blockParameter as BlockNumber); - return L2Block.random(blockNum); - } - getBlockByHash(_blockHash: BlockHash): Promise { - return L2Block.random(BlockNumber(1)); - } - getBlockByArchive(_archive: Fr): Promise { - return L2Block.random(BlockNumber(1)); - } - getBlockHeaderByArchive(_archive: Fr): Promise { - return Promise.resolve(BlockHeader.empty()); - } - getBlockData(_number: BlockNumber): Promise { - return Promise.resolve(undefined); - } - getBlockDataByArchive(_archive: Fr): Promise { - return Promise.resolve(undefined); - } getCurrentMinFees(): Promise { return Promise.resolve(GasFees.empty()); } @@ -669,16 +685,10 @@ class MockAztecNode implements AztecNode { getMaxPriorityFees(): Promise { return Promise.resolve(GasFees.empty()); } - getBlockNumber(): Promise { + getBlockNumber(_tip?: ChainTip): Promise { return Promise.resolve(BlockNumber(1)); } - getProvenBlockNumber(): Promise { - return Promise.resolve(BlockNumber(1)); - } - getCheckpointedBlockNumber(): Promise { - return Promise.resolve(BlockNumber(1)); - } - getCheckpointNumber(): Promise { + getCheckpointNumber(_tip?: ChainTip): Promise { return Promise.resolve(CheckpointNumber(1)); } isReady(): Promise { @@ -700,22 +710,6 @@ class MockAztecNode implements AztecNode { realProofs: true, }; } - getBlocks(from: number, limit: number): Promise { - return Promise.all( - Array(limit) - .fill(0) - .map(i => L2Block.random(BlockNumber(from + i))), - ); - } - getCheckpoints(from: CheckpointNumber, limit: number): Promise { - return timesAsync(limit, async i => - PublishedCheckpoint.from({ - checkpoint: await Checkpoint.random(CheckpointNumber(from + i)), - attestations: [CommitteeAttestation.random()], - l1: new L1PublishedData(1n, 1n, Buffer32.random().toString()), - }), - ); - } getNodeVersion(): Promise { return Promise.resolve('1.0.0'); } @@ -801,9 +795,6 @@ class MockAztecNode implements AztecNode { expect(slot).toBeInstanceOf(Fr); return Promise.resolve(Fr.random()); } - 
getBlockHeader(_block?: BlockParameter): Promise { - return Promise.resolve(BlockHeader.empty()); - } getValidatorsStats(): Promise { return Promise.resolve(this.validatorStats!); } diff --git a/yarn-project/stdlib/src/interfaces/aztec-node.ts b/yarn-project/stdlib/src/interfaces/aztec-node.ts index 57d2f2efd944..2d0660fe3183 100644 --- a/yarn-project/stdlib/src/interfaces/aztec-node.ts +++ b/yarn-project/stdlib/src/interfaces/aztec-node.ts @@ -1,13 +1,13 @@ import { ARCHIVE_HEIGHT, L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT } from '@aztec/constants'; import { type L1ContractAddresses, L1ContractAddressesSchema } from '@aztec/ethereum/l1-contract-addresses'; import { - BlockNumber, + type BlockNumber, BlockNumberPositiveSchema, BlockNumberSchema, - CheckpointNumber, + type CheckpointNumber, CheckpointNumberPositiveSchema, CheckpointNumberSchema, - EpochNumber, + type EpochNumber, EpochNumberSchema, type SlotNumber, } from '@aztec/foundation/branded-types'; @@ -23,10 +23,8 @@ import { BlockHash } from '../block/block_hash.js'; import { type BlockParameter, BlockParameterSchema } from '../block/block_parameter.js'; import { CheckpointedL2Block } from '../block/checkpointed_l2_block.js'; import { type DataInBlock, dataInBlockSchemaFor } from '../block/in_block.js'; -import { L2Block } from '../block/l2_block.js'; -import { type L2BlockSource, type L2Tips, L2TipsSchema } from '../block/l2_block_source.js'; -import { CheckpointDataSchema } from '../checkpoint/checkpoint_data.js'; -import { PublishedCheckpoint } from '../checkpoint/published_checkpoint.js'; +import { type L2Tips, L2TipsSchema } from '../block/l2_block_source.js'; +import { type CheckpointData, CheckpointDataSchema } from '../checkpoint/checkpoint_data.js'; import { type ContractClassPublic, ContractClassPublicSchema, @@ -61,6 +59,20 @@ import type { SingleValidatorStats, ValidatorsStats } from '../validators/types. 
import { type ComponentsVersions, getVersioningResponseHandler } from '../versioning/index.js'; import { type AllowedElement, AllowedElementSchema } from './allowed_element.js'; import { MAX_RPC_BLOCKS_LEN, MAX_RPC_CHECKPOINTS_LEN, MAX_RPC_LEN, MAX_RPC_TXS_LEN } from './api_limit.js'; +import { + type BlockIncludeOptions, + BlockIncludeOptionsSchema, + type BlockResponse, + BlockResponseSchema, +} from './block_response.js'; +import { type ChainTip, ChainTipSchema, type ChainTips, ChainTipsSchema } from './chain_tips.js'; +import { type CheckpointParameter, CheckpointParameterSchema } from './checkpoint_parameter.js'; +import { + type CheckpointIncludeOptions, + CheckpointIncludeOptionsSchema, + type CheckpointResponse, + CheckpointResponseSchema, +} from './checkpoint_response.js'; import { type GetContractClassLogsResponse, GetContractClassLogsResponseSchema, @@ -73,21 +85,7 @@ import { type WorldStateSyncStatus, WorldStateSyncStatusSchema } from './world_s * The aztec node. * We will probably implement the additional interfaces by means other than Aztec Node as it's currently a privacy leak */ -export interface AztecNode - extends Pick< - L2BlockSource, - | 'getBlocks' - | 'getCheckpoints' - | 'getBlockHeader' - | 'getL2Tips' - | 'getCheckpointedBlocks' - | 'getCheckpointsDataForEpoch' - > { - /** - * Returns the tips of the L2 chain. - */ - getL2Tips(): Promise; - +export interface AztecNode { /** * Returns the sync status of the node's world state */ @@ -202,49 +200,76 @@ export interface AztecNode getL2ToL1Messages(epoch: EpochNumber): Promise; /** - * Get a block specified by its block number or 'latest'. - * @param blockParameter - The block parameter (block number, block hash, or 'latest'). - * @returns The requested block. + * Returns the block number at a given chain tip, or the latest proposed block number when + * `tip` is omitted. 
*/ - getBlock(blockParameter: BlockParameter): Promise; + getBlockNumber(tip?: ChainTip): Promise; /** - * Get a block specified by its hash. - * @param blockHash - The block hash being requested. - * @returns The requested block. + * Returns the checkpoint number at a given chain tip, or the latest checkpoint number when + * `tip` is omitted. + * + * @remarks **Semantic foot-gun**: block-side `'proposed'` means "latest proposed block" (chain + * head), but checkpoint-side `'proposed'` means "latest confirmed checkpoint" — pre-L1-confirm + * checkpoints are not exposed over RPC. `'checkpointed'` on the checkpoint side is equivalent. */ - getBlockByHash(blockHash: BlockHash): Promise; + getCheckpointNumber(tip?: ChainTip): Promise; - /** - * Get a block specified by its archive root. - * @param archive - The archive root being requested. - * @returns The requested block. - */ - getBlockByArchive(archive: Fr): Promise; + /** Returns the tips of the L2 chain. */ + getChainTips(): Promise; + + // TODO(spl/new-rpc-api): the following methods are kept on the interface as a stop-gap because + // `L2BlockStream` (used by PXE's block synchronizer) and `computeL2ToL1MembershipWitness` (used + // by end-to-end tests) still consume the internal archiver shapes. Remove them when those + // consumers are rewired to the unified `BlockResponse` / `CheckpointResponse` API. + /** @deprecated Scheduled for removal; use `getChainTips` for public callers. */ + getL2Tips(): Promise; + /** @deprecated Scheduled for removal; use `getBlock(param).then(r => r?.header)`. */ + getBlockHeader(number: BlockNumber | 'latest'): Promise; + /** @deprecated Scheduled for removal; use `getBlocks(from, limit, { includeL1PublishInfo: true, includeAttestations: true })`. */ + getCheckpointedBlocks(from: BlockNumber, limit: number): Promise; + /** @deprecated Scheduled for removal; use `getCheckpoints(from, limit)` over an explicit checkpoint range. 
*/ + getCheckpointsDataForEpoch(epoch: EpochNumber): Promise; /** - * Method to fetch the latest block number synchronized by the node. - * @returns The block number. + * Unified block fetch. Returns the block identified by `param`, with optional fields controlled + * by `options`. + * @param param - A block number, block hash, archive root, chain-tip name, or object variant. + * @param options - Narrowing options: `includeTransactions`, `includeL1PublishInfo`, `includeAttestations`. */ - getBlockNumber(): Promise; + getBlock( + param: BlockParameter, + options?: Opts, + ): Promise | undefined>; /** - * Fetches the latest proven block number. - * @returns The block number. + * Returns up to `limit` blocks starting from `from`, projected to the {@link BlockResponse} + * shape determined by `options`. */ - getProvenBlockNumber(): Promise; + getBlocks( + from: BlockNumber, + limit: number, + options?: Opts, + ): Promise[]>; /** - * Fetches the latest checkpointed block number. - * @returns The block number. + * Unified checkpoint fetch. Returns the checkpoint identified by `param`, with optional fields + * controlled by `options`. */ - getCheckpointedBlockNumber(): Promise; + getCheckpoint( + param: CheckpointParameter, + options?: Opts, + ): Promise | undefined>; /** - * Method to fetch the latest checkpoint number synchronized by the node. - * @returns The checkpoint number. + * Returns up to `limit` checkpoints starting from `from`, projected to the + * {@link CheckpointResponse} shape determined by `options`. */ - getCheckpointNumber(): Promise; + getCheckpoints( + from: CheckpointNumber, + limit: number, + options?: Opts, + ): Promise[]>; /** * Method to determine if the node is ready to accept transactions. @@ -259,14 +284,6 @@ export interface AztecNode */ getNodeInfo(): Promise; - /** - * Method to request blocks. Will attempt to return all requested blocks but will return only those available. - * @param from - The start of the range of blocks to return. 
- * @param limit - The maximum number of blocks to return. - * @returns The blocks requested. - */ - getBlocks(from: BlockNumber, limit: number): Promise; - /** * Method to fetch the current min fees. * @returns The current min fees. @@ -434,20 +451,6 @@ export interface AztecNode */ getPublicStorageAt(referenceBlock: BlockParameter, contract: AztecAddress, slot: Fr): Promise; - /** - * Returns the block header for a given block number, block hash, or 'latest'. - * @param block - The block parameter (block number, block hash, or 'latest'). Defaults to 'latest'. - * @returns The requested block header. - */ - getBlockHeader(block?: BlockParameter): Promise; - - /** - * Get a block header specified by its archive root. - * @param archive - The archive root being requested. - * @returns The requested block header. - */ - getBlockHeaderByArchive(archive: Fr): Promise; - /** Returns stats for validators if enabled. */ getValidatorsStats(): Promise; @@ -503,8 +506,6 @@ const MAX_SIGNATURES_PER_REGISTER_CALL = 100; const MAX_SIGNATURE_LEN = 10000; export const AztecNodeApiSchema: ApiSchemaFor = { - getL2Tips: z.function().args().returns(L2TipsSchema), - getWorldStateSyncStatus: z.function().args().returns(WorldStateSyncStatusSchema), findLeavesIndexes: z @@ -551,40 +552,53 @@ export const AztecNodeApiSchema: ApiSchemaFor = { .args(EpochNumberSchema) .returns(z.array(z.array(z.array(z.array(schemas.Fr))))), - getBlock: z.function().args(BlockParameterSchema).returns(L2Block.schema.optional()), + getBlockNumber: z.function().args(optional(ChainTipSchema)).returns(BlockNumberSchema), - getBlockByHash: z.function().args(BlockHash.schema).returns(L2Block.schema.optional()), + getCheckpointNumber: z.function().args(optional(ChainTipSchema)).returns(CheckpointNumberSchema), - getBlockByArchive: z.function().args(schemas.Fr).returns(L2Block.schema.optional()), + getChainTips: z.function().args().returns(ChainTipsSchema), - getBlockNumber: z.function().returns(BlockNumberSchema), 
- - getCheckpointNumber: z.function().returns(CheckpointNumberSchema), + getL2Tips: z.function().args().returns(L2TipsSchema), - getProvenBlockNumber: z.function().returns(BlockNumberSchema), + getBlockHeader: z + .function() + .args(z.union([BlockNumberSchema, z.literal('latest')])) + .returns(BlockHeader.schema.optional()), - getCheckpointedBlockNumber: z.function().returns(BlockNumberSchema), + getCheckpointedBlocks: z + .function() + .args(BlockNumberPositiveSchema, z.number().gt(0).lte(MAX_RPC_BLOCKS_LEN)) + .returns(z.array(CheckpointedL2Block.schema)), - isReady: z.function().returns(z.boolean()), + getCheckpointsDataForEpoch: z.function().args(EpochNumberSchema).returns(z.array(CheckpointDataSchema)), - getNodeInfo: z.function().returns(NodeInfoSchema), + getBlock: z + .function() + .args(BlockParameterSchema, optional(BlockIncludeOptionsSchema)) + .returns(BlockResponseSchema.optional()), getBlocks: z .function() - .args(BlockNumberPositiveSchema, z.number().gt(0).lte(MAX_RPC_BLOCKS_LEN)) - .returns(z.array(L2Block.schema)), + .args(BlockNumberPositiveSchema, z.number().gt(0).lte(MAX_RPC_BLOCKS_LEN), optional(BlockIncludeOptionsSchema)) + .returns(z.array(BlockResponseSchema)), - getCheckpoints: z + getCheckpoint: z .function() - .args(CheckpointNumberPositiveSchema, z.number().gt(0).lte(MAX_RPC_CHECKPOINTS_LEN)) - .returns(z.array(PublishedCheckpoint.schema)), + .args(CheckpointParameterSchema, optional(CheckpointIncludeOptionsSchema)) + .returns(CheckpointResponseSchema.optional()), - getCheckpointedBlocks: z + getCheckpoints: z .function() - .args(BlockNumberPositiveSchema, z.number().gt(0).lte(MAX_RPC_BLOCKS_LEN)) - .returns(z.array(CheckpointedL2Block.schema)), + .args( + CheckpointNumberPositiveSchema, + z.number().gt(0).lte(MAX_RPC_CHECKPOINTS_LEN), + optional(CheckpointIncludeOptionsSchema), + ) + .returns(z.array(CheckpointResponseSchema)), - getCheckpointsDataForEpoch: z.function().args(EpochNumberSchema).returns(z.array(CheckpointDataSchema)), + 
isReady: z.function().returns(z.boolean()), + + getNodeInfo: z.function().returns(NodeInfoSchema), getCurrentMinFees: z.function().returns(GasFees.schema), @@ -648,10 +662,6 @@ export const AztecNodeApiSchema: ApiSchemaFor = { getPublicStorageAt: z.function().args(BlockParameterSchema, schemas.AztecAddress, schemas.Fr).returns(schemas.Fr), - getBlockHeader: z.function().args(optional(BlockParameterSchema)).returns(BlockHeader.schema.optional()), - - getBlockHeaderByArchive: z.function().args(schemas.Fr).returns(BlockHeader.schema.optional()), - getValidatorsStats: z.function().returns(ValidatorsStatsSchema), getValidatorStats: z diff --git a/yarn-project/stdlib/src/interfaces/block_response.ts b/yarn-project/stdlib/src/interfaces/block_response.ts new file mode 100644 index 000000000000..3236855cb451 --- /dev/null +++ b/yarn-project/stdlib/src/interfaces/block_response.ts @@ -0,0 +1,79 @@ +import { + BlockNumberSchema, + CheckpointNumberSchema, + IndexWithinCheckpointSchema, +} from '@aztec/foundation/branded-types'; +import type { BlockNumber, CheckpointNumber, IndexWithinCheckpoint } from '@aztec/foundation/branded-types'; +import type { IfFlag, Prettify } from '@aztec/foundation/types'; + +import { z } from 'zod'; + +import { BlockHash } from '../block/block_hash.js'; +import { Body } from '../block/body.js'; +import { CommitteeAttestation } from '../block/proposal/committee_attestation.js'; +import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js'; +import { BlockHeader } from '../tx/block_header.js'; +import { type L1PublishInfo, L1PublishInfoSchema } from './l1_publish_info.js'; + +/** Options for narrowing the response of `getBlock` / `getBlocks`. */ +export type BlockIncludeOptions = { + /** Include the block body (tx effects). Off by default. */ + includeTransactions?: boolean; + /** Include L1 publish info (populated with `published: false` if not yet on L1). Off by default. 
*/ + includeL1PublishInfo?: boolean; + /** Include committee attestations. Empty array until the block's checkpoint is published. Off by default. */ + includeAttestations?: boolean; +}; + +export const BlockIncludeOptionsSchema: z.ZodType = z.object({ + includeTransactions: z.boolean().optional(), + includeL1PublishInfo: z.boolean().optional(), + includeAttestations: z.boolean().optional(), +}); + +/** Required metadata always present on a {@link BlockResponse}. */ +export type BlockResponseBase = { + /** Block header. */ + header: BlockHeader; + /** Archive tree snapshot after this block. */ + archive: AppendOnlyTreeSnapshot; + /** Hash of the block header. */ + hash: BlockHash; + /** Checkpoint number this block belongs to. */ + checkpointNumber: CheckpointNumber; + /** Position of the block within its checkpoint. */ + indexWithinCheckpoint: IndexWithinCheckpoint; + /** L2 block number. */ + number: BlockNumber; +}; + +/** + * RPC-surface representation of an L2 block. + * + * Generic over the include-options so that flagged fields become required when the caller passes a + * literal `true`. The default type argument ({@link BlockIncludeOptions}) yields the widest shape + * (all include-fields optional) — this is what the JSON-RPC wire layer validates against. + * + * @example + * const b: BlockResponse<{ includeTransactions: true }> = await node.getBlock(1, { includeTransactions: true }); + * b.body; // required, not optional + */ +export type BlockResponse = Prettify< + BlockResponseBase & + IfFlag & + IfFlag & + IfFlag +>; + +/** Zod schema for the widest {@link BlockResponse} shape (all include-gated fields optional). 
*/ +export const BlockResponseSchema = z.object({ + header: BlockHeader.schema, + archive: AppendOnlyTreeSnapshot.schema, + hash: BlockHash.schema, + checkpointNumber: CheckpointNumberSchema, + indexWithinCheckpoint: IndexWithinCheckpointSchema, + number: BlockNumberSchema, + body: Body.schema.optional(), + l1: L1PublishInfoSchema.optional(), + attestations: z.array(CommitteeAttestation.schema).optional(), +}); diff --git a/yarn-project/stdlib/src/interfaces/chain_tips.ts b/yarn-project/stdlib/src/interfaces/chain_tips.ts new file mode 100644 index 000000000000..fde42b09a136 --- /dev/null +++ b/yarn-project/stdlib/src/interfaces/chain_tips.ts @@ -0,0 +1,24 @@ +import { z } from 'zod'; + +import { type L2BlockTag, type L2Tips, L2TipsSchema } from '../block/l2_block_source.js'; + +/** + * Public chain-tip selectors usable in RPC requests. + * Omits internal-only tags (e.g. `proposedCheckpoint`) from {@link L2BlockTag}. + */ +export type ChainTip = Exclude; + +export const ChainTipSchema = z.union([ + z.literal('proposed'), + z.literal('checkpointed'), + z.literal('proven'), + z.literal('finalized'), +]) satisfies z.ZodType; + +/** + * Tips of the L2 chain. + * Omits the sequencer-internal `proposedCheckpoint` from the public RPC surface. + */ +export type ChainTips = Omit; + +export const ChainTipsSchema = L2TipsSchema.omit({ proposedCheckpoint: true }); diff --git a/yarn-project/stdlib/src/interfaces/checkpoint_parameter.ts b/yarn-project/stdlib/src/interfaces/checkpoint_parameter.ts new file mode 100644 index 000000000000..d6244456febe --- /dev/null +++ b/yarn-project/stdlib/src/interfaces/checkpoint_parameter.ts @@ -0,0 +1,22 @@ +import { CheckpointNumberSchema, SlotNumberSchema } from '@aztec/foundation/branded-types'; + +import { z } from 'zod'; + +import { ChainTipSchema } from './chain_tips.js'; + +/** + * Selector for a checkpoint in RPC calls. 
+ * + * Accepts a numeric checkpoint number (or `{ number }`), a slot number (`{ slot }`), a chain-tip + * name (e.g. `'proven'`), or `'latest'` (alias for `'proposed'` — on the checkpoint side, this + * means the most recent confirmed checkpoint). + */ +export const CheckpointParameterSchema = z.union([ + CheckpointNumberSchema, + ChainTipSchema, + z.literal('latest'), + z.object({ number: CheckpointNumberSchema }), + z.object({ slot: SlotNumberSchema }), +]); + +export type CheckpointParameter = z.infer; diff --git a/yarn-project/stdlib/src/interfaces/checkpoint_response.ts b/yarn-project/stdlib/src/interfaces/checkpoint_response.ts new file mode 100644 index 000000000000..1c127bc06be4 --- /dev/null +++ b/yarn-project/stdlib/src/interfaces/checkpoint_response.ts @@ -0,0 +1,84 @@ +import { BlockNumberSchema, CheckpointNumberSchema } from '@aztec/foundation/branded-types'; +import type { BlockNumber, CheckpointNumber } from '@aztec/foundation/branded-types'; +import { Fr } from '@aztec/foundation/curves/bn254'; +import { schemas } from '@aztec/foundation/schemas'; +import type { IfFlag, Prettify } from '@aztec/foundation/types'; + +import { z } from 'zod'; + +import { CommitteeAttestation } from '../block/proposal/committee_attestation.js'; +import { CheckpointHeader } from '../rollup/checkpoint_header.js'; +import { AppendOnlyTreeSnapshot } from '../trees/append_only_tree_snapshot.js'; +import { type BlockResponse, BlockResponseSchema } from './block_response.js'; +import { type L1PublishInfo, L1PublishInfoSchema } from './l1_publish_info.js'; + +/** Options for narrowing the response of `getCheckpoint` / `getCheckpoints`. */ +export type CheckpointIncludeOptions = { + /** Include the nested blocks. Off by default. */ + includeBlocks?: boolean; + /** When `includeBlocks` is true, include each block's body (tx effects). Off by default. No-op if `includeBlocks` is false. */ + includeTransactions?: boolean; + /** Include L1 publish info. Off by default. 
*/ + includeL1PublishInfo?: boolean; + /** Include committee attestations. Off by default. */ + includeAttestations?: boolean; +}; + +export const CheckpointIncludeOptionsSchema: z.ZodType = z.object({ + includeBlocks: z.boolean().optional(), + includeTransactions: z.boolean().optional(), + includeL1PublishInfo: z.boolean().optional(), + includeAttestations: z.boolean().optional(), +}); + +/** Required metadata always present on a {@link CheckpointResponse}. */ +export type CheckpointResponseBase = { + /** Checkpoint number. */ + number: CheckpointNumber; + /** Checkpoint header. */ + header: CheckpointHeader; + /** Archive tree snapshot after this checkpoint. */ + archive: AppendOnlyTreeSnapshot; + /** Hash of the checkpoint out messages. */ + checkpointOutHash: Fr; + /** First block number in this checkpoint. */ + startBlock: BlockNumber; + /** Number of blocks in this checkpoint. */ + blockCount: number; + /** Fee asset price modifier in basis points applied during this checkpoint. */ + feeAssetPriceModifier: bigint; +}; + +// Only forward `includeTransactions` to nested blocks — the other include-flags on the checkpoint +// options do not apply to the nested block responses (those carry no independent L1 / attestations). +type NestedBlockOpts = Opts extends { includeTransactions: true } ? { includeTransactions: true } : {}; + +/** + * RPC-surface representation of an L2 checkpoint. + * + * Generic over the include-options so that flagged fields become required when the caller passes a + * literal `true`. Only `includeTransactions` is forwarded to nested blocks, so + * `includeL1PublishInfo` / `includeAttestations` on a checkpoint request do not imply the same on + * its nested blocks. The default type argument ({@link CheckpointIncludeOptions}) yields the + * widest shape — what the JSON-RPC wire layer validates against. 
+ */ +export type CheckpointResponse = Prettify< + CheckpointResponseBase & + IfFlag>[] }> & + IfFlag & + IfFlag +>; + +/** Zod schema for the widest {@link CheckpointResponse} shape (all include-gated fields optional). */ +export const CheckpointResponseSchema = z.object({ + number: CheckpointNumberSchema, + header: CheckpointHeader.schema, + archive: AppendOnlyTreeSnapshot.schema, + checkpointOutHash: schemas.Fr, + startBlock: BlockNumberSchema, + blockCount: z.number(), + feeAssetPriceModifier: schemas.BigInt, + blocks: z.array(BlockResponseSchema).optional(), + l1: L1PublishInfoSchema.optional(), + attestations: z.array(CommitteeAttestation.schema).optional(), +}); diff --git a/yarn-project/stdlib/src/interfaces/client.ts b/yarn-project/stdlib/src/interfaces/client.ts index 78b3d5d82d08..2fafa5bcd151 100644 --- a/yarn-project/stdlib/src/interfaces/client.ts +++ b/yarn-project/stdlib/src/interfaces/client.ts @@ -1,6 +1,11 @@ export * from './aztec-node.js'; export * from './aztec-node-admin.js'; export * from './aztec-node-debug.js'; +export * from './block_response.js'; +export * from './chain_tips.js'; +export * from './checkpoint_parameter.js'; +export * from './checkpoint_response.js'; +export * from './l1_publish_info.js'; export * from './private_kernel_prover.js'; export * from './get_logs_response.js'; export * from './api_limit.js'; diff --git a/yarn-project/stdlib/src/interfaces/configs.ts b/yarn-project/stdlib/src/interfaces/configs.ts index 55f9142aca33..54bfe498b232 100644 --- a/yarn-project/stdlib/src/interfaces/configs.ts +++ b/yarn-project/stdlib/src/interfaces/configs.ts @@ -83,6 +83,8 @@ export interface SequencerConfig { minBlocksForCheckpoint?: number; /** Skip publishing checkpoint proposals probability (for testing checkpoint prunes only) */ skipPublishingCheckpointsPercent?: number; + /** Skip broadcasting checkpoint and block proposals via gossipsub when proposer (for testing only) */ + skipBroadcastProposals?: boolean; } export const 
SequencerConfigSchema = zodFor()( @@ -124,6 +126,7 @@ export const SequencerConfigSchema = zodFor()( skipPushProposedBlocksToArchiver: z.boolean().optional(), minBlocksForCheckpoint: z.number().positive().optional(), skipPublishingCheckpointsPercent: z.number().gte(0).lte(100).optional(), + skipBroadcastProposals: z.boolean().optional(), }), ); @@ -145,7 +148,8 @@ type SequencerConfigOptionalKeys = | 'maxTxsPerCheckpoint' | 'maxL2BlockGas' | 'maxDABlockGas' - | 'redistributeCheckpointBudget'; + | 'redistributeCheckpointBudget' + | 'skipBroadcastProposals'; export type ResolvedSequencerConfig = Prettify< Required> & Pick diff --git a/yarn-project/stdlib/src/interfaces/l1_publish_info.ts b/yarn-project/stdlib/src/interfaces/l1_publish_info.ts new file mode 100644 index 000000000000..b65d79b9b122 --- /dev/null +++ b/yarn-project/stdlib/src/interfaces/l1_publish_info.ts @@ -0,0 +1,40 @@ +import { schemas } from '@aztec/foundation/schemas'; + +import { z } from 'zod'; + +import { L1PublishedData } from '../checkpoint/published_checkpoint.js'; + +/** + * L1 publication info for a block or checkpoint. + * + * A discriminated union over `published`: when `false`, the block/checkpoint has not yet been + * published to L1. When `true`, the L1 block info (number, timestamp, hash) is present. + * + * Distinct from {@link L1PublishedData}, which is always the "published" case. + */ +export type L1PublishInfo = + | { published: false } + | { published: true; blockNumber: bigint; timestamp: bigint; blockHash: string }; + +export const L1PublishInfoSchema = z.union([ + z.object({ published: z.literal(false) }), + z.object({ + published: z.literal(true), + blockNumber: schemas.BigInt, + timestamp: schemas.BigInt, + blockHash: z.string(), + }), +]); + +/** Projects the internal {@link L1PublishedData} (or its absence) to the public {@link L1PublishInfo} shape. 
*/ +export function l1PublishInfoFromL1PublishedData(data: L1PublishedData | undefined): L1PublishInfo { + if (!data) { + return { published: false }; + } + return { + published: true, + blockNumber: data.blockNumber, + timestamp: data.timestamp, + blockHash: data.blockHash, + }; +} diff --git a/yarn-project/stdlib/src/interfaces/server.ts b/yarn-project/stdlib/src/interfaces/server.ts index 439f827bd229..96b57aa4bdbb 100644 --- a/yarn-project/stdlib/src/interfaces/server.ts +++ b/yarn-project/stdlib/src/interfaces/server.ts @@ -2,6 +2,11 @@ export * from './allowed_element.js'; export * from './api_limit.js'; export * from './archiver.js'; export * from './aztec-node.js'; +export * from './block_response.js'; +export * from './chain_tips.js'; +export * from './checkpoint_parameter.js'; +export * from './checkpoint_response.js'; +export * from './l1_publish_info.js'; export * from './block-builder.js'; export * from './configs.js'; export * from './epoch-prover.js'; diff --git a/yarn-project/stdlib/src/interfaces/validator.ts b/yarn-project/stdlib/src/interfaces/validator.ts index d16e6723be4e..bda4d3a3bc42 100644 --- a/yarn-project/stdlib/src/interfaces/validator.ts +++ b/yarn-project/stdlib/src/interfaces/validator.ts @@ -19,6 +19,7 @@ import type { PeerId } from '@libp2p/interface'; import { z } from 'zod'; import type { CommitteeAttestationsAndSigners } from '../block/index.js'; +import type { ChainConfig } from '../config/chain-config.js'; import { type LocalSignerConfig, LocalSignerConfigSchema, @@ -32,6 +33,9 @@ import { AllowedElementSchema } from './allowed_element.js'; */ export type ValidatorClientConfig = ValidatorHASignerConfig & LocalSignerConfig & { + /** The L1 chain id used for EIP-712 proposal-path signing. 
*/ + l1ChainId: ChainConfig['l1ChainId']; + /** The private keys of the validators participating in attestation duties */ validatorPrivateKeys?: SecretValue<`0x${string}`[]>; @@ -90,6 +94,7 @@ export type ValidatorClientFullConfig = ValidatorClientConfig & export const ValidatorClientConfigSchema = zodFor>()( ValidatorHASignerConfigSchema.merge(LocalSignerConfigSchema).extend({ + l1ChainId: z.number().int().nonnegative(), validatorAddresses: z.array(schemas.EthAddress).optional(), disableValidator: z.boolean(), disabledValidators: z.array(schemas.EthAddress), diff --git a/yarn-project/stdlib/src/p2p/attestation_utils.test.ts b/yarn-project/stdlib/src/p2p/attestation_utils.test.ts index c06353f6f959..29d3dee63a95 100644 --- a/yarn-project/stdlib/src/p2p/attestation_utils.test.ts +++ b/yarn-project/stdlib/src/p2p/attestation_utils.test.ts @@ -5,19 +5,25 @@ import { Fr } from '@aztec/foundation/curves/bn254'; import { jest } from '@jest/globals'; import { CheckpointHeader } from '../rollup/index.js'; +import { TEST_COORDINATION_SIGNATURE_CONTEXT } from '../tests/mocks.js'; import { trimAttestations } from './attestation_utils.js'; import { CheckpointAttestation } from './checkpoint_attestation.js'; +import { CheckpointProposal } from './checkpoint_proposal.js'; import { ConsensusPayload } from './consensus_payload.js'; -import { SignatureDomainSeparator, getHashedSignaturePayloadEthSignedMessage } from './signature_utils.js'; +import { getHashedSignaturePayloadTypedData } from './signature_utils.js'; function makeAttestation(signer: Secp256k1Signer): CheckpointAttestation { const header = CheckpointHeader.random({ slotNumber: SlotNumber(0) }); - const payload = new ConsensusPayload(header, Fr.random(), 0n); - const attestationHash = getHashedSignaturePayloadEthSignedMessage( - payload, - SignatureDomainSeparator.checkpointAttestation, + const payload = new ConsensusPayload(header, Fr.random(), 0n, TEST_COORDINATION_SIGNATURE_CONTEXT); + const attestationHash = 
getHashedSignaturePayloadTypedData(payload); + const proposal = new CheckpointProposal( + header, + payload.archive, + payload.feeAssetPriceModifier, + signer.sign(attestationHash), + TEST_COORDINATION_SIGNATURE_CONTEXT, ); - const proposalHash = getHashedSignaturePayloadEthSignedMessage(payload, SignatureDomainSeparator.checkpointProposal); + const proposalHash = getHashedSignaturePayloadTypedData(proposal); return new CheckpointAttestation(payload, signer.sign(attestationHash), signer.sign(proposalHash)); } diff --git a/yarn-project/stdlib/src/p2p/block_proposal.test.ts b/yarn-project/stdlib/src/p2p/block_proposal.test.ts index 884bac55f58c..2f6601b74506 100644 --- a/yarn-project/stdlib/src/p2p/block_proposal.test.ts +++ b/yarn-project/stdlib/src/p2p/block_proposal.test.ts @@ -1,9 +1,11 @@ // Serde test for the block proposal type import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; +import { Signature } from '@aztec/foundation/eth-signature'; -import { makeBlockProposal } from '../tests/mocks.js'; +import { TEST_COORDINATION_SIGNATURE_CONTEXT, makeBlockProposal } from '../tests/mocks.js'; import { Tx } from '../tx/tx.js'; import { BlockProposal } from './block_proposal.js'; +import { SignedTxs } from './signed_txs.js'; describe('Block Proposal serialization / deserialization', () => { const checkEquivalence = (serialized: BlockProposal, deserialized: BlockProposal) => { @@ -66,4 +68,28 @@ describe('Block Proposal serialization / deserialization', () => { expect(proposal.slotNumber).toBe(proposal.blockHeader.getSlot()); expect(proposal.blockNumber).toBe(proposal.blockHeader.getBlockNumber()); }); + + it('getSender returns undefined when inner signedTxs carries a foreign signing domain', async () => { + const account = Secp256k1Signer.random(); + const txs = await Promise.all([Tx.random(), Tx.random()]); + const proposal = await makeBlockProposal({ txs, signer: account }); + + const foreignContext = { + 
...TEST_COORDINATION_SIGNATURE_CONTEXT, + chainId: TEST_COORDINATION_SIGNATURE_CONTEXT.chainId + 1, + }; + const foreignSignedTxs = new SignedTxs(txs, Signature.random(), foreignContext); + const tampered = new BlockProposal( + proposal.blockHeader, + proposal.indexWithinCheckpoint, + proposal.inHash, + proposal.archiveRoot, + proposal.txHashes, + proposal.signature, + proposal.signatureContext, + foreignSignedTxs, + ); + + expect(tampered.getSender()).toBeUndefined(); + }); }); diff --git a/yarn-project/stdlib/src/p2p/block_proposal.ts b/yarn-project/stdlib/src/p2p/block_proposal.ts index 3b5cd80a59a4..de5a36e2ca64 100644 --- a/yarn-project/stdlib/src/p2p/block_proposal.ts +++ b/yarn-project/stdlib/src/p2p/block_proposal.ts @@ -5,14 +5,15 @@ import { IndexWithinCheckpoint, SlotNumber, } from '@aztec/foundation/branded-types'; -import { type BaseBuffer32, Buffer32 } from '@aztec/foundation/buffer'; +import type { BaseBuffer32 } from '@aztec/foundation/buffer'; import { keccak256 } from '@aztec/foundation/crypto/keccak'; -import { tryRecoverAddress } from '@aztec/foundation/crypto/secp256k1-signer'; import { Fr } from '@aztec/foundation/curves/bn254'; import type { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; +import type { TypedDataDefinition } from 'viem'; + import type { L2Block } from '../block/l2_block.js'; import type { L2BlockInfo } from '../block/l2_block_info.js'; import { MAX_TXS_PER_BLOCK } from '../deserialization/index.js'; @@ -22,9 +23,15 @@ import { TxHash } from '../tx/index.js'; import type { Tx } from '../tx/tx.js'; import { Gossipable } from './gossipable.js'; import { - SignatureDomainSeparator, - getHashedSignaturePayload, - getHashedSignaturePayloadEthSignedMessage, + type CoordinationSignatureContext, + type CoordinationSignatureType, + EMPTY_COORDINATION_SIGNATURE_CONTEXT, + type Signable, + 
coordinationSignatureContextEquals, + getCoordinationSignatureTypedData, + readCoordinationSignatureContext, + recoverCoordinationSigner, + serializeCoordinationSignatureContext, } from './signature_utils.js'; import { SignedTxs } from './signed_txs.js'; import { TopicType } from './topic_type.js'; @@ -49,10 +56,12 @@ export type BlockProposalOptions = { * to be included in a block within a checkpoint. This is used for non-last blocks in a slot. * The last block is sent as part of a CheckpointProposal. */ -export class BlockProposal extends Gossipable { +export class BlockProposal extends Gossipable implements Signable { static override p2pTopic = TopicType.block_proposal; - private sender: EthAddress | undefined; + readonly primaryType: CoordinationSignatureType = 'BlockProposal'; + + private cachedSender: EthAddress | undefined | null = undefined; constructor( /** The per-block header containing block state and global variables */ @@ -73,6 +82,9 @@ export class BlockProposal extends Gossipable { /** The proposer's signature over the block data */ public readonly signature: Signature, + /** The signing domain (chainId + rollupAddress) the signature is bound to */ + public readonly signatureContext: CoordinationSignatureContext, + /** The signed transactions in the block (optional, for DA guarantees) */ public readonly signedTxs?: SignedTxs, ) { @@ -114,9 +126,8 @@ export class BlockProposal extends Gossipable { * Get the payload to sign for this block proposal. 
* The signature is over: blockHeader + indexWithinCheckpoint + inHash + archiveRoot + txHashes */ - getPayloadToSign(domainSeparator: SignatureDomainSeparator): Buffer { + getPayloadToSign(): Buffer { return serializeToBuffer([ - domainSeparator, this.blockHeader, this.indexWithinCheckpoint, this.inHash, @@ -134,7 +145,9 @@ export class BlockProposal extends Gossipable { archiveRoot: Fr, txHashes: TxHash[], txs: Tx[] | undefined, - payloadSigner: (payload: Buffer32, context: SigningContext) => Promise, + signatureContext: CoordinationSignatureContext, + proposalSigner: (typedData: TypedDataDefinition, context: SigningContext) => Promise, + txsSigner?: (typedData: TypedDataDefinition, context: SigningContext) => Promise, ): Promise { // Create a temporary proposal to get the payload to sign const tempProposal = new BlockProposal( @@ -144,6 +157,7 @@ export class BlockProposal extends Gossipable { archiveRoot, txHashes, Signature.empty(), + signatureContext, ); // Create the block signing context @@ -155,47 +169,64 @@ export class BlockProposal extends Gossipable { dutyType: DutyType.BLOCK_PROPOSAL, }; - const hashed = getHashedSignaturePayload(tempProposal, SignatureDomainSeparator.blockProposal); - const sig = await payloadSigner(hashed, blockContext); + const typedData = getCoordinationSignatureTypedData(tempProposal); + const sig = await proposalSigner(typedData, blockContext); // If txs are provided, sign them as well let signedTxs: SignedTxs | undefined; if (txs) { const txsSigningContext: SigningContext = { dutyType: DutyType.TXS }; - const txsSigner = (payload: Buffer32) => payloadSigner(payload, txsSigningContext); - signedTxs = await SignedTxs.createFromSigner(txs, txsSigner); + if (!txsSigner) { + throw new Error('signed_txs requires a typed-data signer'); + } + signedTxs = await SignedTxs.createFromSigner(txs, signatureContext, typedData => + txsSigner(typedData, txsSigningContext), + ); } - return new BlockProposal(blockHeader, indexWithinCheckpoint, 
inHash, archiveRoot, txHashes, sig, signedTxs); + return new BlockProposal( + blockHeader, + indexWithinCheckpoint, + inHash, + archiveRoot, + txHashes, + sig, + signatureContext, + signedTxs, + ); } /** * Lazily evaluate the sender of the proposal; result is cached. - * If there's signedTxs, also verifies the signedTxs sender matches the block proposal sender. - * @returns The sender address, or undefined if signature recovery fails or senders don't match + * If there's signedTxs, also verifies that its signing domain matches this proposal's and + * that the signedTxs sender matches the block proposal sender. This prevents a proposer + * from wrapping a foreign-chain SignedTxs bundle inside a local-chain proposal. + * @returns The sender address, or undefined if signature recovery fails or inner/outer mismatch */ getSender(): EthAddress | undefined { - if (!this.sender) { - const hashed = getHashedSignaturePayloadEthSignedMessage(this, SignatureDomainSeparator.blockProposal); - const blockSender = tryRecoverAddress(hashed, this.signature); + if (this.cachedSender === undefined) { + const blockSender = recoverCoordinationSigner(this, this.signature); - // If there's signedTxs, verify the sender matches if (blockSender && this.signedTxs) { + if (!coordinationSignatureContextEquals(this.signedTxs.signatureContext, this.signatureContext)) { + this.cachedSender = null; + return undefined; + } const txsSender = this.signedTxs.getSender(); if (!txsSender || !txsSender.equals(blockSender)) { - return undefined; // Sender mismatch - fail + this.cachedSender = null; + return undefined; } } - // Cache the sender for later use - this.sender = blockSender; + this.cachedSender = blockSender ?? null; } - return this.sender; + return this.cachedSender ?? 
undefined; } getPayload() { - return this.getPayloadToSign(SignatureDomainSeparator.blockProposal); + return this.getPayloadToSign(); } toBuffer(): Buffer { @@ -205,6 +236,7 @@ export class BlockProposal extends Gossipable { this.inHash, this.archiveRoot, this.signature, + serializeCoordinationSignatureContext(this.signatureContext), this.txHashes.length, this.txHashes, ]; @@ -225,6 +257,7 @@ export class BlockProposal extends Gossipable { const inHash = reader.readObject(Fr); const archiveRoot = reader.readObject(Fr); const signature = reader.readObject(Signature); + const signatureContext = readCoordinationSignatureContext(reader); const txHashCount = reader.readNumber(); if (txHashCount > MAX_TXS_PER_BLOCK) { throw new Error(`txHashes count ${txHashCount} exceeds maximum ${MAX_TXS_PER_BLOCK}`); @@ -242,12 +275,21 @@ export class BlockProposal extends Gossipable { archiveRoot, txHashes, signature, + signatureContext, signedTxs, ); } } - return new BlockProposal(blockHeader, indexWithinCheckpoint, inHash, archiveRoot, txHashes, signature); + return new BlockProposal( + blockHeader, + indexWithinCheckpoint, + inHash, + archiveRoot, + txHashes, + signature, + signatureContext, + ); } getSize(): number { @@ -257,6 +299,8 @@ export class BlockProposal extends Gossipable { this.inHash.size + this.archiveRoot.size + this.signature.getSize() + + 4 /* chainId */ + + 20 /* rollupAddress */ + 4 /* txHashes.length */ + this.txHashes.length * TxHash.SIZE + 4 /* hasSignedTxs flag */ + @@ -265,7 +309,15 @@ export class BlockProposal extends Gossipable { } static empty(): BlockProposal { - return new BlockProposal(BlockHeader.empty(), IndexWithinCheckpoint(0), Fr.ZERO, Fr.ZERO, [], Signature.empty()); + return new BlockProposal( + BlockHeader.empty(), + IndexWithinCheckpoint(0), + Fr.ZERO, + Fr.ZERO, + [], + Signature.empty(), + EMPTY_COORDINATION_SIGNATURE_CONTEXT, + ); } static random(): BlockProposal { @@ -276,6 +328,7 @@ export class BlockProposal extends Gossipable { 
Fr.random(), [TxHash.random(), TxHash.random()], Signature.random(), + EMPTY_COORDINATION_SIGNATURE_CONTEXT, ); } @@ -287,6 +340,8 @@ export class BlockProposal extends Gossipable { archiveRoot: this.archiveRoot.toString(), signature: this.signature.toString(), txHashes: this.txHashes.map(h => h.toString()), + chainId: this.signatureContext.chainId, + rollupAddress: this.signatureContext.rollupAddress.toString(), }; } @@ -312,6 +367,7 @@ export class BlockProposal extends Gossipable { this.archiveRoot, this.txHashes, this.signature, + this.signatureContext, ); } } diff --git a/yarn-project/stdlib/src/p2p/checkpoint_attestation.ts b/yarn-project/stdlib/src/p2p/checkpoint_attestation.ts index c99298b34bd4..f3550d638e20 100644 --- a/yarn-project/stdlib/src/p2p/checkpoint_attestation.ts +++ b/yarn-project/stdlib/src/p2p/checkpoint_attestation.ts @@ -1,7 +1,6 @@ import { CheckpointAttestationHash, SlotNumber } from '@aztec/foundation/branded-types'; import type { BaseBuffer32 } from '@aztec/foundation/buffer'; import { keccak256 } from '@aztec/foundation/crypto/keccak'; -import { tryRecoverAddress } from '@aztec/foundation/crypto/secp256k1-signer'; import type { Fr } from '@aztec/foundation/curves/bn254'; import type { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; @@ -13,7 +12,7 @@ import type { ZodFor } from '../schemas/index.js'; import { CheckpointProposal } from './checkpoint_proposal.js'; import { ConsensusPayload } from './consensus_payload.js'; import { Gossipable } from './gossipable.js'; -import { SignatureDomainSeparator, getHashedSignaturePayloadEthSignedMessage } from './signature_utils.js'; +import { type CoordinationSignatureContext, recoverCoordinationSigner } from './signature_utils.js'; import { TopicType } from './topic_type.js'; export type { CheckpointAttestationHash } from '@aztec/foundation/branded-types'; @@ -27,8 +26,8 @@ export type { CheckpointAttestationHash } from 
'@aztec/foundation/branded-types' export class CheckpointAttestation extends Gossipable { static override p2pTopic = TopicType.checkpoint_attestation; - private sender: EthAddress | undefined; - private proposer: EthAddress | undefined; + private cachedSender: EthAddress | undefined | null = undefined; + private cachedProposer: EthAddress | undefined | null = undefined; constructor( /** The payload of the message, and what the signature is over */ @@ -65,22 +64,19 @@ export class CheckpointAttestation extends Gossipable { return this.payload.header.slotNumber; } + get signatureContext(): CoordinationSignatureContext { + return this.payload.signatureContext; + } + /** * Lazily evaluate and cache the signer of the attestation * @returns The signer of the attestation, or undefined if signature recovery fails */ getSender(): EthAddress | undefined { - if (!this.sender) { - // Recover the sender from the attestation - const hashed = getHashedSignaturePayloadEthSignedMessage( - this.payload, - SignatureDomainSeparator.checkpointAttestation, - ); - // Cache the sender for later use - this.sender = tryRecoverAddress(hashed, this.signature); + if (this.cachedSender === undefined) { + this.cachedSender = recoverCoordinationSigner(this.payload, this.signature) ?? null; } - - return this.sender; + return this.cachedSender ?? undefined; } /** @@ -88,7 +84,7 @@ export class CheckpointAttestation extends Gossipable { * @returns The proposer of the checkpoint */ getProposer(): EthAddress | undefined { - if (!this.proposer) { + if (this.cachedProposer === undefined) { // Create a temporary CheckpointProposal to recover the proposer address. // We need to use CheckpointProposal because it has a different getPayloadToSign() // implementation than ConsensusPayload (uses serializeToBuffer vs ABI encoding). 
@@ -97,16 +93,15 @@ export class CheckpointAttestation extends Gossipable { this.payload.archive, this.payload.feeAssetPriceModifier, this.proposerSignature, + this.payload.signatureContext, ); - // Cache the proposer for later use - this.proposer = proposal.getSender(); + this.cachedProposer = proposal.getSender() ?? null; } - - return this.proposer; + return this.cachedProposer ?? undefined; } getPayload(): Buffer { - return this.payload.getPayloadToSign(SignatureDomainSeparator.checkpointAttestation); + return this.payload.getPayloadToSign(); } toBuffer(): Buffer { diff --git a/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts b/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts index 91cc1b764f9d..dc832cda5138 100644 --- a/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts +++ b/yarn-project/stdlib/src/p2p/checkpoint_proposal.ts @@ -4,14 +4,15 @@ import { IndexWithinCheckpoint, SlotNumber, } from '@aztec/foundation/branded-types'; -import { type BaseBuffer32, Buffer32 } from '@aztec/foundation/buffer'; +import type { BaseBuffer32 } from '@aztec/foundation/buffer'; import { keccak256 } from '@aztec/foundation/crypto/keccak'; -import { tryRecoverAddress } from '@aztec/foundation/crypto/secp256k1-signer'; import { Fr } from '@aztec/foundation/curves/bn254'; import type { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; import { BufferReader, serializeSignedBigInt, serializeToBuffer } from '@aztec/foundation/serialize'; +import type { TypedDataDefinition } from 'viem'; + import type { L2BlockInfo } from '../block/l2_block_info.js'; import { MAX_TXS_PER_BLOCK } from '../deserialization/index.js'; import { DutyType, type SigningContext } from '../ha-signing/index.js'; @@ -22,9 +23,14 @@ import type { Tx } from '../tx/tx.js'; import { BlockProposal } from './block_proposal.js'; import { Gossipable } from './gossipable.js'; import { - SignatureDomainSeparator, - getHashedSignaturePayload, - 
getHashedSignaturePayloadEthSignedMessage, + type CoordinationSignatureContext, + type CoordinationSignatureType, + EMPTY_COORDINATION_SIGNATURE_CONTEXT, + type Signable, + getCoordinationSignatureTypedData, + readCoordinationSignatureContext, + recoverCoordinationSigner, + serializeCoordinationSignatureContext, } from './signature_utils.js'; import { SignedTxs } from './signed_txs.js'; import { TopicType } from './topic_type.js'; @@ -69,10 +75,12 @@ export type CheckpointLastBlock = Omit & { * It includes the aggregated checkpoint header that validators will attest to, plus optionally * the last block's info for nodes to re-execute. This marks the completion of a slot's worth of blocks. */ -export class CheckpointProposal extends Gossipable { +export class CheckpointProposal extends Gossipable implements Signable { static override p2pTopic = TopicType.checkpoint_proposal; - private sender: EthAddress | undefined; + readonly primaryType: CoordinationSignatureType = 'CheckpointProposal'; + + private cachedSender: EthAddress | undefined | null = undefined; constructor( /** The aggregated checkpoint header for consensus */ @@ -87,6 +95,9 @@ export class CheckpointProposal extends Gossipable { /** The proposer's signature over the checkpoint payload (checkpointHeader + archive + feeAssetPriceModifier) */ public readonly signature: Signature, + /** The signing domain (chainId + rollupAddress) the signature is bound to */ + public readonly signatureContext: CoordinationSignatureContext, + /** Optional last block info, including its own signature for BlockProposal extraction */ public readonly lastBlock?: CheckpointLastBlock, ) { @@ -117,6 +128,7 @@ export class CheckpointProposal extends Gossipable { this.archive, this.lastBlock.txHashes, this.lastBlock.signature, + this.signatureContext, this.lastBlock.signedTxs, ); } @@ -148,13 +160,8 @@ export class CheckpointProposal extends Gossipable { * Get the payload to sign for this checkpoint proposal. 
* The signature is over the checkpoint header + archive root + feeAssetPriceModifier (for consensus). */ - getPayloadToSign(domainSeparator: SignatureDomainSeparator): Buffer { - return serializeToBuffer([ - domainSeparator, - this.checkpointHeader, - this.archive, - serializeSignedBigInt(this.feeAssetPriceModifier), - ]); + getPayloadToSign(): Buffer { + return serializeToBuffer([this.checkpointHeader, this.archive, serializeSignedBigInt(this.feeAssetPriceModifier)]); } static async createProposalFromSigner( @@ -163,7 +170,8 @@ export class CheckpointProposal extends Gossipable { checkpointNumber: CheckpointNumber, feeAssetPriceModifier: bigint, lastBlockProposal: BlockProposal | undefined, - payloadSigner: (payload: Buffer32, context: SigningContext) => Promise, + signatureContext: CoordinationSignatureContext, + payloadSigner: (typedData: TypedDataDefinition, context: SigningContext) => Promise, ): Promise { // Sign the checkpoint payload with CHECKPOINT_PROPOSAL duty type const tempProposal = new CheckpointProposal( @@ -171,22 +179,23 @@ export class CheckpointProposal extends Gossipable { archiveRoot, feeAssetPriceModifier, Signature.empty(), + signatureContext, ); - const checkpointHash = getHashedSignaturePayload(tempProposal, SignatureDomainSeparator.checkpointProposal); - const checkpointContext: SigningContext = { slot: checkpointHeader.slotNumber, checkpointNumber, dutyType: DutyType.CHECKPOINT_PROPOSAL, }; - const checkpointSignature = await payloadSigner(checkpointHash, checkpointContext); + const typedData = getCoordinationSignatureTypedData(tempProposal); + const checkpointSignature = await payloadSigner(typedData, checkpointContext); return new CheckpointProposal( checkpointHeader, archiveRoot, feeAssetPriceModifier, checkpointSignature, + signatureContext, lastBlockProposal, ); } @@ -197,28 +206,26 @@ export class CheckpointProposal extends Gossipable { * @returns The sender address, or undefined if signature recovery fails or senders don't match 
*/ getSender(): EthAddress | undefined { - if (!this.sender) { - const hashed = getHashedSignaturePayloadEthSignedMessage(this, SignatureDomainSeparator.checkpointProposal); - const checkpointSender = tryRecoverAddress(hashed, this.signature); + if (this.cachedSender === undefined) { + const checkpointSender = recoverCoordinationSigner(this, this.signature); - // If there's a lastBlock, verify the block proposal sender matches if (checkpointSender && this.lastBlock) { const blockProposal = this.getBlockProposal(); const blockSender = blockProposal?.getSender(); if (!blockSender || !blockSender.equals(checkpointSender)) { - return undefined; // Sender mismatch - fail + this.cachedSender = null; + return undefined; } } - // Cache the sender for later use - this.sender = checkpointSender; + this.cachedSender = checkpointSender ?? null; } - return this.sender; + return this.cachedSender ?? undefined; } getPayload() { - return this.getPayloadToSign(SignatureDomainSeparator.checkpointProposal); + return this.getPayloadToSign(); } toBuffer(): Buffer { @@ -227,6 +234,7 @@ export class CheckpointProposal extends Gossipable { this.archive, serializeSignedBigInt(this.feeAssetPriceModifier), this.signature, + serializeCoordinationSignatureContext(this.signatureContext), ]; if (this.lastBlock) { @@ -256,6 +264,7 @@ export class CheckpointProposal extends Gossipable { const archive = reader.readObject(Fr); const feeAssetPriceModifier = reader.readInt256(); const signature = reader.readObject(Signature); + const signatureContext = readCoordinationSignatureContext(reader); const hasLastBlock = reader.readNumber(); @@ -277,7 +286,7 @@ export class CheckpointProposal extends Gossipable { } } - return new CheckpointProposal(checkpointHeader, archive, feeAssetPriceModifier, signature, { + return new CheckpointProposal(checkpointHeader, archive, feeAssetPriceModifier, signature, signatureContext, { blockHeader, indexWithinCheckpoint, txHashes, @@ -286,7 +295,7 @@ export class 
CheckpointProposal extends Gossipable { }); } - return new CheckpointProposal(checkpointHeader, archive, feeAssetPriceModifier, signature); + return new CheckpointProposal(checkpointHeader, archive, feeAssetPriceModifier, signature, signatureContext); } getSize(): number { @@ -295,6 +304,8 @@ export class CheckpointProposal extends Gossipable { this.archive.size + this.signature.getSize() + 8 /* feeAssetPriceModifier */ + + 4 /* chainId */ + + 20 /* rollupAddress */ + 4; /* hasLastBlock flag */ if (this.lastBlock) { @@ -312,16 +323,29 @@ export class CheckpointProposal extends Gossipable { } static empty(): CheckpointProposal { - return new CheckpointProposal(CheckpointHeader.empty(), Fr.ZERO, 0n, Signature.empty()); + return new CheckpointProposal( + CheckpointHeader.empty(), + Fr.ZERO, + 0n, + Signature.empty(), + EMPTY_COORDINATION_SIGNATURE_CONTEXT, + ); } static random(): CheckpointProposal { - return new CheckpointProposal(CheckpointHeader.random(), Fr.random(), 0n, Signature.random(), { - blockHeader: BlockHeader.random(), - indexWithinCheckpoint: IndexWithinCheckpoint(Math.floor(Math.random() * 5)), - txHashes: [TxHash.random(), TxHash.random()], - signature: Signature.random(), - }); + return new CheckpointProposal( + CheckpointHeader.random(), + Fr.random(), + 0n, + Signature.random(), + EMPTY_COORDINATION_SIGNATURE_CONTEXT, + { + blockHeader: BlockHeader.random(), + indexWithinCheckpoint: IndexWithinCheckpoint(Math.floor(Math.random() * 5)), + txHashes: [TxHash.random(), TxHash.random()], + signature: Signature.random(), + }, + ); } toInspect() { @@ -330,6 +354,8 @@ export class CheckpointProposal extends Gossipable { archive: this.archive.toString(), signature: this.signature.toString(), feeAssetPriceModifier: this.feeAssetPriceModifier.toString(), + chainId: this.signatureContext.chainId, + rollupAddress: this.signatureContext.rollupAddress.toString(), lastBlock: this.lastBlock ? 
{ blockHeader: this.lastBlock.blockHeader.toInspect(), @@ -346,7 +372,13 @@ export class CheckpointProposal extends Gossipable { * Used when the lastBlock has been extracted and stored separately. */ toCore(): CheckpointProposalCore { - return new CheckpointProposal(this.checkpointHeader, this.archive, this.feeAssetPriceModifier, this.signature); + return new CheckpointProposal( + this.checkpointHeader, + this.archive, + this.feeAssetPriceModifier, + this.signature, + this.signatureContext, + ); } } diff --git a/yarn-project/stdlib/src/p2p/consensus_payload.ts b/yarn-project/stdlib/src/p2p/consensus_payload.ts index 35b65afba951..3054f02d5724 100644 --- a/yarn-project/stdlib/src/p2p/consensus_payload.ts +++ b/yarn-project/stdlib/src/p2p/consensus_payload.ts @@ -10,10 +10,21 @@ import { z } from 'zod'; import type { Checkpoint } from '../checkpoint/checkpoint.js'; import { CheckpointHeader } from '../rollup/checkpoint_header.js'; import type { CheckpointProposal, CheckpointProposalCore } from './checkpoint_proposal.js'; -import type { Signable, SignatureDomainSeparator } from './signature_utils.js'; +import { + type CoordinationSignatureContext, + type CoordinationSignatureType, + EMPTY_COORDINATION_SIGNATURE_CONTEXT, + type Signable, + coordinationSignatureContextEquals, + coordinationSignatureContextSchema, + readCoordinationSignatureContext, + serializeCoordinationSignatureContext, +} from './signature_utils.js'; /** Checkpoint consensus payload as signed by validators and verified on L1. 
*/ export class ConsensusPayload implements Signable { + readonly primaryType: CoordinationSignatureType = 'CheckpointAttestation'; + private size: number | undefined; constructor( @@ -22,7 +33,9 @@ export class ConsensusPayload implements Signable { /** The archive root after the block is added */ public readonly archive: Fr, /** The fee asset price modifier in basis points (from oracle) */ - public readonly feeAssetPriceModifier: bigint = 0n, + public readonly feeAssetPriceModifier: bigint, + /** The signing domain (chainId + rollupAddress) the signature is bound to */ + public readonly signatureContext: CoordinationSignatureContext, ) {} static get schema() { @@ -31,36 +44,38 @@ export class ConsensusPayload implements Signable { header: CheckpointHeader.schema, archive: schemas.Fr, feeAssetPriceModifier: schemas.BigInt, + signatureContext: coordinationSignatureContextSchema, }) - .transform(obj => new ConsensusPayload(obj.header, obj.archive, obj.feeAssetPriceModifier)); + .transform(obj => new ConsensusPayload(obj.header, obj.archive, obj.feeAssetPriceModifier, obj.signatureContext)); } - static getFields(fields: FieldsOf) { - return [fields.header, fields.archive, fields.feeAssetPriceModifier] as const; + static getFields(fields: Omit, 'primaryType'>) { + return [fields.header, fields.archive, fields.feeAssetPriceModifier, fields.signatureContext] as const; } - getPayloadToSign(domainSeparator: SignatureDomainSeparator): Buffer { + getPayloadToSign(): Buffer { + // Matches the L1 ProposePayload struct in ProposeLib.sol. 
const abi = parseAbiParameters( - 'uint8, ' + //domainSeperator - '(' + + '(' + 'bytes32, ' + // archive '(int256), ' + // oracleInput 'bytes32' + // headerHash ')', ); const archiveRoot = this.archive.toString(); - const headerHash = this.header.hash().toString(); - const encodedData = encodeAbiParameters(abi, [ - domainSeparator, - [archiveRoot, [this.feeAssetPriceModifier], headerHash], - ] as const); + const encodedData = encodeAbiParameters(abi, [[archiveRoot, [this.feeAssetPriceModifier], headerHash]] as const); return hexToBuffer(encodedData); } toBuffer(): Buffer { - return serializeToBuffer([this.header, this.archive, serializeSignedBigInt(this.feeAssetPriceModifier)]); + return serializeToBuffer([ + this.header, + this.archive, + serializeSignedBigInt(this.feeAssetPriceModifier), + serializeCoordinationSignatureContext(this.signatureContext), + ]); } public equals(other: ConsensusPayload | CheckpointProposal | CheckpointProposalCore): boolean { @@ -69,34 +84,39 @@ export class ConsensusPayload implements Signable { return ( this.header.equals(otherHeader) && this.archive.equals(other.archive) && - this.feeAssetPriceModifier === otherModifier + this.feeAssetPriceModifier === otherModifier && + coordinationSignatureContextEquals(this.signatureContext, other.signatureContext) ); } static fromBuffer(buf: Buffer | BufferReader): ConsensusPayload { const reader = BufferReader.asReader(buf); - const payload = new ConsensusPayload( - reader.readObject(CheckpointHeader), - reader.readObject(Fr), - reader.readInt256(), - ); - return payload; + const header = reader.readObject(CheckpointHeader); + const archive = reader.readObject(Fr); + const feeAssetPriceModifier = reader.readInt256(); + const signatureContext = readCoordinationSignatureContext(reader); + return new ConsensusPayload(header, archive, feeAssetPriceModifier, signatureContext); } - static fromFields(fields: FieldsOf): ConsensusPayload { - return new ConsensusPayload(fields.header, fields.archive, 
fields.feeAssetPriceModifier); + static fromFields(fields: Omit, 'primaryType'>): ConsensusPayload { + return new ConsensusPayload(fields.header, fields.archive, fields.feeAssetPriceModifier, fields.signatureContext); } - static fromCheckpoint(checkpoint: Checkpoint): ConsensusPayload { - return new ConsensusPayload(checkpoint.header, checkpoint.archive.root, checkpoint.feeAssetPriceModifier); + static fromCheckpoint(checkpoint: Checkpoint, signatureContext: CoordinationSignatureContext): ConsensusPayload { + return new ConsensusPayload( + checkpoint.header, + checkpoint.archive.root, + checkpoint.feeAssetPriceModifier, + signatureContext, + ); } static empty(): ConsensusPayload { - return new ConsensusPayload(CheckpointHeader.empty(), Fr.ZERO, 0n); + return new ConsensusPayload(CheckpointHeader.empty(), Fr.ZERO, 0n, EMPTY_COORDINATION_SIGNATURE_CONTEXT); } static random(): ConsensusPayload { - return new ConsensusPayload(CheckpointHeader.random(), Fr.random(), 0n); + return new ConsensusPayload(CheckpointHeader.random(), Fr.random(), 0n, EMPTY_COORDINATION_SIGNATURE_CONTEXT); } /** @@ -117,10 +137,12 @@ export class ConsensusPayload implements Signable { header: this.header.toInspect(), archive: this.archive.toString(), feeAssetPriceModifier: this.feeAssetPriceModifier.toString(), + chainId: this.signatureContext.chainId, + rollupAddress: this.signatureContext.rollupAddress.toString(), }; } toString() { - return `header: ${this.header.toString()}, archive: ${this.archive.toString()}, feeAssetPriceModifier: ${this.feeAssetPriceModifier}}`; + return `header: ${this.header.toString()}, archive: ${this.archive.toString()}, feeAssetPriceModifier: ${this.feeAssetPriceModifier}, chainId: ${this.signatureContext.chainId}, rollupAddress: ${this.signatureContext.rollupAddress.toString()}`; } } diff --git a/yarn-project/stdlib/src/p2p/signature_utils.test.ts b/yarn-project/stdlib/src/p2p/signature_utils.test.ts new file mode 100644 index 000000000000..ef1543bee0d6 --- 
/dev/null +++ b/yarn-project/stdlib/src/p2p/signature_utils.test.ts @@ -0,0 +1,188 @@ +import { Buffer32 } from '@aztec/foundation/buffer'; +import { keccak256, keccak256String } from '@aztec/foundation/crypto/keccak'; +import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { hexToBuffer } from '@aztec/foundation/string'; + +import { encodeAbiParameters, parseAbiParameters } from 'viem'; + +import { CommitteeAttestationsAndSigners } from '../block/proposal/attestations_and_signers.js'; +import { CommitteeAttestation } from '../block/proposal/committee_attestation.js'; +import { + TEST_COORDINATION_SIGNATURE_CONTEXT, + makeAndSignCommitteeAttestationsAndSigners, + makeBlockProposal, + makeCheckpointAttestation, + makeCheckpointProposal, +} from '../tests/mocks.js'; +import { CheckpointAttestation } from './checkpoint_attestation.js'; +import type { CoordinationSignatureType, Signable } from './signature_utils.js'; +import { + getHashedSignaturePayload, + getHashedSignaturePayloadTypedData, + recoverCoordinationSigner, +} from './signature_utils.js'; + +const DOMAIN_TYPEHASH = `0x${keccak256String( + 'EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)', +)}` as const; +const NAME_HASH = `0x${keccak256String('Aztec Rollup')}` as const; +const VERSION_HASH = `0x${keccak256String('1')}` as const; + +const TYPEHASHES: Record = { + BlockProposal: `0x${keccak256String('BlockProposal(bytes32 payloadHash)')}`, + CheckpointProposal: `0x${keccak256String('CheckpointProposal(bytes32 payloadHash)')}`, + CheckpointAttestation: `0x${keccak256String('CheckpointAttestation(bytes32 payloadHash)')}`, + AttestationsAndSigners: `0x${keccak256String('AttestationsAndSigners(bytes32 payloadHash)')}`, + SignedTxs: `0x${keccak256String('SignedTxs(bytes32 payloadHash)')}`, +}; + +const WRONG_CHAIN_CONTEXT = { + ...TEST_COORDINATION_SIGNATURE_CONTEXT, + chainId: 
TEST_COORDINATION_SIGNATURE_CONTEXT.chainId + 1, +}; + +const WRONG_ROLLUP_CONTEXT = { + ...TEST_COORDINATION_SIGNATURE_CONTEXT, + rollupAddress: EthAddress.fromString('0x0000000000000000000000000000000000000002'), +}; + +function getSolidityCoordinationDigest(signable: Signable): Buffer32 { + const payloadHash = getHashedSignaturePayload(signable); + const domainSeparatorHash = Buffer32.fromBuffer( + keccak256( + hexToBuffer( + encodeAbiParameters(parseAbiParameters('bytes32,bytes32,bytes32,uint256,address'), [ + DOMAIN_TYPEHASH, + NAME_HASH, + VERSION_HASH, + BigInt(signable.signatureContext.chainId), + signable.signatureContext.rollupAddress.toString(), + ]), + ), + ), + ); + const structHash = Buffer32.fromBuffer( + keccak256( + hexToBuffer( + encodeAbiParameters(parseAbiParameters('bytes32,bytes32'), [ + TYPEHASHES[signable.primaryType], + payloadHash.toString() as `0x${string}`, + ]), + ), + ), + ); + + return Buffer32.fromBuffer( + keccak256(Buffer.concat([Buffer.from('1901', 'hex'), domainSeparatorHash.toBuffer(), structHash.toBuffer()])), + ); +} + +describe('coordination signature typed data', () => { + it('matches the Solidity EIP-712 digest for all proposal-path message types', async () => { + const signer = Secp256k1Signer.random(); + const blockProposal = await makeBlockProposal({ signer }); + const checkpointProposal = await makeCheckpointProposal({ signer }); + const checkpointAttestation = makeCheckpointAttestation({ signer }); + const attestationsAndSigners = new CommitteeAttestationsAndSigners( + [CommitteeAttestation.fromAddressAndSignature(signer.address, checkpointAttestation.signature)], + TEST_COORDINATION_SIGNATURE_CONTEXT, + ); + + expect(getHashedSignaturePayloadTypedData(blockProposal)).toEqual(getSolidityCoordinationDigest(blockProposal)); + expect(getHashedSignaturePayloadTypedData(checkpointProposal)).toEqual( + getSolidityCoordinationDigest(checkpointProposal), + ); + 
expect(getHashedSignaturePayloadTypedData(checkpointAttestation.payload)).toEqual( + getSolidityCoordinationDigest(checkpointAttestation.payload), + ); + expect(getHashedSignaturePayloadTypedData(attestationsAndSigners)).toEqual( + getSolidityCoordinationDigest(attestationsAndSigners), + ); + }); + + it('recovers with the right context and changes sender for the wrong domain', async () => { + const blockSigner = Secp256k1Signer.random(); + const checkpointSigner = Secp256k1Signer.random(); + const attesterSigner = Secp256k1Signer.random(); + const proposerSigner = Secp256k1Signer.random(); + const attestationsAndSignersSigner = Secp256k1Signer.random(); + + const blockProposal = await makeBlockProposal({ signer: blockSigner }); + const checkpointProposal = await makeCheckpointProposal({ + signer: checkpointSigner, + }); + const checkpointAttestation = makeCheckpointAttestation({ + attesterSigner, + proposerSigner, + }); + const attestationsAndSigners = new CommitteeAttestationsAndSigners( + [CommitteeAttestation.fromAddressAndSignature(attesterSigner.address, checkpointAttestation.signature)], + TEST_COORDINATION_SIGNATURE_CONTEXT, + ); + const attestationsAndSignersSignature = makeAndSignCommitteeAttestationsAndSigners( + attestationsAndSigners, + attestationsAndSignersSigner, + ); + + // Helpers to create variants of the same signables but with the wrong context baked in. + // Reset any memoized sender/proposer so the copy re-derives them against the altered context. 
+ const withContext = ( + signable: T, + ctx: typeof TEST_COORDINATION_SIGNATURE_CONTEXT, + ): T => { + const copy = Object.create(Object.getPrototypeOf(signable)); + Object.assign(copy, signable); + copy.signatureContext = ctx; + if ('cachedSender' in copy) { + copy.cachedSender = undefined; + } + if ('cachedProposer' in copy) { + copy.cachedProposer = undefined; + } + return copy; + }; + + expect(blockProposal.getSender()).toEqual(blockSigner.address); + expect(withContext(blockProposal, WRONG_CHAIN_CONTEXT).getSender()).not.toEqual(blockSigner.address); + expect(withContext(blockProposal, WRONG_ROLLUP_CONTEXT).getSender()).not.toEqual(blockSigner.address); + + expect(checkpointProposal.getSender()).toEqual(checkpointSigner.address); + expect(withContext(checkpointProposal, WRONG_CHAIN_CONTEXT).getSender()).not.toEqual(checkpointSigner.address); + expect(withContext(checkpointProposal, WRONG_ROLLUP_CONTEXT).getSender()).not.toEqual(checkpointSigner.address); + + expect(checkpointAttestation.getSender()).toEqual(attesterSigner.address); + expect(checkpointAttestation.getProposer()).toEqual(proposerSigner.address); + // To change the context on a CheckpointAttestation, we need to swap the embedded payload's context. 
+ const wrongChainAttestation = new CheckpointAttestation( + withContext(checkpointAttestation.payload, WRONG_CHAIN_CONTEXT), + checkpointAttestation.signature, + checkpointAttestation.proposerSignature, + ); + const wrongRollupAttestation = new CheckpointAttestation( + withContext(checkpointAttestation.payload, WRONG_ROLLUP_CONTEXT), + checkpointAttestation.signature, + checkpointAttestation.proposerSignature, + ); + expect(wrongChainAttestation.getSender()).not.toEqual(attesterSigner.address); + expect(wrongRollupAttestation.getSender()).not.toEqual(attesterSigner.address); + expect(wrongChainAttestation.getProposer()).not.toEqual(proposerSigner.address); + expect(wrongRollupAttestation.getProposer()).not.toEqual(proposerSigner.address); + + expect(recoverCoordinationSigner(attestationsAndSigners, attestationsAndSignersSignature)).toEqual( + attestationsAndSignersSigner.address, + ); + expect( + recoverCoordinationSigner( + withContext(attestationsAndSigners, WRONG_CHAIN_CONTEXT), + attestationsAndSignersSignature, + ), + ).not.toEqual(attestationsAndSignersSigner.address); + expect( + recoverCoordinationSigner( + withContext(attestationsAndSigners, WRONG_ROLLUP_CONTEXT), + attestationsAndSignersSignature, + ), + ).not.toEqual(attestationsAndSignersSigner.address); + }); +}); diff --git a/yarn-project/stdlib/src/p2p/signature_utils.ts b/yarn-project/stdlib/src/p2p/signature_utils.ts index f50bbe82e0c0..b71c9a8b89e3 100644 --- a/yarn-project/stdlib/src/p2p/signature_utils.ts +++ b/yarn-project/stdlib/src/p2p/signature_utils.ts @@ -1,37 +1,122 @@ import { Buffer32 } from '@aztec/foundation/buffer'; import { keccak256 } from '@aztec/foundation/crypto/keccak'; -import { makeEthSignDigest } from '@aztec/foundation/crypto/secp256k1-signer'; +import { tryRecoverAddress } from '@aztec/foundation/crypto/secp256k1-signer'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import type { Signature } from '@aztec/foundation/eth-signature'; +import { type 
BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -export enum SignatureDomainSeparator { - blockProposal = 0, - checkpointAttestation = 1, - attestationsAndSigners = 2, - checkpointProposal = 3, - signedTxs = 4, -} +import { type TypedDataDefinition, hashTypedData } from 'viem'; +import { z } from 'zod'; + +import type { ZodFor } from '../schemas/index.js'; + +export type CoordinationSignatureType = + | 'BlockProposal' + | 'CheckpointProposal' + | 'CheckpointAttestation' + | 'AttestationsAndSigners' + | 'SignedTxs'; + +export type CoordinationSignatureContext = { + chainId: number; + rollupAddress: EthAddress; +}; + +export const EMPTY_COORDINATION_SIGNATURE_CONTEXT: CoordinationSignatureContext = { + chainId: 0, + rollupAddress: EthAddress.ZERO, +}; + +export const coordinationSignatureContextSchema: ZodFor = z.object({ + chainId: z.number(), + rollupAddress: EthAddress.schema, +}); export interface Signable { - getPayloadToSign(domainSeparator: SignatureDomainSeparator): Buffer; + readonly primaryType: CoordinationSignatureType; + readonly signatureContext: CoordinationSignatureContext; + getPayloadToSign(): Buffer; } -/** - * Get the hashed payload for the signature of the `Signable` - * @param s - The `Signable` to sign - * @returns The hashed payload for the signature of the `Signable` - */ -export function getHashedSignaturePayload(s: Signable, domainSeparator: SignatureDomainSeparator): Buffer32 { - return Buffer32.fromBuffer(keccak256(s.getPayloadToSign(domainSeparator))); +export function coordinationSignatureContextEquals( + a: CoordinationSignatureContext, + b: CoordinationSignatureContext, +): boolean { + return a.chainId === b.chainId && a.rollupAddress.equals(b.rollupAddress); +} + +export function serializeCoordinationSignatureContext(ctx: CoordinationSignatureContext): Buffer { + return serializeToBuffer([ctx.chainId, ctx.rollupAddress]); +} + +export function readCoordinationSignatureContext(reader: BufferReader): 
CoordinationSignatureContext { + const chainId = reader.readNumber(); + const rollupAddress = reader.readObject(EthAddress); + return { chainId, rollupAddress }; } /** - * Get the hashed payload for the signature of the `Signable` as an Ethereum signed message EIP-712 - * @param s - the `Signable` to sign - * @returns The hashed payload for the signature of the `Signable` as an Ethereum signed message + * Returns true if the signable carries a context matching the node's expected context. + * Use this at the P2P ingress boundary to reject foreign-chain messages cheaply before + * performing any signature recovery. */ -export function getHashedSignaturePayloadEthSignedMessage( - s: Signable, - domainSeparator: SignatureDomainSeparator, -): Buffer32 { - const payload = getHashedSignaturePayload(s, domainSeparator); - return makeEthSignDigest(payload); +export function hasValidSignatureContext(signable: Signable, expected: CoordinationSignatureContext): boolean { + return coordinationSignatureContextEquals(signable.signatureContext, expected); +} + +const COORDINATION_SIGNATURE_NAME = 'Aztec Rollup'; +const COORDINATION_SIGNATURE_VERSION = '1'; + +const EIP712_DOMAIN_FIELDS = [ + { name: 'name', type: 'string' }, + { name: 'version', type: 'string' }, + { name: 'chainId', type: 'uint256' }, + { name: 'verifyingContract', type: 'address' }, +] as const; + +const COORDINATION_SIGNATURE_TYPES = { + EIP712Domain: EIP712_DOMAIN_FIELDS, + BlockProposal: [{ name: 'payloadHash', type: 'bytes32' }], + CheckpointProposal: [{ name: 'payloadHash', type: 'bytes32' }], + CheckpointAttestation: [{ name: 'payloadHash', type: 'bytes32' }], + AttestationsAndSigners: [{ name: 'payloadHash', type: 'bytes32' }], + SignedTxs: [{ name: 'payloadHash', type: 'bytes32' }], +} as const; + +export function getCoordinationSignatureTypedDataForPayloadHash( + payloadHash: Buffer32, + type: CoordinationSignatureType, + context: CoordinationSignatureContext, +): TypedDataDefinition { + return { + 
domain: { + name: COORDINATION_SIGNATURE_NAME, + version: COORDINATION_SIGNATURE_VERSION, + chainId: context.chainId, + verifyingContract: context.rollupAddress.toString() as `0x${string}`, + }, + types: COORDINATION_SIGNATURE_TYPES, + primaryType: type, + message: { + payloadHash: payloadHash.toString() as `0x${string}`, + }, + }; +} + +export function getCoordinationSignatureTypedData(signable: Signable): TypedDataDefinition { + const payloadHash = getHashedSignaturePayload(signable); + return getCoordinationSignatureTypedDataForPayloadHash(payloadHash, signable.primaryType, signable.signatureContext); +} + +export function getHashedSignaturePayloadTypedData(signable: Signable): Buffer32 { + return Buffer32.fromString(hashTypedData(getCoordinationSignatureTypedData(signable))); +} + +export function recoverCoordinationSigner(signable: Signable, signature: Signature): EthAddress | undefined { + const digest = getHashedSignaturePayloadTypedData(signable); + return tryRecoverAddress(digest, signature, { allowYParityAsV: true }); +} + +export function getHashedSignaturePayload(s: Signable): Buffer32 { + return Buffer32.fromBuffer(keccak256(s.getPayloadToSign())); } diff --git a/yarn-project/stdlib/src/p2p/signed_txs.ts b/yarn-project/stdlib/src/p2p/signed_txs.ts index cd88f56aa278..17d06fcd36df 100644 --- a/yarn-project/stdlib/src/p2p/signed_txs.ts +++ b/yarn-project/stdlib/src/p2p/signed_txs.ts @@ -1,15 +1,20 @@ -import { Buffer32 } from '@aztec/foundation/buffer'; -import { tryRecoverAddress } from '@aztec/foundation/crypto/secp256k1-signer'; import type { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; +import type { TypedDataDefinition } from 'viem'; + import { MAX_TXS_PER_BLOCK } from '../deserialization/index.js'; import { Tx } from '../tx/tx.js'; import { - SignatureDomainSeparator, - getHashedSignaturePayload, - 
getHashedSignaturePayloadEthSignedMessage, + type CoordinationSignatureContext, + type CoordinationSignatureType, + EMPTY_COORDINATION_SIGNATURE_CONTEXT, + type Signable, + getCoordinationSignatureTypedData, + readCoordinationSignatureContext, + recoverCoordinationSigner, + serializeCoordinationSignatureContext, } from './signature_utils.js'; /** @@ -17,50 +22,56 @@ import { * The signature is over the transaction objects themselves, providing * data availability guarantees beyond just the transaction hashes. */ -export class SignedTxs { - private sender: EthAddress | undefined; +export class SignedTxs implements Signable { + readonly primaryType: CoordinationSignatureType = 'SignedTxs'; + + private cachedSender: EthAddress | undefined | null = undefined; constructor( /** The transactions */ public readonly txs: Tx[], /** The proposer's signature over the transactions */ public readonly signature: Signature, + /** The signing domain (chainId + rollupAddress) the signature is bound to */ + public readonly signatureContext: CoordinationSignatureContext, ) {} - /** - * Get the payload to sign for this signed txs. - */ - getPayloadToSign(domainSeparator: SignatureDomainSeparator): Buffer { - return serializeToBuffer([domainSeparator, this.txs.length, this.txs]); + getPayloadToSign(): Buffer { + return serializeToBuffer([this.txs.length, this.txs]); } /** - * Lazily evaluate the sender of the signed txs; result is cached + * Lazily evaluate the sender of the signed txs; result is cached. * @returns The sender address, or undefined if signature recovery fails */ getSender(): EthAddress | undefined { - if (!this.sender) { - const hashed = getHashedSignaturePayloadEthSignedMessage(this, SignatureDomainSeparator.signedTxs); - this.sender = tryRecoverAddress(hashed, this.signature); + if (this.cachedSender === undefined) { + this.cachedSender = recoverCoordinationSigner(this, this.signature) ?? null; } - return this.sender; + return this.cachedSender ?? 
undefined; } /** - * Create SignedTxs from a signer function + * Create SignedTxs from a typed-data signer function */ static async createFromSigner( txs: Tx[], - payloadSigner: (payload: Buffer32) => Promise, + signatureContext: CoordinationSignatureContext, + typedDataSigner: (typedData: TypedDataDefinition) => Promise, ): Promise { - const tempSignedTxs = new SignedTxs(txs, Signature.empty()); - const hashed = getHashedSignaturePayload(tempSignedTxs, SignatureDomainSeparator.signedTxs); - const signature = await payloadSigner(hashed); - return new SignedTxs(txs, signature); + const tempSignedTxs = new SignedTxs(txs, Signature.empty(), signatureContext); + const typedData = getCoordinationSignatureTypedData(tempSignedTxs); + const signature = await typedDataSigner(typedData); + return new SignedTxs(txs, signature, signatureContext); } toBuffer(): Buffer { - return serializeToBuffer([this.txs.length, this.txs, this.signature]); + return serializeToBuffer([ + this.txs.length, + this.txs, + this.signature, + serializeCoordinationSignatureContext(this.signatureContext), + ]); } static fromBuffer(buf: Buffer | BufferReader): SignedTxs { @@ -71,18 +82,25 @@ export class SignedTxs { } const txs = reader.readArray(txCount, Tx); const signature = reader.readObject(Signature); - return new SignedTxs(txs, signature); + const signatureContext = readCoordinationSignatureContext(reader); + return new SignedTxs(txs, signature, signatureContext); } getSize(): number { - return 4 /* txs.length */ + this.txs.reduce((acc, tx) => acc + tx.getSize(), 0) + this.signature.getSize(); + return ( + 4 /* txs.length */ + + this.txs.reduce((acc, tx) => acc + tx.getSize(), 0) + + this.signature.getSize() + + 4 /* chainId */ + + 20 /* rollupAddress */ + ); } static empty(): SignedTxs { - return new SignedTxs([], Signature.empty()); + return new SignedTxs([], Signature.empty(), EMPTY_COORDINATION_SIGNATURE_CONTEXT); } static random(): SignedTxs { - return new SignedTxs([Tx.random(), 
Tx.random()], Signature.random()); + return new SignedTxs([Tx.random(), Tx.random()], Signature.random(), EMPTY_COORDINATION_SIGNATURE_CONTEXT); } } diff --git a/yarn-project/stdlib/src/tests/mocks.ts b/yarn-project/stdlib/src/tests/mocks.ts index a2c813434260..39439d46f136 100644 --- a/yarn-project/stdlib/src/tests/mocks.ts +++ b/yarn-project/stdlib/src/tests/mocks.ts @@ -14,8 +14,11 @@ import { padArrayEnd, times } from '@aztec/foundation/collection'; import { randomBytes } from '@aztec/foundation/crypto/random'; import { Secp256k1Signer } from '@aztec/foundation/crypto/secp256k1-signer'; import { Fr } from '@aztec/foundation/curves/bn254'; +import { EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; +import { type TypedDataDefinition, hashTypedData } from 'viem'; + import type { ContractArtifact } from '../abi/abi.js'; import { PublicTxEffect } from '../avm/avm.js'; import type { AvmAccumulatedData } from '../avm/avm_accumulated_data.js'; @@ -51,7 +54,7 @@ import { BlockProposal } from '../p2p/block_proposal.js'; import { CheckpointAttestation } from '../p2p/checkpoint_attestation.js'; import { CheckpointProposal } from '../p2p/checkpoint_proposal.js'; import { ConsensusPayload } from '../p2p/consensus_payload.js'; -import { SignatureDomainSeparator, getHashedSignaturePayloadEthSignedMessage } from '../p2p/signature_utils.js'; +import { type CoordinationSignatureContext, getHashedSignaturePayloadTypedData } from '../p2p/signature_utils.js'; import { ChonkProof } from '../proofs/chonk_proof.js'; import { ProvingRequestType } from '../proofs/proving_request_type.js'; import { CheckpointHeader } from '../rollup/checkpoint_header.js'; @@ -86,6 +89,15 @@ import { makePublicDataWrite, } from './factories.js'; +export const TEST_COORDINATION_SIGNATURE_CONTEXT: CoordinationSignatureContext = { + chainId: 31337, + rollupAddress: EthAddress.fromNumber(1), +}; + +function signTypedData(signer: Secp256k1Signer, 
typedData: TypedDataDefinition): Signature { + return signer.sign(Buffer32.fromString(hashTypedData(typedData))); +} + export const randomTxHash = (): TxHash => TxHash.random(); export const mockTx = async ( @@ -519,6 +531,7 @@ export interface MakeConsensusPayloadOptions { txHashes?: TxHash[]; txs?: Tx[]; feeAssetPriceModifier?: bigint; + signatureContext?: CoordinationSignatureContext; } export interface MakeBlockProposalOptions { @@ -529,6 +542,7 @@ export interface MakeBlockProposalOptions { archiveRoot?: Fr; txHashes?: TxHash[]; txs?: Tx[]; + signatureContext?: CoordinationSignatureContext; } export interface MakeCheckpointProposalOptions { @@ -536,6 +550,7 @@ export interface MakeCheckpointProposalOptions { checkpointHeader?: CheckpointHeader; archiveRoot?: Fr; feeAssetPriceModifier?: bigint; + signatureContext?: CoordinationSignatureContext; /** Options for the lastBlock - if undefined, no lastBlock is included */ lastBlock?: { blockHeader?: BlockHeader; @@ -546,10 +561,7 @@ export interface MakeCheckpointProposalOptions { } // eslint-disable-next-line @typescript-eslint/no-unused-vars -const makeAndSignConsensusPayload = ( - domainSeparator: SignatureDomainSeparator, - options?: MakeConsensusPayloadOptions, -) => { +const makeAndSignConsensusPayload = (options?: MakeConsensusPayloadOptions) => { const header = options?.header ?? makeCheckpointHeader(1); const { signer = Secp256k1Signer.random(), archive = Fr.random(), feeAssetPriceModifier = 0n } = options ?? 
{}; @@ -557,9 +569,10 @@ const makeAndSignConsensusPayload = ( header, archive, feeAssetPriceModifier, + signatureContext: TEST_COORDINATION_SIGNATURE_CONTEXT, }); - const hash = getHashedSignaturePayloadEthSignedMessage(payload, domainSeparator); + const hash = getHashedSignaturePayloadTypedData(payload); const signature = signer.sign(hash); return { blockNumber: header.slotNumber, payload, signature }; @@ -569,10 +582,7 @@ export const makeAndSignCommitteeAttestationsAndSigners = ( attestationsAndSigners: CommitteeAttestationsAndSigners, signer: Secp256k1Signer = Secp256k1Signer.random(), ) => { - const hash = getHashedSignaturePayloadEthSignedMessage( - attestationsAndSigners, - SignatureDomainSeparator.attestationsAndSigners, - ); + const hash = getHashedSignaturePayloadTypedData(attestationsAndSigners); return signer.sign(hash); }; @@ -584,6 +594,7 @@ export const makeBlockProposal = (options?: MakeBlockProposalOptions): Promise TxHash.random()); const txs = options?.txs; const signer = options?.signer ?? Secp256k1Signer.random(); + const signatureContext = options?.signatureContext ?? TEST_COORDINATION_SIGNATURE_CONTEXT; return BlockProposal.createProposalFromSigner( blockHeader, @@ -593,7 +604,9 @@ export const makeBlockProposal = (options?: MakeBlockProposalOptions): Promise Promise.resolve(signer.signMessage(_payload)), + signatureContext, + (typedData, _context) => Promise.resolve(signTypedData(signer, typedData)), + (typedData, _context) => Promise.resolve(signTypedData(signer, typedData)), ); }; @@ -602,6 +615,7 @@ export const makeCheckpointProposal = async (options?: MakeCheckpointProposalOpt const archiveRoot = options?.archiveRoot ?? Fr.random(); const feeAssetPriceModifier = options?.feeAssetPriceModifier ?? 0n; const signer = options?.signer ?? Secp256k1Signer.random(); + const signatureContext = options?.signatureContext ?? 
TEST_COORDINATION_SIGNATURE_CONTEXT; // Build a signed block proposal if lastBlock options are provided const lastBlockProposal = options?.lastBlock @@ -613,6 +627,7 @@ export const makeCheckpointProposal = async (options?: MakeCheckpointProposalOpt txHashes: options.lastBlock.txHashes, txs: options.lastBlock.txs, signer, + signatureContext, }) : undefined; @@ -622,7 +637,8 @@ export const makeCheckpointProposal = async (options?: MakeCheckpointProposalOpt CheckpointNumber(1), feeAssetPriceModifier, lastBlockProposal, - payload => Promise.resolve(signer.signMessage(payload)), + signatureContext, + typedData => Promise.resolve(signTypedData(signer, typedData)), ); }; @@ -636,6 +652,7 @@ export type MakeCheckpointAttestationOptions = { attesterSigner?: Secp256k1Signer; proposerSigner?: Secp256k1Signer; signer?: Secp256k1Signer; + signatureContext?: CoordinationSignatureContext; }; /** @@ -645,26 +662,27 @@ export const makeCheckpointAttestation = (options: MakeCheckpointAttestationOpti const header = options.header ?? makeCheckpointHeader(1); const archive = options.archive ?? Fr.random(); const feeAssetPriceModifier = options.feeAssetPriceModifier ?? 0n; + const signatureContext = options.signatureContext ?? TEST_COORDINATION_SIGNATURE_CONTEXT; const { signer, attesterSigner = signer, proposerSigner = signer } = options; - const payload = new ConsensusPayload(header, archive, feeAssetPriceModifier); + const payload = new ConsensusPayload(header, archive, feeAssetPriceModifier, signatureContext); // Sign as attester - const attestationHash = getHashedSignaturePayloadEthSignedMessage( - payload, - SignatureDomainSeparator.checkpointAttestation, - ); + const attestationHash = getHashedSignaturePayloadTypedData(payload); const attestationSigner = attesterSigner ?? 
Secp256k1Signer.random(); const attestationSignature = attestationSigner.sign(attestationHash); // Sign as proposer - use CheckpointProposal's payload format (serializeToBuffer) // This is different from ConsensusPayload's format (ABI encoding) const proposalSignerToUse = proposerSigner ?? Secp256k1Signer.random(); - const tempProposal = new CheckpointProposal(header, archive, feeAssetPriceModifier, Signature.empty()); - const proposalHash = getHashedSignaturePayloadEthSignedMessage( - tempProposal, - SignatureDomainSeparator.checkpointProposal, + const tempProposal = new CheckpointProposal( + header, + archive, + feeAssetPriceModifier, + Signature.empty(), + signatureContext, ); + const proposalHash = getHashedSignaturePayloadTypedData(tempProposal); const proposerSignature = proposalSignerToUse.sign(proposalHash); return new CheckpointAttestation(payload, attestationSignature, proposerSignature); @@ -677,13 +695,15 @@ export const makeCheckpointAttestationFromProposal = ( proposal: CheckpointProposal, attesterSigner?: Secp256k1Signer, ): CheckpointAttestation => { - const payload = new ConsensusPayload(proposal.checkpointHeader, proposal.archive, proposal.feeAssetPriceModifier); + const payload = new ConsensusPayload( + proposal.checkpointHeader, + proposal.archive, + proposal.feeAssetPriceModifier, + proposal.signatureContext, + ); // Sign as attester - const attestationHash = getHashedSignaturePayloadEthSignedMessage( - payload, - SignatureDomainSeparator.checkpointAttestation, - ); + const attestationHash = getHashedSignaturePayloadTypedData(payload); const attestationSigner = attesterSigner ?? 
Secp256k1Signer.random(); const attestationSignature = attestationSigner.sign(attestationHash); diff --git a/yarn-project/stdlib/src/timetable/index.test.ts b/yarn-project/stdlib/src/timetable/index.test.ts index a3e08687e9af..f0a2ede5d718 100644 --- a/yarn-project/stdlib/src/timetable/index.test.ts +++ b/yarn-project/stdlib/src/timetable/index.test.ts @@ -30,9 +30,11 @@ describe('timetable validation', () => { }); it('accepts a pipelined multi-block config that fits exactly one block', () => { + // timeReservedAtEnd = assemble(1) + 2*p2p(2) + blockDuration(4) = 9 + // slotDuration(15) - init(1) - reserved(9) = 5 ≥ blockDuration(4) const timing = createPipelinedCheckpointTimingModel({ - aztecSlotDuration: 12, - blockDuration: 8, + aztecSlotDuration: 15, + blockDuration: 4, checkpointInitializationTime: 1, checkpointAssembleTime: 1, p2pPropagationTime: 2, @@ -43,10 +45,12 @@ describe('timetable validation', () => { }); it('rejects a pipelined multi-block config that cannot fit one block', () => { + // timeReservedAtEnd = assemble(1) + 2*p2p(2) + blockDuration(4) = 9 + // slotDuration(13) - init(1) - reserved(9) = 3 < blockDuration(4) → reject expect(() => createPipelinedCheckpointTimingModel({ - aztecSlotDuration: 11, - blockDuration: 8, + aztecSlotDuration: 13, + blockDuration: 4, checkpointInitializationTime: 1, checkpointAssembleTime: 1, p2pPropagationTime: 2, @@ -55,6 +59,30 @@ describe('timetable validation', () => { ).toThrow(/less than one blockDuration/i); }); + it('computes pipelined timing for the default 72s/6s config', () => { + const timing = createPipelinedCheckpointTimingModel({ + aztecSlotDuration: 72, + blockDuration: 6, + checkpointInitializationTime: 1, + checkpointAssembleTime: 1, + p2pPropagationTime: 2, + l1PublishingTime: 12, + }); + + // timeReservedAtEnd = 1 + 2*2 + 6 = 11s + expect(timing.timeReservedAtEnd).toBe(11); + // available = 72 - 1 - 11 = 60s → floor(60/6) = 10 blocks + expect(timing.calculateMaxBlocksPerSlot()).toBe(10); + // 
Grace period is zero under early pipelining + expect(timing.pipeliningAttestationGracePeriod).toBe(0); + // Proposals no longer spill into the target slot + expect(timing.proposalWindowIntoTargetSlot).toBe(0); + // Attestation straggler grace is bounded by round-trip p2p + expect(timing.attestationWindowIntoTargetSlot).toBe(4); + // Assembly deadline sits at slot boundary + expect(timing.checkpointAssemblyDeadline).toBe(72); + }); + it('allows single-block mode without blockDuration', () => { const timing = createCheckpointTimingModel({ aztecSlotDuration: 10, diff --git a/yarn-project/stdlib/src/timetable/index.ts b/yarn-project/stdlib/src/timetable/index.ts index c94dcbbecb3f..d60db923fb4f 100644 --- a/yarn-project/stdlib/src/timetable/index.ts +++ b/yarn-project/stdlib/src/timetable/index.ts @@ -162,23 +162,39 @@ class StandardCheckpointTimingModel extends BaseCheckpointTiming { /** * Checkpoint timing model for proposer pipelining. * - * In this mode, the build work still starts in the current slot, but checkpoint - * assembly and attestation collection can extend into the target slot. The extra - * target-slot window getters are intended for consumers such as P2P validators - * that need to validate pipelined messages against wallclock time. + * In this mode, the build work starts at the wall-clock slot boundary and the + * checkpoint proposal is broadcast early enough that attestations complete by + * the end of the build slot. L1 submission can then be sent at the boundary of + * the target slot. The extra target-slot window getters are intended for + * consumers such as P2P validators that need to validate pipelined messages + * against wallclock time. 
*/ class PipelinedCheckpointTimingModel extends BaseCheckpointTiming implements PipelinedCheckpointTiming { public get proposalWindowIntoTargetSlot(): number { - // Allow the p2p propagation time to receive a checkpoint proposal from leader - return this.p2pPropagationTime; + // Proposals no longer spill into the target slot: they are broadcast early + // enough in the build slot that attestations complete before the boundary. + // Any residual tolerance into the target slot is covered by clock disparity. + return 0; } public get attestationWindowIntoTargetSlot(): number { - return this.aztecSlotDuration - this.l1PublishingTime; + // Straggler grace: attestations aim to complete by build-slot end. Allow a + // small window into the target slot for late arrivals (round-trip p2p). + return 2 * this.p2pPropagationTime; + } + + public override get pipeliningAttestationGracePeriod(): number { + // Under the early-pipelining regime attestations complete inside the build + // slot itself, so there is no extra grace into the target slot. + return 0; } public get timeReservedAtEnd(): number { - return this.checkpointAssembleTime + this.p2pPropagationTime; + // Reserve enough time at the end of the build slot for: + // - assembling and broadcasting the checkpoint proposal + // - round-trip p2p propagation (proposal out, attestations back) + // - validators re-executing the last block + return this.checkpointAssembleTime + 2 * this.p2pPropagationTime + (this.blockDuration ?? 0); } public get minimumBuildSlotWork(): number { @@ -186,9 +202,8 @@ class PipelinedCheckpointTimingModel extends BaseCheckpointTiming implements Pip } public get checkpointAssemblyDeadline(): number { - // Allow enough time to - // - build all blocks - // - receive attestations + // Allow enough time to build all blocks and receive attestations. With + // `pipeliningAttestationGracePeriod = 0` this equals `aztecSlotDuration`. 
return this.aztecSlotDuration + this.pipeliningAttestationGracePeriod; } diff --git a/yarn-project/stdlib/src/tx/profiling.ts b/yarn-project/stdlib/src/tx/profiling.ts index d5dabb600de6..f341f5af3e2e 100644 --- a/yarn-project/stdlib/src/tx/profiling.ts +++ b/yarn-project/stdlib/src/tx/profiling.ts @@ -133,12 +133,12 @@ export class TxProfileResult { ], { nodeRPCCalls: { - perMethod: { getBlockHeader: { times: [1] } }, + perMethod: { getBlock: { times: [1] } }, roundTrips: { roundTrips: 1, totalBlockingTime: 1, roundTripDurations: [1], - roundTripMethods: [['getBlockHeader']], + roundTripMethods: [['getBlock']], }, }, timings: { @@ -184,12 +184,12 @@ export class UtilityExecutionResult { static random(): UtilityExecutionResult { return new UtilityExecutionResult([Fr.random()], [], 0n, { nodeRPCCalls: { - perMethod: { getBlockHeader: { times: [1] } }, + perMethod: { getBlock: { times: [1] } }, roundTrips: { roundTrips: 1, totalBlockingTime: 1, roundTripDurations: [1], - roundTripMethods: [['getBlockHeader']], + roundTripMethods: [['getBlock']], }, }, timings: { diff --git a/yarn-project/telemetry-client/src/config.ts b/yarn-project/telemetry-client/src/config.ts index 7d2c2a42e9b6..4e705c7a6fcf 100644 --- a/yarn-project/telemetry-client/src/config.ts +++ b/yarn-project/telemetry-client/src/config.ts @@ -1,4 +1,9 @@ -import { type ConfigMappingsType, booleanConfigHelper, getConfigFromMappings } from '@aztec/foundation/config'; +import { + type ConfigMappingsType, + booleanConfigHelper, + getConfigFromMappings, + numberConfigHelper, +} from '@aztec/foundation/config'; export interface TelemetryClientConfig { metricsCollectorUrl?: URL; @@ -33,14 +38,12 @@ export const telemetryClientConfigMappings: ConfigMappingsType parseInt(val), + ...numberConfigHelper(60000), }, otelExportTimeoutMs: { env: 'OTEL_EXPORT_TIMEOUT_MS', description: 'The timeout for exporting metrics', - defaultValue: 30000, // Default extracted from otel client - parseEnv: (val: string) => 
parseInt(val), + ...numberConfigHelper(30000), }, otelExcludeMetrics: { env: 'OTEL_EXCLUDE_METRICS', diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 563aa9455216..53feee910d90 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -400,6 +400,12 @@ export const SEQUENCER_STATE_TRANSITION_BUFFER_DURATION: MetricDefinition = { unit: 'ms', valueType: ValueType.INT, }; +export const SEQUENCER_STATE_DURATION: MetricDefinition = { + name: 'aztec.sequencer.state_duration', + description: 'Wall-clock time spent in each sequencer state, labelled by the state being left', + unit: 'ms', + valueType: ValueType.INT, +}; export const SEQUENCER_BLOCK_BUILD_DURATION: MetricDefinition = { name: 'aztec.sequencer.block.build_duration', description: 'Duration to build a block', diff --git a/yarn-project/validator-client/src/config.ts b/yarn-project/validator-client/src/config.ts index 00118695f30f..be8df0c0dd84 100644 --- a/yarn-project/validator-client/src/config.ts +++ b/yarn-project/validator-client/src/config.ts @@ -3,6 +3,7 @@ import { booleanConfigHelper, getConfigFromMappings, numberConfigHelper, + optionalNumberConfigHelper, secretValueConfigHelper, } from '@aztec/foundation/config'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -30,6 +31,12 @@ export const validatorClientConfigMappings: ConfigMappingsType EthAddress.fromString(address.trim())), defaultValue: [], }, + l1ChainId: { + env: 'L1_CHAIN_ID', + description: 'The chain ID of the ethereum host.', + parseEnv: (val: string) => +val, + defaultValue: 31337, + }, disableValidator: { env: 'VALIDATOR_DISABLED', description: 'Do not run the validator', @@ -75,22 +82,22 @@ export const validatorClientConfigMappings: ConfigMappingsType parseInt(val, 10), + ...optionalNumberConfigHelper(), }, validateMaxDABlockGas: { env: 'VALIDATOR_MAX_DA_BLOCK_GAS', description: 'Maximum DA block gas for 
validation. Proposals exceeding this limit are rejected.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, validateMaxTxsPerBlock: { env: 'VALIDATOR_MAX_TX_PER_BLOCK', description: 'Maximum transactions per block for validation. Proposals exceeding this limit are rejected.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, validateMaxTxsPerCheckpoint: { env: 'VALIDATOR_MAX_TX_PER_CHECKPOINT', description: 'Maximum transactions per checkpoint for validation. Proposals exceeding this limit are rejected.', - parseEnv: (val: string) => parseInt(val, 10), + ...optionalNumberConfigHelper(), }, ...localSignerConfigMappings, ...validatorHASignerConfigMappings, diff --git a/yarn-project/validator-client/src/duties/validation_service.test.ts b/yarn-project/validator-client/src/duties/validation_service.test.ts index 760cd1773215..1cecf73317ac 100644 --- a/yarn-project/validator-client/src/duties/validation_service.test.ts +++ b/yarn-project/validator-client/src/duties/validation_service.test.ts @@ -3,7 +3,12 @@ import { CheckpointNumber, IndexWithinCheckpoint } from '@aztec/foundation/brand import { Buffer32 } from '@aztec/foundation/buffer'; import { Fr } from '@aztec/foundation/curves/bn254'; import { EthAddress } from '@aztec/foundation/eth-address'; -import { makeBlockHeader, makeCheckpointHeader, makeCheckpointProposal } from '@aztec/stdlib/testing'; +import { + TEST_COORDINATION_SIGNATURE_CONTEXT, + makeBlockHeader, + makeCheckpointHeader, + makeCheckpointProposal, +} from '@aztec/stdlib/testing'; import { Tx } from '@aztec/stdlib/tx'; import { DutyType } from '@aztec/validator-ha-signer/types'; @@ -22,7 +27,7 @@ describe('ValidationService', () => { keys = [generatePrivateKey(), generatePrivateKey()]; addresses = keys.map(key => EthAddress.fromString(getAddressFromPrivateKey(key))); store = new LocalKeyStore(keys.map(key => Buffer32.fromString(key))); - service = new ValidationService(store); + 
service = new ValidationService(store, TEST_COORDINATION_SIGNATURE_CONTEXT); }); it('creates a block proposal with txs appended', async () => { @@ -101,17 +106,17 @@ describe('ValidationService', () => { const capturedContexts: Array<{ dutyType: DutyType; blockIndexWithinCheckpoint?: number }> = []; const spyStore = { ...store, - signMessageWithAddress: (address: EthAddress, message: Buffer32, context: any) => { + signTypedDataWithAddress: (address: EthAddress, typedData: any, context: any) => { capturedContexts.push({ dutyType: context.dutyType, blockIndexWithinCheckpoint: context.blockIndexWithinCheckpoint, }); - return store.signMessageWithAddress(address, message, context); + return store.signTypedDataWithAddress(address, typedData, context); }, getAddress: (index: number) => store.getAddress(index), getAddresses: () => store.getAddresses(), }; - const spyService = new ValidationService(spyStore as any); + const spyService = new ValidationService(spyStore as any, TEST_COORDINATION_SIGNATURE_CONTEXT); // Create checkpoint proposal with the already-signed block proposal const proposal = await spyService.createCheckpointProposal( diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index 2aaf4db496e9..958fb18851ef 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -1,11 +1,9 @@ import { type CheckpointNumber, IndexWithinCheckpoint, type SlotNumber } from '@aztec/foundation/branded-types'; -import { Buffer32 } from '@aztec/foundation/buffer'; -import { keccak256 } from '@aztec/foundation/crypto/keccak'; import { Fr } from '@aztec/foundation/curves/bn254'; import type { EthAddress } from '@aztec/foundation/eth-address'; import type { Signature } from '@aztec/foundation/eth-signature'; import { createLogger } from '@aztec/foundation/log'; -import type { CommitteeAttestationsAndSigners 
} from '@aztec/stdlib/block'; +import { CommitteeAttestationsAndSigners } from '@aztec/stdlib/block'; import { BlockProposal, type BlockProposalOptions, @@ -14,7 +12,8 @@ import { type CheckpointProposalCore, type CheckpointProposalOptions, ConsensusPayload, - SignatureDomainSeparator, + type CoordinationSignatureContext, + getCoordinationSignatureTypedData, } from '@aztec/stdlib/p2p'; import type { CheckpointHeader } from '@aztec/stdlib/rollup'; import type { BlockHeader, Tx } from '@aztec/stdlib/tx'; @@ -26,6 +25,7 @@ import type { ValidatorKeyStore } from '../key_store/interface.js'; export class ValidationService { constructor( private keyStore: ValidatorKeyStore, + private signatureContext: CoordinationSignatureContext, private log = createLogger('validator:validation-service'), ) {} @@ -62,8 +62,14 @@ export class ValidationService { // Create a signer that uses the appropriate address const address = proposerAttesterAddress ?? this.keyStore.getAddress(0); - const payloadSigner = (payload: Buffer32, context: SigningContext) => - this.keyStore.signMessageWithAddress(address, payload, context); + const payloadSigner = ( + typedData: Parameters[1], + context: SigningContext, + ) => this.keyStore.signTypedDataWithAddress(address, typedData, context); + const txsSigner = ( + typedData: Parameters[1], + context: SigningContext, + ) => this.keyStore.signTypedDataWithAddress(address, typedData, context); return BlockProposal.createProposalFromSigner( blockHeader, @@ -73,7 +79,9 @@ export class ValidationService { archive, txs.map(tx => tx.getTxHash()), options.publishFullTxs ? 
txs : undefined, + this.signatureContext, payloadSigner, + txsSigner, ); } @@ -106,9 +114,12 @@ export class ValidationService { } // Create a signer that takes payload and context, and uses the appropriate address - const payloadSigner = (payload: Buffer32, context: SigningContext) => { + const payloadSigner = ( + typedData: Parameters[1], + context: SigningContext, + ) => { const address = proposerAttesterAddress ?? this.keyStore.getAddress(0); - return this.keyStore.signMessageWithAddress(address, payload, context); + return this.keyStore.signTypedDataWithAddress(address, typedData, context); }; return CheckpointProposal.createProposalFromSigner( @@ -117,6 +128,7 @@ export class ValidationService { checkpointNumber, feeAssetPriceModifier, lastBlockProposal, + this.signatureContext, payloadSigner, ); } @@ -137,10 +149,13 @@ export class ValidationService { checkpointNumber: CheckpointNumber, ): Promise { // Create the attestation payload from the checkpoint proposal - const payload = new ConsensusPayload(proposal.checkpointHeader, proposal.archive, proposal.feeAssetPriceModifier); - const buf = Buffer32.fromBuffer( - keccak256(payload.getPayloadToSign(SignatureDomainSeparator.checkpointAttestation)), + const payload = new ConsensusPayload( + proposal.checkpointHeader, + proposal.archive, + proposal.feeAssetPriceModifier, + this.signatureContext, ); + const typedData = getCoordinationSignatureTypedData(payload); const context: SigningContext = { slot: proposal.slotNumber, @@ -151,8 +166,7 @@ export class ValidationService { // Sign each attestor in parallel, catching HA errors per-attestor const results = await Promise.allSettled( attestors.map(async attestor => { - const sig = await this.keyStore.signMessageWithAddress(attestor, buf, context); - // return new BlockAttestation(proposal.payload, sig, proposal.signature); + const sig = await this.keyStore.signTypedDataWithAddress(attestor, typedData, context); return new CheckpointAttestation(payload, sig, 
proposal.signature); }), ); @@ -199,9 +213,7 @@ export class ValidationService { dutyType: DutyType.ATTESTATIONS_AND_SIGNERS, }; - const buf = Buffer32.fromBuffer( - keccak256(attestationsAndSigners.getPayloadToSign(SignatureDomainSeparator.attestationsAndSigners)), - ); - return this.keyStore.signMessageWithAddress(proposer, buf, context); + const typedData = getCoordinationSignatureTypedData(attestationsAndSigners); + return this.keyStore.signTypedDataWithAddress(proposer, typedData, context); } } diff --git a/yarn-project/validator-client/src/factory.ts b/yarn-project/validator-client/src/factory.ts index 6e9c4fef8494..4f08c7af681a 100644 --- a/yarn-project/validator-client/src/factory.ts +++ b/yarn-project/validator-client/src/factory.ts @@ -32,6 +32,10 @@ export function createProposalHandler( const blockProposalValidator = new BlockProposalValidator(deps.epochCache, { txsPermitted: !config.disableTransactions, maxTxsPerBlock: config.validateMaxTxsPerBlock ?? config.validateMaxTxsPerCheckpoint, + signatureContext: { + chainId: config.l1ChainId, + rollupAddress: config.l1Contracts.rollupAddress, + }, }); return new ProposalHandler( deps.checkpointsBuilder, diff --git a/yarn-project/validator-client/src/proposal_handler.test.ts b/yarn-project/validator-client/src/proposal_handler.test.ts index 9053fa5af58f..2f656be74f25 100644 --- a/yarn-project/validator-client/src/proposal_handler.test.ts +++ b/yarn-project/validator-client/src/proposal_handler.test.ts @@ -14,7 +14,12 @@ import type { ITxProvider, ValidatorClientFullConfig, WorldStateSynchronizer } f import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; import { accumulateCheckpointOutHashes } from '@aztec/stdlib/messaging'; import { CheckpointHeader } from '@aztec/stdlib/rollup'; -import { makeBlockHeader, makeCheckpointHeader, makeCheckpointProposal } from '@aztec/stdlib/testing'; +import { + TEST_COORDINATION_SIGNATURE_CONTEXT, + makeBlockHeader, + makeCheckpointHeader, + 
makeCheckpointProposal, +} from '@aztec/stdlib/testing'; import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees'; import { GlobalVariables } from '@aztec/stdlib/tx'; @@ -77,7 +82,10 @@ describe('ProposalHandler checkpoint validation', () => { dateProvider = new TestDateProvider(); metrics = mock(); - config = {} as ValidatorClientFullConfig; + config = { + l1ChainId: TEST_COORDINATION_SIGNATURE_CONTEXT.chainId, + l1Contracts: { rollupAddress: TEST_COORDINATION_SIGNATURE_CONTEXT.rollupAddress }, + } as ValidatorClientFullConfig; handler = new ProposalHandler( checkpointsBuilder, @@ -189,8 +197,8 @@ describe('ProposalHandler checkpoint validation', () => { checkpointHandler = handler; }); - const archiver = mock>(); - archiver.setProposedCheckpoint.mockResolvedValue(undefined); + const archiver = mock>(); + archiver.addProposedCheckpoint.mockResolvedValue(undefined); const blockData = { checkpointNumber: CheckpointNumber(3), @@ -206,7 +214,7 @@ describe('ProposalHandler checkpoint validation', () => { handler.register(p2p, true, archiver); await checkpointHandler!(proposal, {} as any); - expect(archiver.setProposedCheckpoint).toHaveBeenCalled(); + expect(archiver.addProposedCheckpoint).toHaveBeenCalled(); expect(metrics.recordCheckpointProposalToPipelinedStateDuration).toHaveBeenCalledWith(expect.any(Number)); }); }); diff --git a/yarn-project/validator-client/src/proposal_handler.ts b/yarn-project/validator-client/src/proposal_handler.ts index e7fe6624fb5a..9afb13bddcd8 100644 --- a/yarn-project/validator-client/src/proposal_handler.ts +++ b/yarn-project/validator-client/src/proposal_handler.ts @@ -96,7 +96,7 @@ export class ProposalHandler { }; /** Archiver reference for setting proposed checkpoints (pipelining). Set via register(). */ - private archiver?: Pick; + private archiver?: Pick; /** Returns current validator addresses for own-proposal detection. Set via register(). 
*/ private getOwnValidatorAddresses?: () => string[]; @@ -132,7 +132,7 @@ export class ProposalHandler { register( p2pClient: P2P, shouldReexecute: boolean, - archiver?: Pick, + archiver?: Pick, getOwnValidatorAddresses?: () => string[], ): ProposalHandler { this.archiver = archiver; @@ -985,7 +985,7 @@ export class ProposalHandler { return false; } - await this.archiver.setProposedCheckpoint({ + await this.archiver.addProposedCheckpoint({ header: proposal.checkpointHeader, checkpointNumber: blockData.checkpointNumber, startBlock: BlockNumber(blockData.header.getBlockNumber() - blockData.indexWithinCheckpoint), @@ -1023,7 +1023,7 @@ export class ProposalHandler { } if (blockData) { - await this.archiver.setProposedCheckpoint({ + await this.archiver.addProposedCheckpoint({ header: proposal.checkpointHeader, checkpointNumber: blockData.checkpointNumber, startBlock: BlockNumber(blockData.header.getBlockNumber() - blockData.indexWithinCheckpoint), diff --git a/yarn-project/validator-client/src/validator.ha.integration.test.ts b/yarn-project/validator-client/src/validator.ha.integration.test.ts index 5ebe7d512bcf..e3b523d13493 100644 --- a/yarn-project/validator-client/src/validator.ha.integration.test.ts +++ b/yarn-project/validator-client/src/validator.ha.integration.test.ts @@ -20,7 +20,13 @@ import type { L2BlockSink, L2BlockSource } from '@aztec/stdlib/block'; import type { SlasherConfig, ValidatorClientFullConfig, WorldStateSynchronizer } from '@aztec/stdlib/interfaces/server'; import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging'; import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging'; -import { makeBlockHeader, makeCheckpointHeader, makeCheckpointProposal, mockTx } from '@aztec/stdlib/testing'; +import { + TEST_COORDINATION_SIGNATURE_CONTEXT, + makeBlockHeader, + makeCheckpointHeader, + makeCheckpointProposal, + mockTx, +} from '@aztec/stdlib/testing'; import { TxHash } from '@aztec/stdlib/tx'; import { type TelemetryClient, 
getTelemetryClient } from '@aztec/telemetry-client'; import { INSERT_SCHEMA_VERSION, SCHEMA_SETUP, SCHEMA_VERSION } from '@aztec/validator-ha-signer/db'; @@ -123,7 +129,7 @@ describe('ValidatorClient HA Integration', () => { }; keyStoreManager = new KeystoreManager(keyStore); - rollupAddress = EthAddress.random(); + rollupAddress = TEST_COORDINATION_SIGNATURE_CONTEXT.rollupAddress; // Create 5 HA validator instances for use across all tests const baseConfig: ValidatorClientConfig & @@ -137,6 +143,7 @@ describe('ValidatorClient HA Integration', () => { disabledValidators: [], slashBroadcastedInvalidBlockPenalty: 1n, l1Contracts: { rollupAddress }, + l1ChainId: TEST_COORDINATION_SIGNATURE_CONTEXT.chainId, slashDuplicateProposalPenalty: 1n, slashDuplicateAttestationPenalty: 1n, haSigningEnabled: true, @@ -200,6 +207,7 @@ describe('ValidatorClient HA Integration', () => { const blockProposalValidator = new BlockProposalValidator(epochCache, { txsPermitted: true, maxTxsPerBlock: undefined, + signatureContext: TEST_COORDINATION_SIGNATURE_CONTEXT, }); const proposalHandler = new ProposalHandler( checkpointsBuilder, diff --git a/yarn-project/validator-client/src/validator.integration.test.ts b/yarn-project/validator-client/src/validator.integration.test.ts index 9c0abcdd80c7..db04d8a8ba9d 100644 --- a/yarn-project/validator-client/src/validator.integration.test.ts +++ b/yarn-project/validator-client/src/validator.integration.test.ts @@ -36,6 +36,7 @@ import { getGenesisValues } from '@aztec/world-state/testing'; import { describe, expect, it, jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; +import { hashTypedData } from 'viem'; import { generatePrivateKey } from 'viem/accounts'; import { CheckpointBuilder, FullNodeCheckpointsBuilder } from './checkpoint_builder.js'; @@ -162,6 +163,7 @@ describe('ValidatorClient Integration', () => { const validator = await ValidatorClient.new( { l1Contracts: { rollupAddress }, + l1ChainId: 
chainId.toNumber(), validatorPrivateKeys: new SecretValue([privateKey]), attestationPollingIntervalMs: 100, disableValidator: false, @@ -545,7 +547,8 @@ describe('ValidatorClient Integration', () => { CheckpointNumber(1), 0n, undefined, - payload => Promise.resolve(proposerSigner.sign(payload)), + { chainId: chainId.toNumber(), rollupAddress }, + typedData => Promise.resolve(proposerSigner.sign(Buffer32.fromString(hashTypedData(typedData)))), ); await attestorValidateBlocks(blocks); diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts index c4e91904c1ef..f3d66d088920 100644 --- a/yarn-project/validator-client/src/validator.test.ts +++ b/yarn-project/validator-client/src/validator.test.ts @@ -35,6 +35,7 @@ import type { SlasherConfig, WorldStateSynchronizer } from '@aztec/stdlib/interf import { type L1ToL2MessageSource, computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging'; import type { BlockProposal } from '@aztec/stdlib/p2p'; import { + TEST_COORDINATION_SIGNATURE_CONTEXT, makeBlockHeader, makeBlockProposal, makeCheckpointAttestation, @@ -182,7 +183,8 @@ describe('ValidatorClient', () => { slashDuplicateAttestationPenalty: 1n, disableTransactions: false, haSigningEnabled: false, - l1Contracts: { rollupAddress: EthAddress.random() }, + l1ChainId: TEST_COORDINATION_SIGNATURE_CONTEXT.chainId, + l1Contracts: { rollupAddress: TEST_COORDINATION_SIGNATURE_CONTEXT.rollupAddress }, nodeId: 'test-node-id', pollingIntervalMs: 1000, signingTimeoutMs: 1000, diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 1181558d5d92..88de515cf296 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -30,6 +30,7 @@ import { CheckpointProposal, type CheckpointProposalCore, type CheckpointProposalOptions, + type CoordinationSignatureContext, } from '@aztec/stdlib/p2p'; import type { 
CheckpointHeader } from '@aztec/stdlib/rollup'; import type { BlockHeader, Tx } from '@aztec/stdlib/tx'; @@ -115,7 +116,11 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) this.tracer = telemetry.getTracer('Validator'); this.metrics = new ValidatorMetrics(telemetry); - this.validationService = new ValidationService(keyStore, this.log.createChild('validation-service')); + this.validationService = new ValidationService( + keyStore, + this.getSignatureContext(), + this.log.createChild('validation-service'), + ); // Refresh epoch cache every second to trigger alert if participation in committee changes this.epochCacheUpdateLoop = new RunningPromise(this.handleEpochCommitteeUpdate.bind(this), this.log, 1000); @@ -196,6 +201,10 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) const blockProposalValidator = new BlockProposalValidator(epochCache, { txsPermitted: !config.disableTransactions, maxTxsPerBlock: config.validateMaxTxsPerBlock, + signatureContext: { + chainId: config.l1ChainId, + rollupAddress: config.l1Contracts.rollupAddress, + }, }); const proposalHandler = new ProposalHandler( checkpointsBuilder, @@ -275,6 +284,13 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) return this.keyStore.signTypedDataWithAddress(addr, msg, context); } + private getSignatureContext(): CoordinationSignatureContext { + return { + chainId: this.config.l1ChainId, + rollupAddress: this.config.l1Contracts.rollupAddress, + }; + } + public getCoinbaseForAttestor(attestor: EthAddress): EthAddress { return this.keyStore.getCoinbaseAddress(attestor); } @@ -294,7 +310,11 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) public reloadKeystore(newManager: KeystoreManager): void { const newAdapter = NodeKeystoreAdapter.fromKeyStoreManager(newManager); this.keyStore = new HAKeyStore(newAdapter, this.slashingProtectionSigner); - this.validationService = new 
ValidationService(this.keyStore, this.log.createChild('validation-service')); + this.validationService = new ValidationService( + this.keyStore, + this.getSignatureContext(), + this.log.createChild('validation-service'), + ); } public async start() { @@ -871,7 +891,9 @@ export class ValidatorClient extends (EventEmitter as new () => WatcherEmitter) attestation => { if (!attestation.archive.equals(proposal.archive)) { this.log.warn( - `Received attestation for slot ${slot} with mismatched archive from ${attestation.getSender()?.toString()}`, + `Received attestation for slot ${slot} with mismatched archive from ${attestation + .getSender() + ?.toString()}`, { attestationArchive: attestation.archive.toString(), proposalArchive: proposal.archive.toString() }, ); return false; diff --git a/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts b/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts index e68155d67a81..1e4c329fbc65 100644 --- a/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts +++ b/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts @@ -413,7 +413,7 @@ export abstract class BaseWallet implements Wallet { try { blockHeader = await this.pxe.getSyncedBlockHeader(); } catch { - blockHeader = (await this.aztecNode.getBlockHeader())!; + blockHeader = (await this.aztecNode.getBlockHeader('latest'))!; } const simulationOrigin = opts.from === NO_FROM ? 
AztecAddress.ZERO : opts.from; diff --git a/yarn-project/world-state/src/synchronizer/config.ts b/yarn-project/world-state/src/synchronizer/config.ts index e0a08a43f82d..c4b75f07484f 100644 --- a/yarn-project/world-state/src/synchronizer/config.ts +++ b/yarn-project/world-state/src/synchronizer/config.ts @@ -1,4 +1,9 @@ -import { type ConfigMappingsType, getConfigFromMappings, numberConfigHelper } from '@aztec/foundation/config'; +import { + type ConfigMappingsType, + getConfigFromMappings, + numberConfigHelper, + optionalNumberConfigHelper, +} from '@aztec/foundation/config'; /** World State synchronizer configuration values. */ export interface WorldStateConfig { @@ -36,47 +41,46 @@ export interface WorldStateConfig { export const worldStateConfigMappings: ConfigMappingsType = { worldStateBlockCheckIntervalMS: { env: 'WS_BLOCK_CHECK_INTERVAL_MS', - parseEnv: (val: string) => +val, - defaultValue: 100, + ...numberConfigHelper(100), description: 'The frequency in which to check.', }, worldStateBlockRequestBatchSize: { env: 'WS_BLOCK_REQUEST_BATCH_SIZE', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), description: 'Size of the batch for each get-blocks request from the synchronizer to the archiver.', }, worldStateDbMapSizeKb: { env: 'WS_DB_MAP_SIZE_KB', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), description: 'The maximum possible size of the world state DB in KB. Overwrites the general dataStoreMapSizeKb.', }, archiveTreeMapSizeKb: { env: 'ARCHIVE_TREE_MAP_SIZE_KB', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), description: 'The maximum possible size of the world state archive tree in KB. Overwrites the general worldStateDbMapSizeKb.', }, nullifierTreeMapSizeKb: { env: 'NULLIFIER_TREE_MAP_SIZE_KB', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), description: 'The maximum possible size of the world state nullifier tree in KB. 
Overwrites the general worldStateDbMapSizeKb.', }, noteHashTreeMapSizeKb: { env: 'NOTE_HASH_TREE_MAP_SIZE_KB', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), description: 'The maximum possible size of the world state note hash tree in KB. Overwrites the general worldStateDbMapSizeKb.', }, messageTreeMapSizeKb: { env: 'MESSAGE_TREE_MAP_SIZE_KB', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), description: 'The maximum possible size of the world state message tree in KB. Overwrites the general worldStateDbMapSizeKb.', }, publicDataTreeMapSizeKb: { env: 'PUBLIC_DATA_TREE_MAP_SIZE_KB', - parseEnv: (val: string) => +val, + ...optionalNumberConfigHelper(), description: 'The maximum possible size of the world state public data tree in KB. Overwrites the general worldStateDbMapSizeKb.', },