Skip to content

Commit a450fe9

Browse files
committed
store: Thread cache_size to ChainStore
Pass the per-chain cache_size configuration from TOML config through StoreBuilder and BlockStore to ChainStore, where it will be used to determine which blocks should be treated as uncached.
1 parent dd4538e commit a450fe9

3 files changed

Lines changed: 28 additions & 10 deletions

File tree

node/src/store_builder.rs

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,8 @@ pub struct StoreBuilder {
2424
subscription_manager: Arc<SubscriptionManager>,
2525
chain_head_update_listener: Arc<PostgresChainHeadUpdateListener>,
2626
/// Map network names to the shards where they are/should be stored
27-
chains: HashMap<String, ShardName>,
27+
/// and their cache_size setting
28+
chains: HashMap<String, (ShardName, i32)>,
2829
pub coord: Arc<PoolCoordinator>,
2930
registry: Arc<MetricsRegistry>,
3031
}
@@ -65,7 +66,7 @@ impl StoreBuilder {
6566
let chains = HashMap::from_iter(config.chains.chains.iter().map(|(name, chain)| {
6667
let shard = ShardName::new(chain.shard.to_string())
6768
.expect("config validation catches invalid names");
68-
(name.to_string(), shard)
69+
(name.to_string(), (shard, chain.cache_size))
6970
}));
7071

7172
let chain_head_update_listener = Arc::new(PostgresChainHeadUpdateListener::new(
@@ -177,15 +178,18 @@ impl StoreBuilder {
177178
logger: &Logger,
178179
pools: HashMap<ShardName, ConnectionPool>,
179180
subgraph_store: Arc<SubgraphStore>,
180-
chains: HashMap<String, ShardName>,
181+
chains: HashMap<String, (ShardName, i32)>,
181182
networks: Vec<String>,
182183
registry: Arc<MetricsRegistry>,
183184
) -> Arc<DieselStore> {
184185
let networks = networks
185186
.into_iter()
186187
.map(|name| {
187-
let shard = chains.get(&name).unwrap_or(&*PRIMARY_SHARD).clone();
188-
(name, shard)
188+
let (shard, cache_size) = chains
189+
.get(&name)
190+
.cloned()
191+
.unwrap_or_else(|| (PRIMARY_SHARD.clone(), 500));
192+
(name, shard, cache_size)
189193
})
190194
.collect();
191195

store/postgres/src/block_store.rs

Lines changed: 14 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -218,7 +218,9 @@ pub struct Inner {
218218
/// previous state in the database.
219219
stores: RwLock<HashMap<String, Arc<ChainStore>>>,
220220
// We keep this information so we can create chain stores during startup
221-
shards: Vec<(String, Shard)>,
221+
shards: Vec<(String, Shard, BlockNumber)>,
222+
// Per-chain cache_size settings
223+
cache_sizes: HashMap<String, BlockNumber>,
222224
pools: HashMap<Shard, ConnectionPool>,
223225
sender: Arc<NotificationSender>,
224226
mirror: PrimaryMirror,
@@ -240,8 +242,8 @@ impl BlockStore {
240242
/// a chain uses the pool from `pools` for the given shard.
241243
pub async fn new(
242244
logger: Logger,
243-
// (network, shard)
244-
shards: Vec<(String, Shard)>,
245+
// (network, shard, cache_size)
246+
shards: Vec<(String, Shard, BlockNumber)>,
245247
// shard -> pool
246248
pools: HashMap<Shard, ConnectionPool>,
247249
sender: Arc<NotificationSender>,
@@ -257,6 +259,10 @@ impl BlockStore {
257259
.await?;
258260
let chain_head_cache = TimedCache::new(CHAIN_HEAD_CACHE_TTL);
259261
let chains = shards.clone();
262+
let cache_sizes: HashMap<String, BlockNumber> = shards
263+
.iter()
264+
.map(|(name, _, cache_size)| (name.clone(), *cache_size))
265+
.collect();
260266

261267
let inner = Arc::new(Inner {
262268
logger,
@@ -267,11 +273,12 @@ impl BlockStore {
267273
mirror,
268274
chain_head_cache,
269275
chain_store_metrics,
276+
cache_sizes,
270277
});
271278
let block_store = Self { inner };
272279

273280
// For each configured chain, add a chain store
274-
for (chain_name, shard) in chains {
281+
for (chain_name, shard, _cache_size) in chains {
275282
if let Some(chain) = existing_chains
276283
.iter()
277284
.find(|chain| chain.name == chain_name)
@@ -363,6 +370,7 @@ impl BlockStore {
363370
);
364371
let ident = chain.network_identifier()?;
365372
let logger = self.logger.new(o!("network" => chain.name.clone()));
373+
let cache_size = self.cache_sizes.get(&chain.name).copied().unwrap_or(500);
366374
let store = ChainStore::new(
367375
logger,
368376
chain.name.clone(),
@@ -371,6 +379,7 @@ impl BlockStore {
371379
pool,
372380
ENV_VARS.store.recent_blocks_cache_capacity,
373381
self.chain_store_metrics.clone(),
382+
cache_size,
374383
);
375384
if create {
376385
store.create(&ident).await?;
@@ -565,7 +574,7 @@ impl BlockStore {
565574
let shard = self
566575
.shards
567576
.iter()
568-
.find_map(|(chain_id, shard)| {
577+
.find_map(|(chain_id, shard, _cache_size)| {
569578
if chain_id.as_str().eq(network) {
570579
Some(shard)
571580
} else {

store/postgres/src/chain_store.rs

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2347,6 +2347,9 @@ pub struct ChainStore {
23472347
chain_head_ptr_cache: ChainHeadPtrCache,
23482348
/// Herd cache to prevent thundering herd on chain_head_ptr() lookups
23492349
chain_head_ptr_herd: HerdCache<Arc<Result<Option<BlockPtr>, StoreError>>>,
2350+
/// Number of blocks from chain head for which to keep block data cached.
2351+
/// Used with `GRAPH_STORE_IGNORE_BLOCK_CACHE` to simulate block data eviction.
2352+
cache_size: BlockNumber,
23502353
}
23512354

23522355
impl ChainStore {
@@ -2358,6 +2361,7 @@ impl ChainStore {
23582361
pool: ConnectionPool,
23592362
recent_blocks_cache_capacity: usize,
23602363
metrics: Arc<ChainStoreMetrics>,
2364+
cache_size: BlockNumber,
23612365
) -> Self {
23622366
let recent_blocks_cache =
23632367
RecentBlocksCache::new(recent_blocks_cache_capacity, chain.clone(), metrics.clone());
@@ -2378,6 +2382,7 @@ impl ChainStore {
23782382
ancestor_cache,
23792383
chain_head_ptr_cache,
23802384
chain_head_ptr_herd,
2385+
cache_size,
23812386
}
23822387
}
23832388

0 commit comments

Comments (0)