Skip to content

Commit 623fe17

Browse files
committed
Update useChunkedEventLogs.ts
1 parent 03963c5 commit 623fe17

File tree

1 file changed

+127
-102
lines changed

1 file changed

+127
-102
lines changed

src/hooks/useChunkedEventLogs.ts

Lines changed: 127 additions & 102 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,16 @@
11
"use client";
22

33
import { useState, useEffect, useRef, useCallback } from "react";
4-
import { type Address, parseAbiItem } from "viem";
4+
import { type Address, type Hex, parseAbiItem, decodeEventLog } from "viem";
55
import { usePublicClient } from "wagmi";
66
import { DEPLOYMENT_BLOCK } from "@/config/contracts";
77

88
const CHUNK_SIZE = BigInt(9999);
9-
const MAX_CONCURRENT = 3;
10-
const DELAY_BETWEEN_BATCHES = 200;
9+
const MAX_CONCURRENT = 6;
10+
const DELAY_BETWEEN_BATCHES = 50;
11+
const FLUSH_INTERVAL = 3; // flush UI every N batches
1112
const BLOCKS_PER_DAY = BigInt(43200); // ~2s block time on Base
13+
const BACKOFF_DELAYS = [3000, 6000, 12000, 20000]; // escalating delays on consecutive errors
1214

1315
export type EventRow = {
1416
type: string;
@@ -44,6 +46,14 @@ const EVENT_WITHDRAWALS = parseAbiItem(
4446
"event TokensWithdrawn(uint32 indexed programId, address indexed wallet, uint256 amount)"
4547
);
4648

49+
const ALL_EVENTS = [EVENT_DEPOSITS, EVENT_TRANSFERS, EVENT_PARENT_TRANSFERS, EVENT_WITHDRAWALS] as const;
50+
51+
// Event name constants used to dispatch on decoded.eventName (NOT topic0 hashes)
52+
const TOPIC_DEPOSIT = EVENT_DEPOSITS.name;
53+
const TOPIC_TRANSFER = EVENT_TRANSFERS.name;
54+
const TOPIC_PARENT_TRANSFER = EVENT_PARENT_TRANSFERS.name;
55+
const TOPIC_WITHDRAWAL = EVENT_WITHDRAWALS.name;
56+
4757
function delay(ms: number): Promise<void> {
4858
return new Promise((r) => setTimeout(r, ms));
4959
}
@@ -69,11 +79,70 @@ async function fetchWithRetry<T>(
6979
throw new Error("Max retries exceeded");
7080
}
7181

82+
function parseLog(log: { topics: Hex[]; data: Hex; blockNumber: bigint; transactionHash: Hex }): EventRow | null {
83+
if (!log.topics.length) return null;
84+
try {
85+
const decoded = decodeEventLog({
86+
abi: ALL_EVENTS,
87+
data: log.data,
88+
topics: log.topics as [Hex, ...Hex[]],
89+
});
90+
91+
const base = { blockNumber: log.blockNumber, txHash: log.transactionHash };
92+
const args = decoded.args as Record<string, unknown>;
93+
94+
switch (decoded.eventName) {
95+
case TOPIC_DEPOSIT:
96+
return {
97+
...base,
98+
type: "Deposit",
99+
depositId: String(args.depositId ?? ""),
100+
programId: Number(args.programId),
101+
wallet: String(args.wallet ?? ""),
102+
amount: BigInt(args.amount as bigint ?? 0),
103+
rewardType: Number(args.rewardType ?? 0),
104+
note: String(args.note ?? ""),
105+
};
106+
case TOPIC_TRANSFER:
107+
return {
108+
...base,
109+
type: "Transfer",
110+
programId: Number(args.programId),
111+
wallet: String(args.from ?? ""),
112+
amount: BigInt(args.amount as bigint ?? 0),
113+
rewardType: Number(args.rewardType ?? 0),
114+
note: String(args.note ?? ""),
115+
};
116+
case TOPIC_PARENT_TRANSFER:
117+
return {
118+
...base,
119+
type: "TransferToParent",
120+
programId: Number(args.programId),
121+
wallet: String(args.from ?? ""),
122+
amount: BigInt(args.amount as bigint ?? 0),
123+
note: String(args.note ?? ""),
124+
};
125+
case TOPIC_WITHDRAWAL:
126+
return {
127+
...base,
128+
type: "Withdrawal",
129+
programId: Number(args.programId),
130+
wallet: String(args.wallet ?? ""),
131+
amount: BigInt(args.amount as bigint ?? 0),
132+
};
133+
default:
134+
return null;
135+
}
136+
} catch {
137+
return null; // unknown event from contract, skip
138+
}
139+
}
140+
72141
export function useChunkedEventLogs(options: {
73142
address: Address;
74143
programId?: number;
75144
timeRange: TimeRange;
76-
trigger: number; // increment to start a new fetch
145+
trigger: number;
77146
}) {
78147
const { address, programId, timeRange, trigger } = options;
79148
const publicClient = usePublicClient();
@@ -139,121 +208,77 @@ export function useChunkedEventLogs(options: {
139208

140209
const allRows: EventRow[] = [];
141210
let completed = 0;
211+
let consecutiveErrors = 0;
142212
const pid = programId !== undefined ? Number(programId) : undefined;
143213

144214
for (let i = 0; i < chunks.length; i += MAX_CONCURRENT) {
145215
if (signal.aborted) break;
146216

217+
// Apply backoff delay if we had consecutive errors
218+
if (consecutiveErrors > 0) {
219+
const backoffIdx = Math.min(consecutiveErrors - 1, BACKOFF_DELAYS.length - 1);
220+
await delay(BACKOFF_DELAYS[backoffIdx]);
221+
}
222+
147223
const batch = chunks.slice(i, i + MAX_CONCURRENT);
148224

149-
const batchResults = await Promise.all(
150-
batch.map(([from, to]) =>
151-
fetchWithRetry(async () => {
152-
const [deposits, transfers, parentTransfers, withdrawals] =
153-
await Promise.all([
154-
publicClient.getLogs({
155-
address,
156-
event: EVENT_DEPOSITS,
157-
args: pid !== undefined ? { programId: pid } : undefined,
158-
fromBlock: from,
159-
toBlock: to,
160-
}),
161-
publicClient.getLogs({
225+
try {
226+
// Single getLogs per chunk — fetch ALL contract events at once
227+
const batchResults = await Promise.all(
228+
batch.map(([from, to]) =>
229+
fetchWithRetry(async () => {
230+
const logs = await publicClient.request({
231+
method: "eth_getLogs",
232+
params: [{
162233
address,
163-
event: EVENT_TRANSFERS,
164-
args: pid !== undefined ? { programId: pid } : undefined,
165-
fromBlock: from,
166-
toBlock: to,
167-
}),
168-
publicClient.getLogs({
169-
address,
170-
event: EVENT_PARENT_TRANSFERS,
171-
args: pid !== undefined ? { programId: pid } : undefined,
172-
fromBlock: from,
173-
toBlock: to,
174-
}),
175-
publicClient.getLogs({
176-
address,
177-
event: EVENT_WITHDRAWALS,
178-
args: pid !== undefined ? { programId: pid } : undefined,
179-
fromBlock: from,
180-
toBlock: to,
181-
}),
182-
]);
183-
184-
const rows: EventRow[] = [];
185-
186-
for (const log of deposits) {
187-
if (!log.args) continue;
188-
rows.push({
189-
type: "Deposit",
190-
depositId: log.args.depositId?.toString(),
191-
programId: Number(log.args.programId),
192-
wallet: log.args.wallet || "",
193-
amount: log.args.amount || BigInt(0),
194-
rewardType: log.args.rewardType,
195-
note: log.args.note,
196-
blockNumber: log.blockNumber,
197-
txHash: log.transactionHash,
198-
});
199-
}
200-
201-
for (const log of transfers) {
202-
if (!log.args) continue;
203-
rows.push({
204-
type: "Transfer",
205-
programId: Number(log.args.programId),
206-
wallet: log.args.from || "",
207-
amount: log.args.amount || BigInt(0),
208-
rewardType: log.args.rewardType,
209-
note: log.args.note,
210-
blockNumber: log.blockNumber,
211-
txHash: log.transactionHash,
212-
});
213-
}
214-
215-
for (const log of parentTransfers) {
216-
if (!log.args) continue;
217-
rows.push({
218-
type: "TransferToParent",
219-
programId: Number(log.args.programId),
220-
wallet: log.args.from || "",
221-
amount: log.args.amount || BigInt(0),
222-
note: log.args.note,
223-
blockNumber: log.blockNumber,
224-
txHash: log.transactionHash,
234+
fromBlock: `0x${from.toString(16)}` as Hex,
235+
toBlock: `0x${to.toString(16)}` as Hex,
236+
}],
225237
});
226-
}
227-
228-
for (const log of withdrawals) {
229-
if (!log.args) continue;
230-
rows.push({
231-
type: "Withdrawal",
232-
programId: Number(log.args.programId),
233-
wallet: log.args.wallet || "",
234-
amount: log.args.amount || BigInt(0),
235-
blockNumber: log.blockNumber,
236-
txHash: log.transactionHash,
237-
});
238-
}
239-
240-
return rows;
241-
}, signal)
242-
)
243-
);
244238

245-
for (const rows of batchResults) {
246-
allRows.push(...rows);
239+
const rows: EventRow[] = [];
240+
for (const log of logs as Array<{ topics: Hex[]; data: Hex; blockNumber: Hex; transactionHash: Hex }>) {
241+
const parsed = parseLog({
242+
topics: log.topics,
243+
data: log.data,
244+
blockNumber: BigInt(log.blockNumber),
245+
transactionHash: log.transactionHash,
246+
});
247+
if (!parsed) continue;
248+
// Client-side programId filter
249+
if (pid !== undefined && parsed.programId !== pid) continue;
250+
rows.push(parsed);
251+
}
252+
return rows;
253+
}, signal)
254+
)
255+
);
256+
257+
// Success — reset backoff
258+
consecutiveErrors = 0;
259+
260+
for (const rows of batchResults) {
261+
allRows.push(...rows);
262+
}
263+
} catch (err: unknown) {
264+
if (signal.aborted) break;
265+
consecutiveErrors++;
266+
// If we've exhausted backoff delays, give up
267+
if (consecutiveErrors > BACKOFF_DELAYS.length) throw err;
268+
// Retry this batch by rewinding the loop index
269+
i -= MAX_CONCURRENT;
270+
continue;
247271
}
248272

249273
completed += batch.length;
250274
setCompletedChunks(completed);
251275
setProgress(completed / chunks.length);
252276

253-
// Flush progressively to state every batch
254-
setEvents([...allRows]);
277+
// Flush to UI periodically, not every batch
278+
if (completed % FLUSH_INTERVAL === 0 || completed === chunks.length) {
279+
setEvents([...allRows]);
280+
}
255281

256-
// Polite delay between batches
257282
if (i + MAX_CONCURRENT < chunks.length) {
258283
await delay(DELAY_BETWEEN_BATCHES);
259284
}

0 commit comments

Comments
 (0)