Skip to content

Commit b6e48a8

Browse files
committed
fix: increase timeout duration for topUsers fetching and implement better caching strategy
1 parent cc47576 commit b6e48a8

3 files changed

Lines changed: 90 additions & 110 deletions

File tree

components/leaderboard/LeaderBoard.tsx

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ export const LeaderBoard: React.FC = () => {
1414
const [isLoading, setIsLoading] = useState(false);
1515
const [leaderBoardData, setLeaderBoardData] = useState<UserStat[] | null>(null);
1616
const [isError, setIsError] = useState(false);
17+
const [retryCount, setRetryCount] = useState(0);
1718

1819
useEffect(() => {
1920
const fetchData = async () => {
@@ -22,19 +23,26 @@ export const LeaderBoard: React.FC = () => {
2223

2324
try {
2425
const response = await axios.get("/api/topUsers", {
25-
timeout: 8000
26+
timeout: 30000, // Increased timeout to 30 seconds
2627
});
2728
setLeaderBoardData(response.data);
2829
} catch (error) {
2930
console.error("Failed to fetch leaderboard data:", error);
3031
setIsError(true);
32+
33+
// Retry logic
34+
if (retryCount < 3) {
35+
setTimeout(() => {
36+
setRetryCount(prev => prev + 1);
37+
}, 2000); // Wait 2 seconds before retrying
38+
}
3139
} finally {
3240
setIsLoading(false);
3341
}
3442
};
3543

3644
fetchData();
37-
}, []);
45+
}, [retryCount]); // Add retryCount as dependency
3846

3947
return (
4048
<div className="text-center">
@@ -90,3 +98,4 @@ export const LeaderBoard: React.FC = () => {
9098

9199

92100

101+

pages/api/topUsers.js

Lines changed: 37 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -1,56 +1,53 @@
1-
/* eslint-disable import/no-anonymous-default-export */
2-
import {
3-
getTopUsersFromDb,
4-
storeTopUsersInDb,
5-
} from "../../utils/fetchTopUsersFromDb";
61
import { fetchTopUsersByPullRequests } from "../../utils/fetchTopUsersByPullRequests";
2+
import { getTopUsersFromDb, storeTopUsersInDb } from "../../utils/database";
73

8-
// GET /api/topUsers — returns the cached leaderboard when available
// (kicking off a background refresh once it is older than an hour) and
// falls back to a live GitHub fetch when the cache is empty.
export default async function handler(req, res) {
  // Allow CDN/proxy caching: serve for 5 minutes, then revalidate stale
  // entries in the background. (This is a caching header, not CORS.)
  res.setHeader('Cache-Control', 's-maxage=300, stale-while-revalidate');

  try {
    console.log('API: Checking DB for cached top users...');
    let data = await getTopUsersFromDb();

    // If we have cached data, return it immediately.
    if (data && data.length > 0) {
      console.log('API: Using cached data from DB');
      res.status(200).json(data);

      // NOTE(review): assumes each cached row carries a `timestamp` field —
      // if it is missing/unparseable, cacheAge is NaN, the comparison is
      // false, and the refresh silently never triggers. Verify the schema.
      const cacheAge = Date.now() - new Date(data[0].timestamp).getTime();
      if (cacheAge > 3600000) { // 1 hour
        // Fire-and-forget: `void` marks the promise as intentionally
        // unawaited (refreshCache logs its own failures). Beware that on
        // serverless hosts the runtime may freeze right after the response
        // is sent, so this background work is best-effort only.
        void refreshCache();
      }
      return;
    }

    // No cached data — fetch fresh data from GitHub.
    console.log('API: No cached data found, fetching from GitHub...');
    data = await fetchTopUsersByPullRequests("fork-commit-merge/fork-commit-merge");

    if (data && data.length > 0) {
      console.log('API: Storing new data in DB...');
      await storeTopUsersInDb(data);
      res.status(200).json(data);
    } else {
      res.status(404).json({ error: 'No data available' });
    }
  } catch (error) {
    console.error('API route error:', error);
    res.status(500).json({ error: 'Internal Server Error' });
  }
}
5541

42+
// Best-effort background refresh: pull the latest leaderboard from GitHub
// and persist it. Failures are logged and swallowed so callers are never
// disrupted by a refresh that goes wrong.
async function refreshCache() {
  try {
    const latest = await fetchTopUsersByPullRequests("fork-commit-merge/fork-commit-merge");
    const hasUsers = Boolean(latest) && latest.length > 0;
    if (hasUsers) {
      await storeTopUsersInDb(latest);
    }
  } catch (error) {
    console.error('Failed to refresh cache:', error);
  }
}
5653

utils/fetchTopUsersByPullRequests.ts

Lines changed: 42 additions & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -10,86 +10,55 @@ export const fetchTopUsersByPullRequests = async (
1010
repo: string
1111
): Promise<UserStat[]> => {
1212
const userStats: { [key: string]: { prCount: number; avatarUrl: string } } = {};
13-
const MAX_CONCURRENT_REQUESTS = 3;
1413
const PER_PAGE = 100;
1514

1615
try {
17-
// First, get the total PR count
18-
const initialResponse = await axios.get(`https://api.github.com/repos/${repo}/pulls`, {
19-
params: {
20-
state: 'closed',
21-
per_page: 1
22-
},
23-
headers: {
24-
Authorization: `token ${process.env.GITHUB_TOKEN}`,
25-
}
26-
});
27-
28-
const linkHeader = initialResponse.headers.link;
29-
const lastPageMatch = linkHeader?.match(/page=(\d+)>; rel="last"/);
30-
const totalPages = lastPageMatch ? parseInt(lastPageMatch[1]) : 1;
31-
32-
// Fetch PRs in batches
33-
for (let i = 0; i < totalPages; i += MAX_CONCURRENT_REQUESTS) {
34-
const requests = [];
35-
36-
// Create batch of concurrent requests
37-
for (let j = 0; j < MAX_CONCURRENT_REQUESTS && (i + j) < totalPages; j++) {
38-
const pageNum = i + j + 1;
39-
requests.push(
40-
axios.get(`https://api.github.com/repos/${repo}/pulls`, {
41-
params: {
42-
state: 'closed',
43-
per_page: PER_PAGE,
44-
page: pageNum
45-
},
46-
headers: {
47-
Authorization: `token ${process.env.GITHUB_TOKEN}`,
48-
},
49-
timeout: 8000
50-
}).catch(error => {
51-
console.error(`Failed to fetch page ${pageNum}:`, error);
52-
return { data: [] };
53-
})
54-
);
55-
}
16+
// Use cursor-based pagination instead of page numbers
17+
let hasNextPage = true;
18+
let cursor = null;
5619

57-
// Wait for batch to complete
58-
const responses = await Promise.all(requests);
59-
60-
// Process the responses
61-
responses.forEach(response => {
62-
if (response.data) {
63-
response.data.forEach((pr: any) => {
64-
if (pr.merged_at) {
65-
const username = pr.user.login;
20+
while (hasNextPage) {
21+
const response: { data: any[]; headers: { link?: string } } = await axios.get(`https://api.github.com/repos/${repo}/pulls`, {
22+
params: {
23+
state: 'closed',
24+
per_page: PER_PAGE,
25+
...(cursor ? { after: cursor } : {}),
26+
},
27+
headers: {
28+
Authorization: `token ${process.env.GITHUB_TOKEN}`,
29+
Accept: 'application/vnd.github.v3+json',
30+
},
31+
timeout: 30000, // 30 seconds timeout
32+
});
6633

67-
// Ignore specific users
68-
if (
69-
username === "dependabot" ||
70-
username === "dependabot[bot]" ||
71-
username === "nikohoffren"
72-
) {
73-
return;
74-
}
34+
const prs = response.data;
7535

76-
const avatarUrl = pr.user.avatar_url;
77-
userStats[username] = {
78-
prCount: (userStats[username]?.prCount || 0) + 1,
79-
avatarUrl,
80-
};
81-
}
82-
});
36+
// Process PRs
37+
prs.forEach((pr: any) => {
38+
if (pr.merged_at) {
39+
const username = pr.user.login;
40+
if (!shouldExcludeUser(username)) {
41+
userStats[username] = {
42+
prCount: (userStats[username]?.prCount || 0) + 1,
43+
avatarUrl: pr.user.avatar_url,
44+
};
45+
}
8346
}
8447
});
8548

86-
// Add a small delay between batches to avoid rate limiting
87-
if (i + MAX_CONCURRENT_REQUESTS < totalPages) {
49+
// Check if there are more pages
50+
const linkHeader = response.headers.link;
51+
hasNextPage = linkHeader?.includes('rel="next"') ?? false;
52+
const nextLink = linkHeader?.match(/<([^>]+)>;\s*rel="next"/)?.[1];
53+
cursor = nextLink ? new URL(nextLink).searchParams.get('after') : null;
54+
55+
// Add delay between requests to avoid rate limiting
56+
if (hasNextPage) {
8857
await new Promise(resolve => setTimeout(resolve, 1000));
8958
}
9059
}
9160

92-
const sortedUsers: UserStat[] = Object.entries(userStats)
61+
return Object.entries(userStats)
9362
.sort(([, a], [, b]) => b.prCount - a.prCount)
9463
.slice(0, 20)
9564
.map(([username, { prCount, avatarUrl }]) => ({
@@ -98,9 +67,14 @@ export const fetchTopUsersByPullRequests = async (
9867
avatarUrl,
9968
}));
10069

101-
return sortedUsers;
10270
} catch (error) {
10371
console.error('Error fetching PRs:', error);
10472
throw error;
10573
}
10674
};
75+
76+
function shouldExcludeUser(username: string): boolean {
77+
const excludedUsers = ['dependabot', 'dependabot[bot]', 'nikohoffren'];
78+
return excludedUsers.includes(username);
79+
}
80+

0 commit comments

Comments
 (0)