Skip to content

Commit e9453b3

Browse files
authored
Merge pull request #515 from MDA2AV/elysia-imp
elysia improvements
2 parents ab9055b + e0530df commit e9453b3

23 files changed

Lines changed: 289 additions & 336 deletions

frameworks/elysia/Dockerfile

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,9 @@ FROM oven/bun AS build
33

44
WORKDIR /app
55

6-
# Cache packages installation (bun.lock intentionally omitted — regenerated in-container)
6+
# Cache packages installation
77
COPY package.json package.json
8+
COPY bun.lock bun.lock
89

910
RUN bun install --production
1011

frameworks/elysia/bun.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

frameworks/elysia/package.json

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,8 @@
22
"name": "httparena-elysia",
33
"private": true,
44
"dependencies": {
5-
"@elysiajs/static": "^1.4.9",
6-
"elysia": "^1.4.28",
7-
"pg": "^8.13.1"
5+
"@elysiajs/static": "^1.4.0",
6+
"elysia": "^1.4.28"
87
},
98
"devDependencies": {
109
"@types/bun": "^1.3.12"

frameworks/elysia/server.ts

Lines changed: 139 additions & 168 deletions
Original file line numberDiff line numberDiff line change
@@ -1,184 +1,155 @@
1-
import { Elysia } from "elysia";
1+
import { Elysia, status } from "elysia";
22
import { staticPlugin } from "@elysiajs/static";
3-
import { readFileSync } from "fs";
4-
import { brotliCompressSync } from "node:zlib";
5-
import cluster from "cluster";
6-
import { availableParallelism } from "os";
73

8-
// Worker count: env override wins, else one per CPU. Each worker costs
9-
// ~150 MB RSS. Override with ELYSIA_WORKERS env var to cap lower on small boxes.
10-
const WORKERS = Math.max(
11-
1,
12-
Math.min(
13-
parseInt(process.env.ELYSIA_WORKERS ?? "", 10) || availableParallelism(),
14-
availableParallelism(),
15-
),
16-
);
4+
import { SQL } from "bun";
175

18-
// Preload dataset for /json (both primary and workers read it, ~1 MB).
19-
const datasetItems: any[] = JSON.parse(
20-
readFileSync("/data/dataset.json", "utf8"),
21-
);
22-
23-
// Resolve the async staticPlugin at real top level (outside the cluster
24-
// conditional) — `bun build --compile` can't handle top-level await inside
25-
// if/else blocks, and we need the plugin fully resolved before .use() so
26-
// its routes register synchronously into the main Elysia chain.
27-
//
28-
// alwaysStatic: false — `true` (the NODE_ENV=production default) pre-registers
29-
// each file as a Bun static route, which requires a fully-buffered body and
30-
// crashes with `Bun.file()` streams. Dynamic routing reads disk per request
31-
// which is also more production-rule compliant.
32-
const staticModule = await staticPlugin({
33-
assets: "/data/static",
34-
prefix: "/static",
35-
etag: false,
36-
alwaysStatic: false,
37-
});
6+
import cluster from "cluster";
7+
import { availableParallelism } from "os";
8+
import { readFileSync } from "fs";
389

3910
if (cluster.isPrimary) {
40-
for (let i = 0; i < WORKERS; i++) cluster.fork();
41-
cluster.on("exit", (w) => {
42-
console.error(`worker ${w.process.pid} exited, respawning`);
43-
cluster.fork();
44-
});
11+
const workers = availableParallelism();
12+
for (let i = 0; i < workers; i++) cluster.fork();
4513
} else {
14+
const datasetItems: any[] = JSON.parse(
15+
readFileSync("/data/dataset.json", "utf8"),
16+
);
4617

47-
// PostgreSQL pool for /async-db (node-postgres via Bun's node_modules resolver).
48-
// Pool size per worker is DATABASE_MAX_CONN / WORKERS so the total across the
49-
// cluster matches the server's configured max_connections (256 by default).
50-
let pgPool: any = null;
51-
{
52-
const dbUrl = process.env.DATABASE_URL;
53-
if (dbUrl) {
54-
try {
55-
const { Pool } = require("pg");
56-
const totalMax = parseInt(process.env.DATABASE_MAX_CONN ?? "", 10) || 256;
57-
const perWorker = Math.max(1, Math.floor(totalMax / WORKERS));
58-
pgPool = new Pool({ connectionString: dbUrl, max: perWorker });
59-
} catch (_) {}
60-
}
61-
}
18+
// Per-worker pool, capped so workers × perWorker stays under Postgres
19+
// max_connections. 240 = 256 default minus a reserve for admin/meta.
20+
const workers = availableParallelism();
21+
const totalMax = parseInt(process.env.DATABASE_MAX_CONN ?? "", 10) || 256;
22+
const perWorker = Math.max(1, Math.floor(Math.min(totalMax, 240) / workers));
23+
const databaseURL = process.env.DATABASE_URL;
24+
const pg = databaseURL
25+
? new SQL({ url: databaseURL, max: perWorker })
26+
: undefined;
27+
pg?.connect().catch((e) => console.error("pg connect failed:", e));
6228

63-
const EMPTY_DB_JSON = '{"items":[],"count":0}';
29+
new Elysia()
30+
.headers({
31+
server: "Elysia",
32+
})
33+
.use(staticPlugin({
34+
assets: "/data/static",
35+
prefix: "/static",
36+
}))
37+
.get("/pipeline", ({ set }) => {
38+
set.headers["content-type"] = "text/plain";
39+
return "ok";
40+
})
41+
.get("/baseline11", ({ query }) => {
42+
let sum = 0;
43+
for (const v of Object.values(query)) sum += +v || 0;
44+
return sum;
45+
})
46+
.post(
47+
"/baseline11",
48+
({ query, body }) => {
49+
let total = 0;
50+
for (const v of Object.values(query)) total += +v || 0;
6451

65-
new Elysia()
66-
.get("/pipeline", () => new Response("ok", { headers: { "content-type": "text/plain" } }))
67-
.get("/baseline11", ({ query }) => {
68-
let sum = 0;
69-
for (const v of Object.values(query)) sum += parseInt(v as string, 10) || 0;
70-
return new Response(String(sum), {
71-
headers: { "content-type": "text/plain" },
72-
});
73-
})
74-
.post("/baseline11", async ({ query, request }) => {
75-
let total = 0;
76-
for (const v of Object.values(query)) total += parseInt(v as string, 10) || 0;
77-
const body = await request.text();
78-
const n = parseInt(body.trim(), 10);
79-
if (!isNaN(n)) total += n;
80-
return new Response(String(total), {
81-
headers: { "content-type": "text/plain" },
82-
});
83-
})
84-
.get("/baseline2", ({ query }) => {
85-
let sum = 0;
86-
for (const v of Object.values(query)) sum += parseInt(v as string, 10) || 0;
87-
return new Response(String(sum), {
88-
headers: { "content-type": "text/plain" },
89-
});
90-
})
91-
.get("/json/:count", ({ params, query, headers, set }) => {
92-
const count = Math.max(
93-
0,
94-
Math.min(+params.count || 0, datasetItems.length),
95-
);
96-
const m = query.m ? +query.m || 1 : 1;
52+
const n = +(body as string);
53+
if (!isNaN(n)) total += n;
54+
55+
return total;
56+
},
57+
{
58+
parse: "text",
59+
},
60+
)
61+
.get("/baseline2", ({ query }) => {
62+
let sum = 0;
63+
for (const v of Object.values(query)) sum += +v || 0;
64+
return sum;
65+
})
66+
.get("/json/:count", ({ params, query, headers, set }) => {
67+
const count = Math.max(
68+
0,
69+
Math.min(+params.count || 0, datasetItems.length),
70+
);
71+
const m = query.m ? +query.m || 1 : 1;
9772

98-
const result = {
99-
count,
100-
items: datasetItems.slice(0, count).map((d: any) => ({
101-
id: d.id,
102-
name: d.name,
103-
category: d.category,
104-
price: d.price,
105-
quantity: d.quantity,
106-
active: d.active,
107-
tags: d.tags,
108-
rating: d.rating,
109-
total: d.price * d.quantity * m,
110-
})),
111-
};
73+
const result = {
74+
count,
75+
items: datasetItems.slice(0, count).map((d: any) => ({
76+
id: d.id,
77+
name: d.name,
78+
category: d.category,
79+
price: d.price,
80+
quantity: d.quantity,
81+
active: d.active,
82+
tags: d.tags,
83+
rating: d.rating,
84+
total: d.price * d.quantity * m,
85+
})),
86+
};
11287

113-
const encoding = headers["accept-encoding"];
114-
if (encoding) {
115-
const index = encoding.indexOf(",");
116-
const type = index === -1 ? encoding : encoding.slice(0, index);
88+
const encoding = headers["accept-encoding"];
89+
if (encoding) {
90+
const index = encoding.indexOf(",");
91+
const type =
92+
index === -1 ? encoding : encoding.slice(0, index);
11793

118-
set.headers["content-type"] = "application/json";
119-
if (type === "gzip") {
120-
set.headers["content-encoding"] = "gzip";
121-
return Bun.gzipSync(JSON.stringify(result));
122-
} else if (type === "br") {
123-
set.headers["content-encoding"] = "br";
124-
return brotliCompressSync(JSON.stringify(result));
125-
} else if (type === "deflate") {
126-
set.headers["content-encoding"] = "deflate";
127-
return Bun.deflateSync(JSON.stringify(result));
94+
set.headers["content-type"] = "application/json";
95+
if (type === "gzip") {
96+
set.headers["content-encoding"] = "gzip";
97+
return Bun.gzipSync(JSON.stringify(result));
98+
} else if (encoding === "br") {
99+
set.headers["content-encoding"] = "br";
100+
return Bun.deflateSync(JSON.stringify(result));
101+
} else if (encoding === "deflate") {
102+
set.headers["content-encoding"] = "deflate";
103+
return Bun.deflateSync(JSON.stringify(result));
104+
}
128105
}
129-
}
130106

131-
return result;
132-
})
133-
.get("/async-db", async ({ query }) => {
134-
if (!pgPool) {
135-
return new Response(EMPTY_DB_JSON, {
136-
headers: { "content-type": "application/json" },
137-
});
138-
}
139-
const min = parseInt((query.min as string) ?? "", 10) || 10;
140-
const max = parseInt((query.max as string) ?? "", 10) || 50;
141-
const limit = Math.max(1, Math.min(parseInt((query.limit as string) ?? "", 10) || 50, 50));
142-
try {
143-
const result = await pgPool.query(
144-
"SELECT id, name, category, price, quantity, active, tags, rating_score, rating_count FROM items WHERE price BETWEEN $1 AND $2 LIMIT $3",
145-
[min, max, limit],
146-
);
147-
const items = result.rows.map((r: any) => ({
148-
id: r.id,
149-
name: r.name,
150-
category: r.category,
151-
price: r.price,
152-
quantity: r.quantity,
153-
active: r.active,
154-
tags: r.tags,
155-
rating: { score: r.rating_score, count: r.rating_count },
156-
}));
157-
const body = JSON.stringify({ count: items.length, items });
158-
return new Response(body, {
159-
headers: {
160-
"content-type": "application/json",
161-
"content-length": String(Buffer.byteLength(body)),
162-
},
163-
});
164-
} catch (e) {
165-
return new Response(EMPTY_DB_JSON, {
166-
headers: { "content-type": "application/json" },
167-
});
168-
}
169-
})
170-
.post("/upload", async ({ request }) => {
171-
let size = 0;
172-
if (request.body) {
173-
for await (const chunk of request.body as any) {
174-
size += (chunk as Uint8Array).byteLength;
107+
return result;
108+
})
109+
.get("/async-db", async ({ query }) => {
110+
if (!pg) return { items: [], count: 0 };
111+
112+
const min = +query.min || 10;
113+
const max = +query.max || 50;
114+
const limit = Math.max(1, Math.min(+query.limit || 50, 50));
115+
116+
try {
117+
const rows =
118+
await pg`SELECT id, name, category, price, quantity, active, tags, rating_score, rating_count FROM items WHERE price BETWEEN ${min} AND ${max} LIMIT ${limit}`;
119+
120+
return {
121+
count: rows.length,
122+
items: rows.map((r: any) => ({
123+
id: r.id,
124+
name: r.name,
125+
category: r.category,
126+
price: r.price,
127+
quantity: r.quantity,
128+
active: r.active,
129+
tags: r.tags,
130+
rating: {
131+
score: r.rating_score,
132+
count: r.rating_count,
133+
},
134+
})),
135+
};
136+
} catch (_) {
137+
return { items: [], count: 0 };
175138
}
176-
}
177-
return new Response(String(size), {
178-
headers: { "content-type": "text/plain" },
179-
});
180-
})
181-
.use(staticModule)
182-
.all("*", () => new Response("Not found", { status: 404 }))
183-
.listen({ port: 8080, reusePort: true });
139+
})
140+
.post("/upload", async ({ request }) => {
141+
let size = 0;
142+
if (request.body) {
143+
for await (const chunk of request.body as any) {
144+
size += (chunk as Uint8Array).byteLength;
145+
}
146+
}
147+
return new Response(String(size), {
148+
headers: { "content-type": "text/plain" },
149+
});
150+
})
151+
.onError(({ code }) => {
152+
if (code === "NOT_FOUND") return status(404);
153+
})
154+
.listen(8080);
184155
}

site/content/docs/running-locally/setup.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ Clone the repo and (optionally) install native load generators. If you'd rather
88
## Clone the repository
99

1010
```bash
11-
git clone https://github.com/SaltyAom/HttpArena.git
11+
git clone https://github.com/MDA2AV/HttpArena.git
1212
cd HttpArena
1313
```
1414

0 commit comments

Comments (0)