Skip to content

Commit 744656c

Browse files
Add Fastify: Node.js web framework (~33k stars)
1 parent 511d334 commit 744656c

5 files changed

Lines changed: 297 additions & 0 deletions

File tree

frameworks/fastify/Dockerfile

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
# Minimal production image for the Fastify benchmark server.
FROM node:22-slim
# Build toolchain required to compile better-sqlite3's native addon at install time.
RUN apt-get update && apt-get install -y --no-install-recommends python3 make g++ && rm -rf /var/lib/apt/lists/*
WORKDIR /app
# Copy the manifest alone first so the dependency layer is cached across
# server.js-only changes.
COPY package.json .
RUN npm install --omit=dev
COPY server.js .
ENV NODE_ENV=production
EXPOSE 8080
CMD ["node", "server.js"]

frameworks/fastify/README.md

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
# Fastify
2+
3+
[Fastify](https://github.com/fastify/fastify) is a fast and low-overhead web framework for Node.js. It's designed to be highly performant with a powerful plugin architecture and built-in schema validation via JSON Schema.
4+
5+
- **Language:** JavaScript (Node.js)
6+
- **Version:** 5.x
7+
- **Concurrency:** Node.js cluster module (one worker per CPU)
8+
- **Notable:** Built-in JSON serialization optimization, schema-based validation, encapsulation via plugins

frameworks/fastify/meta.json

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
{
2+
"display_name": "Fastify",
3+
"language": "JS",
4+
"type": "framework",
5+
"engine": "V8",
6+
"description": "Fast and low-overhead Node.js web framework built for developer experience and performance.",
7+
"repo": "https://github.com/fastify/fastify",
8+
"enabled": true,
9+
"tests": [
10+
"baseline",
11+
"pipelined",
12+
"limited-conn",
13+
"json",
14+
"upload",
15+
"compression",
16+
"mixed",
17+
"noisy",
18+
"baseline-h2",
19+
"static-h2"
20+
]
21+
}

frameworks/fastify/package.json

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
{
2+
"name": "httparena-fastify",
3+
"version": "1.0.0",
4+
"private": true,
5+
"dependencies": {
6+
"fastify": "^5.2.0",
7+
"better-sqlite3": "^11.0.0"
8+
}
9+
}

frameworks/fastify/server.js

Lines changed: 250 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,250 @@
1+
// Node core modules; `cluster` forks one worker per CPU (see entry point at
// the bottom of the file).
const cluster = require('cluster');
const os = require('os');
const fs = require('fs');
const http2 = require('http2');
const zlib = require('zlib');

// Value emitted in the `server` response header on every route.
const SERVER_NAME = 'fastify';

// --- Shared data (loaded per-worker) ---
let datasetItems;        // parsed dataset.json, or undefined if unavailable
let largeJsonBuf;        // pre-serialized large-dataset payload (Buffer), or undefined
let dbStmt;              // prepared better-sqlite3 statement, or undefined if DB unavailable
const staticFiles = {};  // file name -> { buf, ct }, filled by loadStaticFiles()
// Extension -> Content-Type map for static serving; unknown extensions fall
// back to application/octet-stream in loadStaticFiles().
const MIME_TYPES = {
'.css': 'text/css', '.js': 'application/javascript', '.html': 'text/html',
'.woff2': 'font/woff2', '.svg': 'image/svg+xml', '.webp': 'image/webp', '.json': 'application/json'
};
18+
19+
/**
 * Preload every file under /data/static into the in-memory `staticFiles`
 * cache, keyed by file name, with a content type looked up from MIME_TYPES
 * (defaulting to application/octet-stream). Best-effort: any filesystem
 * error aborts loading and leaves the cache as-is — the directory may simply
 * not exist in some environments.
 */
function loadStaticFiles() {
  const dir = '/data/static';
  try {
    const names = fs.readdirSync(dir);
    for (const name of names) {
      const dot = name.lastIndexOf('.');
      const contentType = MIME_TYPES[name.slice(dot)] || 'application/octet-stream';
      staticFiles[name] = { buf: fs.readFileSync(dir + '/' + name), ct: contentType };
    }
  } catch (e) {
    // best-effort preload: serve nothing rather than crash the worker
  }
}
29+
30+
/**
 * Read the benchmark dataset into the module-level `datasetItems`. The path
 * comes from $DATASET_PATH, defaulting to /data/dataset.json. Best-effort:
 * on any read or parse error the variable stays undefined, and the /json
 * route answers 500.
 */
function loadDataset() {
  const file = process.env.DATASET_PATH || '/data/dataset.json';
  try {
    const raw = fs.readFileSync(file, 'utf8');
    datasetItems = JSON.parse(raw);
  } catch (e) {
    // missing or malformed dataset — leave datasetItems undefined
  }
}
36+
37+
/**
 * Load /data/dataset-large.json, project each record down to the fields the
 * benchmark returns (adding a derived `total` = price * quantity rounded to
 * two decimals), and pre-serialize the whole response into `largeJsonBuf`
 * so the /compression route only has to gzip it. Best-effort: errors leave
 * largeJsonBuf undefined and /compression answers 500.
 */
function loadLargeDataset() {
  try {
    const records = JSON.parse(fs.readFileSync('/data/dataset-large.json', 'utf8'));
    const items = records.map((rec) => ({
      id: rec.id,
      name: rec.name,
      category: rec.category,
      price: rec.price,
      quantity: rec.quantity,
      active: rec.active,
      tags: rec.tags,
      rating: rec.rating,
      total: Math.round(rec.price * rec.quantity * 100) / 100
    }));
    largeJsonBuf = Buffer.from(JSON.stringify({ items, count: items.length }));
  } catch (e) {
    // missing or malformed dataset — leave largeJsonBuf undefined
  }
}
49+
50+
/**
 * Open the read-only SQLite benchmark database and prepare the query used by
 * the /db route (up to 50 items filtered by a price range). Best-effort: if
 * the better-sqlite3 driver or the DB file is unavailable, `dbStmt` stays
 * undefined and /db serves an empty result set.
 */
function loadDatabase() {
  try {
    const Database = require('better-sqlite3');
    const db = new Database('/data/benchmark.db', { readonly: true });
    // Map the DB file into memory for faster reads (256 MiB window).
    db.pragma('mmap_size=268435456');
    const sql = 'SELECT id, name, category, price, quantity, active, tags, rating_score, rating_count FROM items WHERE price BETWEEN ? AND ? LIMIT 50';
    dbStmt = db.prepare(sql);
  } catch (e) {
    // driver or DB file missing — /db falls back to an empty payload
  }
}
58+
59+
/**
 * Sum the integer values of a parsed query-string object.
 *
 * Each value is parsed with parseInt (radix 10), so numeric prefixes count
 * ("5abc" contributes 5) and fully non-numeric values contribute nothing.
 *
 * @param {Object<string, string>|null|undefined} query - parsed query params
 * @returns {number} sum of all parseable integer values; 0 for empty/missing
 */
function sumQuery(query) {
  let sum = 0;
  if (query) {
    for (const value of Object.values(query)) {
      const n = parseInt(value, 10);
      // Number.isNaN replaces the original `n === n` self-comparison idiom.
      if (!Number.isNaN(n)) sum += n;
    }
  }
  return sum;
}
69+
70+
/**
 * Per-worker entry point: load all shared data, register the benchmark's
 * HTTP/1.1 routes on a Fastify app listening on 8080, then start the
 * HTTP/2 server on 8443. Called once in each forked cluster worker.
 */
function startWorker() {
  // Load datasets, static files, and the DB before accepting traffic.
  loadDataset();
  loadLargeDataset();
  loadStaticFiles();
  loadDatabase();

  const Fastify = require('fastify');
  const app = Fastify({ logger: false });

  // --- /pipeline --- minimal fixed-body response
  app.get('/pipeline', (req, reply) => {
    reply.header('server', SERVER_NAME).type('text/plain').send('ok');
  });

  // --- /baseline11 GET & POST ---
  // GET: sum of integer query values.
  app.get('/baseline11', (req, reply) => {
    const s = sumQuery(req.query);
    reply.header('server', SERVER_NAME).type('text/plain').send(String(s));
  });

  // POST: query sum plus the integer parsed from the raw request body.
  app.post('/baseline11', async (req, reply) => {
    const querySum = sumQuery(req.query);
    // Fastify parses body based on content-type; for raw/text we collect manually
    // NOTE(review): if Fastify's built-in parser already consumed req.raw for a
    // recognized content-type (e.g. application/json), collectBody would see an
    // empty stream — confirm the benchmark posts an unparsed content-type.
    const body = await collectBody(req.raw);
    let total = querySum;
    const n = parseInt(body.trim(), 10);
    // `n === n` is false only for NaN (self-comparison NaN check).
    if (n === n) total += n;
    reply.header('server', SERVER_NAME).type('text/plain').send(String(total));
  });

  // --- /baseline2 --- same handler shape as GET /baseline11
  app.get('/baseline2', (req, reply) => {
    const s = sumQuery(req.query);
    reply.header('server', SERVER_NAME).type('text/plain').send(String(s));
  });

  // --- /json --- re-serialize the dataset per request (adds derived `total`)
  app.get('/json', (req, reply) => {
    if (!datasetItems) {
      reply.code(500).send('No dataset');
      return;
    }
    const items = datasetItems.map(d => ({
      id: d.id, name: d.name, category: d.category,
      price: d.price, quantity: d.quantity, active: d.active,
      tags: d.tags, rating: d.rating,
      total: Math.round(d.price * d.quantity * 100) / 100
    }));
    const buf = Buffer.from(JSON.stringify({ items, count: items.length }));
    reply
      .header('server', SERVER_NAME)
      .header('content-type', 'application/json')
      .header('content-length', buf.length)
      .send(buf);
  });

  // --- /compression --- gzip the pre-serialized large payload per request
  // (level 1 = fastest compression).
  app.get('/compression', (req, reply) => {
    if (!largeJsonBuf) {
      reply.code(500).send('No dataset');
      return;
    }
    const compressed = zlib.gzipSync(largeJsonBuf, { level: 1 });
    reply
      .header('server', SERVER_NAME)
      .header('content-type', 'application/json')
      .header('content-encoding', 'gzip')
      .header('content-length', compressed.length)
      .send(compressed);
  });

  // --- /db --- price-range query against SQLite; empty payload when the DB
  // was not loaded. min/max default to 10/50 (also on unparseable input).
  app.get('/db', (req, reply) => {
    if (!dbStmt) {
      reply.header('server', SERVER_NAME).type('application/json').send('{"items":[],"count":0}');
      return;
    }
    let min = 10, max = 50;
    if (req.query.min) min = parseFloat(req.query.min) || 10;
    if (req.query.max) max = parseFloat(req.query.max) || 50;
    const rows = dbStmt.all(min, max);
    const items = rows.map(r => ({
      id: r.id, name: r.name, category: r.category,
      price: r.price, quantity: r.quantity, active: r.active === 1,
      tags: JSON.parse(r.tags),
      rating: { score: r.rating_score, count: r.rating_count }
    }));
    const body = JSON.stringify({ items, count: items.length });
    reply
      .header('server', SERVER_NAME)
      .header('content-type', 'application/json')
      .header('content-length', Buffer.byteLength(body))
      .send(body);
  });

  // --- /upload --- echo the received body size in bytes
  app.post('/upload', async (req, reply) => {
    const body = await collectRawBody(req.raw);
    reply.header('server', SERVER_NAME).type('text/plain').send(String(body.length));
  });

  // Helper: collect raw body as string
  // NOTE(review): no 'error' listener — a failed request stream would leave
  // this promise pending forever; confirm that's acceptable for the harness.
  function collectBody(raw) {
    return new Promise((resolve) => {
      let body = '';
      raw.on('data', chunk => body += chunk);
      raw.on('end', () => resolve(body));
    });
  }

  // Helper: collect raw body as buffer
  // NOTE(review): same missing 'error' handling as collectBody above.
  function collectRawBody(raw) {
    return new Promise((resolve) => {
      const chunks = [];
      raw.on('data', chunk => chunks.push(chunk));
      raw.on('end', () => resolve(Buffer.concat(chunks)));
    });
  }

  // Start HTTP/1.1 server
  app.listen({ port: 8080, host: '0.0.0.0' }).then(() => {
    // Also start HTTP/2 server on 8443
    startH2();
  });
}
195+
196+
/**
 * Start an HTTPS/HTTP-2 server on port 8443 serving two route families:
 *   /static/<name> — files preloaded into the `staticFiles` cache
 *   anything else  — "baseline" handler summing integer query values
 * TLS cert/key paths come from $TLS_CERT/$TLS_KEY with /certs defaults.
 * If the cert or key cannot be read, the H2 server is skipped entirely
 * (HTTP/1.1 on 8080 keeps running).
 */
function startH2() {
  const certFile = process.env.TLS_CERT || '/certs/server.crt';
  const keyFile = process.env.TLS_KEY || '/certs/server.key';
  try {
    const opts = {
      cert: fs.readFileSync(certFile),
      key: fs.readFileSync(keyFile),
      allowHTTP1: false, // pure h2 endpoint; HTTP/1.1 stays on 8080
    };
    const h2server = http2.createSecureServer(opts, (req, res) => {
      const url = req.url;
      const q = url.indexOf('?');
      const p = q === -1 ? url : url.slice(0, q);
      if (p.startsWith('/static/')) {
        const name = p.slice(8); // strip "/static/"
        const sf = staticFiles[name];
        if (sf) {
          res.writeHead(200, { 'content-type': sf.ct, 'content-length': sf.buf.length, 'server': SERVER_NAME });
          res.end(sf.buf);
        } else {
          // Fix: include the server header on 404s too, matching every other
          // response this server emits.
          res.writeHead(404, { 'server': SERVER_NAME });
          res.end();
        }
      } else {
        // baseline h2: hand-rolled key=value&... scan over the raw query
        // string (values are used verbatim — no percent-decoding is done).
        let sum = 0;
        if (q !== -1) {
          const qs = url.slice(q + 1);
          let i = 0;
          while (i < qs.length) {
            const eq = qs.indexOf('=', i);
            if (eq === -1) break;
            let amp = qs.indexOf('&', eq);
            if (amp === -1) amp = qs.length;
            const n = parseInt(qs.slice(eq + 1, amp), 10);
            // Number.isNaN replaces the original `n === n` NaN idiom.
            if (!Number.isNaN(n)) sum += n;
            i = amp + 1;
          }
        }
        res.writeHead(200, { 'content-type': 'text/plain', 'server': SERVER_NAME });
        res.end(String(sum));
      }
    });
    h2server.listen(8443);
  } catch (e) {
    // TLS certs not available, skip H2
  }
}
244+
245+
// --- Entry point ---
// The primary process only forks workers; each worker runs the full server
// via startWorker(). Worker count prefers os.availableParallelism() when the
// runtime provides it, falling back to os.cpus().length.
if (cluster.isPrimary) {
  const workerCount = typeof os.availableParallelism === 'function'
    ? os.availableParallelism()
    : os.cpus().length;
  for (let i = 0; i < workerCount; i += 1) {
    cluster.fork();
  }
} else {
  startWorker();
}

0 commit comments

Comments
 (0)