Skip to content

Commit 1971190

Browse files
authored
Chore: platformatic kafka (#263)
* deps update * Adding @platformatic/kafka * Test using @platformatic/kafka * Enabling threads * Removing node-rdkafka * Adding kafka ci * Adding test context * Lint fix * Adding required engine * Using NameAndRegistrationPair
1 parent 693d6eb commit 1971190

6 files changed

Lines changed: 96 additions & 218 deletions

File tree

.github/workflows/ci.yml

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ on:
77
pull_request:
88

99
jobs:
10-
call-ci-flow:
10+
general:
1111
strategy:
1212
matrix:
1313
node-version: [18.x, 20.x, 22.x, 23.x]
@@ -27,8 +27,17 @@ jobs:
2727
node_version: ${{ matrix.node-version }}
2828
package_name: ${{ matrix.package-name }}
2929

30+
kafka:
31+
strategy:
32+
matrix:
33+
node-version: [22.x, 23.x]
34+
uses: kibertoad/message-queue-toolkit/.github/workflows/ci.common.yml@main
35+
with:
36+
node_version: ${{ matrix.node-version }}
37+
package_name: '@message-queue-toolkit/kafka'
38+
3039
automerge:
31-
needs: [ call-ci-flow ]
40+
needs: [ general, kafka ]
3241
runs-on: ubuntu-latest
3342
permissions:
3443
pull-requests: write

packages/kafka/README.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
11
# @message-queue-toolkit/kafka
22

33
In development
4+
5+
6+
## Requirements
7+
- Node.js >= 22.14.0

packages/kafka/lib/test.spec.ts

Lines changed: 45 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -1,67 +1,73 @@
1-
import { once } from 'node:events'
1+
import { randomUUID } from 'node:crypto'
22
import { waitAndRetry } from '@lokalise/universal-ts-utils/node'
3-
import { KafkaConsumer, type Message, Producer, features, librdkafkaVersion } from 'node-rdkafka'
3+
import {
4+
Consumer,
5+
type Message,
6+
Producer,
7+
stringDeserializers,
8+
stringSerializers,
9+
} from '@platformatic/kafka'
10+
import { ProduceAcks } from '@platformatic/kafka'
11+
import { afterAll } from 'vitest'
12+
import { type TestContext, registerDependencies } from '../test/testContext.ts'
413

514
// TODO: to be removed once we have proper tests
615
describe('Test', () => {
7-
it('should use node-rdkafka', () => {
8-
expect(features).toBeDefined()
9-
expect(librdkafkaVersion).toBeDefined()
16+
let testContext: TestContext
17+
18+
beforeAll(async () => {
19+
testContext = await registerDependencies()
20+
})
21+
22+
afterAll(async () => {
23+
await testContext.dispose()
1024
})
1125

1226
it('should send and receive a message', { timeout: 10000 }, async () => {
1327
// Given
14-
const brokers = 'localhost:9092'
28+
const clientId = randomUUID()
1529
// Use a fresh, unique topic per run to avoid stale state
1630
const topic = `test-topic-${Date.now()}`
1731
const messageValue = 'My test message'
1832

19-
const receivedMessages: Message[] = []
33+
const receivedMessages: Message<string, string, string, string>[] = []
2034

21-
// Create a producer
35+
// Create producer
2236
const producer = new Producer({
23-
'metadata.broker.list': brokers,
24-
'allow.auto.create.topics': true,
37+
clientId,
38+
bootstrapBrokers: testContext.cradle.kafkaConfig.brokers,
39+
serializers: stringSerializers,
40+
autocreateTopics: true,
2541
})
26-
producer.connect()
27-
await once(producer, 'ready')
2842

29-
// Create a consumer with a unique group and disable auto-commit for fresh offsets
30-
const consumer = new KafkaConsumer(
31-
{
32-
'group.id': 'test-group',
33-
'metadata.broker.list': brokers,
34-
'allow.auto.create.topics': true,
35-
'enable.auto.commit': false,
36-
},
37-
{ 'auto.offset.reset': 'earliest' },
38-
)
39-
consumer.connect()
43+
// Create consumer
44+
const consumer = new Consumer({
45+
clientId,
46+
groupId: randomUUID(),
47+
bootstrapBrokers: testContext.cradle.kafkaConfig.brokers,
48+
deserializers: stringDeserializers,
49+
autocreateTopics: true,
50+
})
4051

41-
await new Promise<void>((resolve, reject) =>
42-
consumer
43-
.on('ready', () => {
44-
consumer.subscribe([topic])
45-
consumer.consume()
46-
resolve()
47-
})
48-
.on('event.error', (err) => reject(err))
49-
.on('data', (data) => {
50-
receivedMessages.push(data)
51-
}),
52-
)
52+
const stream = await consumer.consume({ topics: [topic] })
53+
stream.on('data', (message) => {
54+
receivedMessages.push(message)
55+
stream.close()
56+
})
5357

5458
// When
55-
producer.produce(topic, null, Buffer.from(messageValue))
56-
producer.flush()
59+
await producer.send({
60+
messages: [{ topic, value: messageValue }],
61+
acks: ProduceAcks.NO_RESPONSE,
62+
})
5763

5864
// Then
5965
await waitAndRetry(() => receivedMessages.length > 0, 10, 800)
6066
expect(receivedMessages).toHaveLength(1)
6167
expect(receivedMessages[0]?.value?.toString()).toBe(messageValue)
6268

6369
// Cleanup
64-
producer.disconnect()
65-
consumer.disconnect()
70+
producer.close()
71+
consumer.close()
6672
})
6773
})

packages/kafka/package.json

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
{
22
"name": "@message-queue-toolkit/kafka",
33
"version": "0.0.0",
4+
"engines": { "node": ">= 22.14.0" },
45
"private": false,
56
"license": "MIT",
67
"homepage": "https://github.com/kibertoad/message-queue-toolkit",
@@ -44,14 +45,14 @@
4445
"prepublishOnly": "npm run lint && npm run build"
4546
},
4647
"dependencies": {
47-
"@lokalise/node-core": "^13.3.0",
48+
"@lokalise/node-core": "^13.6.0",
4849
"@lokalise/universal-ts-utils": "^4.4.0",
49-
"node-rdkafka": "^3.3.1",
50+
"@platformatic/kafka": "^1.0.0",
5051
"zod": "^3.23.8"
5152
},
5253
"peerDependencies": {
53-
"@message-queue-toolkit/core": ">=20.0.0",
54-
"@message-queue-toolkit/schemas": ">=2.0.0"
54+
"@message-queue-toolkit/core": ">=21.0.0",
55+
"@message-queue-toolkit/schemas": ">=6.0.0"
5556
},
5657
"devDependencies": {
5758
"@biomejs/biome": "1.9.4",

0 commit comments

Comments
 (0)