-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: package.json
More file actions
49 lines (49 loc) · 1.17 KB
/
package.json
File metadata and controls
49 lines (49 loc) · 1.17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
{
"name": "llmasaservice-client",
"license": "MIT",
"version": "0.11.0",
"main": "dist/index.js",
"module": "dist/index.mjs",
"types": "dist/index.d.ts",
"files": [
"dist"
],
"scripts": {
"build": "tsup index.ts --format cjs,esm --dts --sourcemap",
"lint": "tsc",
"test": "vitest run"
},
"peerDependencies": {
"react": "^18.3.0",
"react-dom": "^18.3.1"
},
"devDependencies": {
"@changesets/cli": "^2.27.7",
"@testing-library/react": "^16.3.2",
"@types/react": "^18.3.3",
"@vitejs/plugin-react": "^6.0.1",
"jsdom": "^29.0.1",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"tsup": "^8.1.0",
"typescript": "^5.5.3",
"vitest": "^4.1.2"
},
"description": "HOC and hook to use the LLMAsAService.io LLM load balancer and firewall",
"author": "CASEY, Inc. <help@heycasey.io>",
"keywords": [
"react",
"llmasaservice",
"llm",
"openAI",
"chat"
],
"homepage": "https://llmasaservice.io",
"repository": {
"type": "git",
"url": "git+https://github.com/PredictabilityAtScale/useLLM.git"
},
"bugs": {
"url": "https://github.com/PredictabilityAtScale/useLLM/issues"
}
}