Skip to content

Commit dc2b6a1

Browse files
skeptrunedev and cdxker
authored and committed
feat: update openai_dive dependency and enhance DatasetConfiguration with LLM_API_VERSION
- Updated the openai_dive dependency version from 1.0.1 to 1.2.2 in Cargo.toml and Cargo.lock.
- Added LLM_API_VERSION field to DatasetConfiguration and DatasetConfigurationDTO structs.
- Modified the conversion implementations for DatasetConfiguration and DatasetConfigurationDTO to handle the new LLM_API_VERSION field.
- Updated dataset_handler, invitation_handler, organization_handler, payment_handler, and message_handler to remove unnecessary user verification checks.
- Enhanced message handling to include LLM_API_VERSION in API requests.
- Adjusted image handling to support various MIME types.
1 parent 94175af commit dc2b6a1

File tree

14 files changed

+281
-18449
lines changed

14 files changed

+281
-18449
lines changed

clients/ts-sdk/openapi.json

Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -12264,6 +12264,11 @@
1226412264
"description": "Whether to only use indexed chunks",
1226512265
"nullable": true
1226612266
},
12267+
"LLM_API_VERSION": {
12268+
"type": "string",
12269+
"description": "The API version for the LLM API",
12270+
"nullable": true
12271+
},
1226712272
"LLM_BASE_URL": {
1226812273
"type": "string",
1226912274
"description": "The base URL for the LLM API",

clients/ts-sdk/src/types.gen.ts

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1397,6 +1397,10 @@ export type DatasetConfigurationDTO = {
13971397
* Whether to only use indexed chunks
13981398
*/
13991399
INDEXED_ONLY?: (boolean) | null;
1400+
/**
1401+
* The API version for the LLM API
1402+
*/
1403+
LLM_API_VERSION?: (string) | null;
14001404
/**
14011405
* The base URL for the LLM API
14021406
*/

frontends/chat/vite.config.ts

Lines changed: 8 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -4,4 +4,12 @@ import runtimeEnv from "vite-plugin-runtime-env";
44

55
export default defineConfig({
66
plugins: [solid(), runtimeEnv()],
7+
optimizeDeps: {
8+
include: ["debug", "extend"],
9+
},
10+
build: {
11+
commonjsOptions: {
12+
include: [/debug/, /extend/, /node_modules/],
13+
},
14+
},
715
});

frontends/dashboard/src/components/dataset-settings/LLMSettings.tsx

Lines changed: 154 additions & 31 deletions
Original file line number | Diff line number | Diff line change
@@ -1,13 +1,42 @@
11
import { Tooltip } from "shared/ui";
22
import { AiOutlineInfoCircle } from "solid-icons/ai";
3-
import { Accessor, createEffect, onCleanup } from "solid-js";
3+
import { Accessor, createEffect, onCleanup, For, createSignal } from "solid-js";
44
import { DatasetConfig } from "./LegacySettingsWrapper";
55

66
export const LLMSettings = (props: {
77
serverConfig: Accessor<DatasetConfig>;
88
setServerConfig: (config: (prev: DatasetConfig) => DatasetConfig) => void;
99
saveConfig?: () => void;
1010
}) => {
11+
const [showOptions, setShowOptions] = createSignal(false);
12+
const [filteredOptions, setFilteredOptions] = createSignal<string[]>([]);
13+
14+
// Predefined LLM API URLs
15+
const availableLLMUrls = [
16+
"https://api.openai.com/v1",
17+
"https://openrouter.ai/api/v1",
18+
"https://api.groq.com/openai/v1",
19+
];
20+
21+
// Filter options based on current input
22+
const filterOptions = (inputValue: string) => {
23+
if (!inputValue.trim()) {
24+
setFilteredOptions(availableLLMUrls);
25+
return;
26+
}
27+
28+
const filtered = availableLLMUrls.filter((url) =>
29+
url.toLowerCase().includes(inputValue.toLowerCase()),
30+
);
31+
setFilteredOptions(filtered);
32+
};
33+
34+
// Initialize filtered options
35+
createEffect(() => {
36+
const currentUrl = props.serverConfig().LLM_BASE_URL ?? "";
37+
filterOptions(currentUrl);
38+
});
39+
1140
createEffect(() => {
1241
const handleKeyDown = (event: KeyboardEvent) => {
1342
if ((event.ctrlKey || event.metaKey) && event.key === "s") {
@@ -48,6 +77,7 @@ export const LLMSettings = (props: {
4877
</span>
4978

5079
<div class="mt-4 grid grid-cols-4 gap-x-3 gap-y-6">
80+
{/* LLM API URL Combobox */}
5181
<div class="col-span-4 sm:col-span-2">
5282
<label
5383
for="llmAPIURL"
@@ -56,38 +86,97 @@ export const LLMSettings = (props: {
5686
LLM API URL
5787
<Tooltip
5888
body={<AiOutlineInfoCircle />}
59-
tooltipText="Select the API URL to use for the LLM. Contact us or use the API if you need a custom URL."
89+
tooltipText="Select the API URL to use for the LLM, or enter your own. Contact us or use the API if you need a custom URL."
6090
/>
6191
</label>
62-
<select
63-
name="llmAPIURL"
64-
id="llmAPIURL"
65-
class="block w-full rounded-md border-[0.5px] border-neutral-300 bg-white px-3 py-2 shadow-sm placeholder:text-neutral-400 focus:outline-magenta-500 sm:text-sm sm:leading-6"
66-
value={props.serverConfig().LLM_BASE_URL?.toString()}
67-
onInput={(e) =>
68-
props.setServerConfig((prev) => {
69-
const updatedConfig = {
70-
...prev,
71-
LLM_BASE_URL: e.currentTarget.value,
72-
};
73-
if (prev.LLM_BASE_URL !== e.currentTarget.value) {
74-
updatedConfig.LLM_API_KEY = null;
75-
}
76-
77-
return updatedConfig;
78-
})
79-
}
80-
>
81-
<option value="https://api.openai.com/v1">
82-
https://api.openai.com/v1
83-
</option>
84-
<option value="https://openrouter.ai/api/v1">
85-
https://openrouter.ai/api/v1
86-
</option>
87-
<option value="https://api.groq.com/openai/v1">
88-
https://api.groq.com/openai/v1
89-
</option>
90-
</select>
92+
<div class="relative">
93+
<input
94+
type="text"
95+
name="llmAPIURL"
96+
id="llmAPIURL"
97+
class="block w-full rounded-md border-[0.5px] border-neutral-300 bg-white px-3 py-2 shadow-sm placeholder:text-neutral-400 focus:outline-magenta-500 sm:text-sm sm:leading-6"
98+
value={props.serverConfig().LLM_BASE_URL ?? ""}
99+
placeholder="Enter or select an API URL"
100+
autocomplete="off"
101+
onInput={(e) => {
102+
const value = e.currentTarget.value;
103+
props.setServerConfig((prev) => {
104+
const updatedConfig = {
105+
...prev,
106+
LLM_BASE_URL: value,
107+
};
108+
if (prev.LLM_BASE_URL !== value) {
109+
updatedConfig.LLM_API_KEY = null;
110+
}
111+
return updatedConfig;
112+
});
113+
filterOptions(value);
114+
setShowOptions(true);
115+
}}
116+
onFocus={() => {
117+
const currentValue =
118+
props.serverConfig().LLM_BASE_URL ?? "";
119+
filterOptions(currentValue);
120+
setShowOptions(true);
121+
}}
122+
onBlur={() => setTimeout(() => setShowOptions(false), 100)}
123+
/>
124+
<button
125+
type="button"
126+
class="absolute inset-y-0 right-2 flex items-center px-1 text-neutral-400 hover:text-magenta-500 focus:outline-none"
127+
tabIndex={-1}
128+
onClick={() => {
129+
const currentValue =
130+
props.serverConfig().LLM_BASE_URL ?? "";
131+
filterOptions(currentValue);
132+
setShowOptions((v) => !v);
133+
}}
134+
>
135+
<svg
136+
class="h-4 w-4"
137+
fill="none"
138+
stroke="currentColor"
139+
stroke-width="2"
140+
viewBox="0 0 24 24"
141+
>
142+
<path
143+
stroke-linecap="round"
144+
stroke-linejoin="round"
145+
d="M19 9l-7 7-7-7"
146+
/>
147+
</svg>
148+
</button>
149+
{showOptions() && (
150+
<ul class="absolute z-10 mt-1 max-h-40 w-full overflow-auto rounded-md border border-neutral-200 bg-white py-1 text-base shadow-lg ring-1 ring-black ring-opacity-5 focus:outline-none sm:text-sm">
151+
{filteredOptions().length === 0 && (
152+
<li class="cursor-default select-none px-4 py-2 text-neutral-400">
153+
No options
154+
</li>
155+
)}
156+
<For each={filteredOptions()}>
157+
{(url) => (
158+
<li
159+
class={`cursor-pointer select-none px-4 py-2 hover:bg-magenta-50 ${
160+
url === props.serverConfig().LLM_BASE_URL
161+
? "bg-magenta-100 text-magenta-700"
162+
: ""
163+
}`}
164+
onMouseDown={() => {
165+
props.setServerConfig((prev) => ({
166+
...prev,
167+
LLM_BASE_URL: url,
168+
LLM_API_KEY: null,
169+
}));
170+
setShowOptions(false);
171+
}}
172+
>
173+
{url}
174+
</li>
175+
)}
176+
</For>
177+
</ul>
178+
)}
179+
</div>
91180
</div>
92181

93182
<div class="col-span-4 sm:col-span-2">
@@ -137,6 +226,40 @@ export const LLMSettings = (props: {
137226
</div>
138227
</div>
139228

229+
<div class="col-span-4 sm:col-span-2">
230+
<label
231+
for="llmAPIVersion"
232+
class="flex items-center gap-2 text-sm font-medium leading-6"
233+
>
234+
LLM API Version
235+
<Tooltip
236+
body={<AiOutlineInfoCircle />}
237+
tooltipText="Specify the API version for the LLM if required by your provider. Azure OpenAI Service requires this to be set."
238+
/>
239+
</label>
240+
<input
241+
type="text"
242+
name="llmAPIVersion"
243+
id="llmAPIVersion"
244+
class="block w-full rounded-md border-[0.5px] border-neutral-300 px-3 py-1.5 shadow-sm placeholder:text-neutral-400 focus:outline-magenta-500 sm:text-sm sm:leading-6"
245+
value={
246+
(props.serverConfig().LLM_API_VERSION as
247+
| string
248+
| undefined) ?? ""
249+
}
250+
onInput={(e) =>
251+
props.setServerConfig((prev) => ({
252+
...prev,
253+
LLM_API_VERSION:
254+
e.currentTarget.value != ""
255+
? e.currentTarget.value
256+
: null,
257+
}))
258+
}
259+
placeholder="Enter API version (optional)"
260+
/>
261+
</div>
262+
140263
<div class="col-span-4 sm:col-span-2">
141264
<label
142265
for="llmAPIURL"

0 commit comments

Comments (0)