Skip to content

Commit caac82b

Browse files
Added tools and fixed async send order
1 parent ddbcf69 commit caac82b

3 files changed

Lines changed: 20 additions & 23 deletions

File tree

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
{
22
"name": "llmasaservice-client",
33
"license": "MIT",
4-
"version": "0.9.2",
4+
"version": "0.10.0",
55
"main": "dist/index.js",
66
"module": "dist/index.mjs",
77
"types": "dist/index.d.ts",

src/LLMAsAService.tsx

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ export interface LLMServiceType {
1212
customer?: LLMAsAServiceCustomer;
1313
url?: string | null;
1414
agent?: string | null;
15+
tools?: [] | null;
1516
}
1617

1718
export const LLMService = createContext<LLMServiceType | undefined>(undefined);
@@ -22,6 +23,7 @@ interface UserProviderProps {
2223
customer?: LLMAsAServiceCustomer;
2324
url?: string | null;
2425
agent?: string | null;
26+
tools?: [] | null;
2527
}
2628

2729
export const LLMServiceProvider: React.FC<UserProviderProps> = ({

src/useLLM.ts

Lines changed: 17 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -101,6 +101,7 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
101101
customer: context?.customer ?? {}, // if no customer, use the projectId as the customer_id
102102
allowCaching: allowCaching,
103103
conversationId: conversation,
104+
tools: context?.tools ?? [],
104105
});
105106

106107
// trying to get cloudfront oac going. posts need to be signed, but when i add this the call fails...
@@ -164,12 +165,11 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
164165
console.error(`Error: Error in fetch. (${errorInFetch})`);
165166
}
166167
}
167-
168168
async function readStream(
169169
reader: ReadableStreamDefaultReader,
170170
decoder: TextDecoder,
171171
stream: Boolean = true,
172-
{ signal: signal }: { signal: AbortSignal },
172+
{ signal }: { signal: AbortSignal },
173173
onComplete?: (result: string) => void,
174174
onError?: (error: string) => void
175175
): Promise<string> {
@@ -178,49 +178,44 @@ export const useLLM = (options?: LLMServiceType): UseLLMReturnType => {
178178

179179
while (true) {
180180
try {
181-
// Check if the stream has been aborted
182181
if (signal.aborted) {
183182
reader.cancel();
184-
setIdle(true);
185183
break;
186184
}
187185

188-
// Read a chunk of data from the stream
189186
const { value, done } = await reader.read();
190187

191-
if (decoder.decode(value).startsWith("Error:")) {
192-
errorInRead = decoder.decode(value).substring(6);
188+
const decoded = decoder.decode(value);
189+
result += decoded;
190+
191+
if (stream) {
192+
setResponse(result);
193+
}
194+
195+
if (decoded.startsWith("Error:")) {
196+
errorInRead = decoded.substring(6);
193197
break;
194198
}
195199

196-
// If the stream has been read to the end, exit the loop
197200
if (done) {
198-
setIdle(true);
199201
break;
200202
}
201-
202-
// Process the chunk of data
203-
result += decoder.decode(value);
204-
if (stream) setResponse((prevState: any) => result);
205203
} catch (error: any) {
206-
if (error.name === "AbortError") {
207-
break;
204+
if (error.name !== "AbortError") {
205+
errorInRead = `Read error: ${error.message}`;
208206
}
209-
210-
errorInRead = `Reading error ${error.message}`;
211207
break;
212208
} finally {
213-
if (signal.aborted) {
214-
reader.releaseLock();
215-
}
209+
if (signal.aborted) reader.releaseLock();
216210
}
217211
}
218212

219-
if (errorInRead !== "") {
213+
setIdle(true);
214+
215+
if (errorInRead) {
220216
setError(errorInRead);
221217
reader.cancel();
222218
if (onError) onError(errorInRead);
223-
setIdle(true);
224219
}
225220

226221
if (onComplete) {

0 commit comments

Comments (0)