Skip to content

Commit 608ad03

Browse files
committed
Fix native interface for LLM. Also a small functionality extension for tool calling.
1 parent 0c5633a commit 608ad03

4 files changed

Lines changed: 16 additions & 15 deletions

File tree

android/src/main/java/com/swmansion/rnexecutorch/LLM.kt

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ import org.pytorch.executorch.extension.llm.LlmCallback
77
import org.pytorch.executorch.extension.llm.LlmModule
88

99
class LLM(
10-
reactContext: ReactApplicationContext,
10+
reactContext: ReactApplicationContext,
1111
) : NativeLLMSpec(reactContext), LlmCallback {
1212
private var llmModule: LlmModule? = null
1313

@@ -26,9 +26,9 @@ class LLM(
2626
}
2727

2828
override fun loadLLM(
29-
modelSource: String,
30-
tokenizerSource: String,
31-
promise: Promise,
29+
modelSource: String,
30+
tokenizerSource: String,
31+
promise: Promise,
3232
) {
3333
try {
3434
llmModule = LlmModule(modelSource, tokenizerSource, 0.7f)
@@ -39,21 +39,21 @@ class LLM(
3939
}
4040

4141
override fun runInference(
42-
input: String,
43-
promise: Promise,
42+
input: String,
43+
promise: Promise,
4444
) {
4545
Thread {
46-
llmModule!!.generate(input, this)
47-
promise.resolve("Inference completed successfully")
48-
}
49-
.start()
46+
llmModule!!.generate(input, this)
47+
promise.resolve("Inference completed successfully")
48+
}
49+
.start()
5050
}
5151

5252
override fun interrupt() {
5353
llmModule!!.stop()
5454
}
5555

56-
override fun deleteModule() {
56+
override fun releaseResources() {
5757
llmModule = null
5858
}
5959

src/controllers/LLMController.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,7 @@ export class LLMController {
138138
public delete() {
139139
this.onToken?.remove();
140140
this.onToken = null;
141-
this.nativeModule.deleteModule();
141+
this.nativeModule.releaseResources();
142142
}
143143

144144
public async runInference(input: string) {
@@ -184,7 +184,7 @@ export class LLMController {
184184
return;
185185
}
186186

187-
if (!this.toolsConfig) {
187+
if (!this.toolsConfig || this.toolsConfig.displayToolCalls) {
188188
this.responseCallback(
189189
this._response.replace(this.tokenizerConfig.eos_token, '')
190190
);

src/native/NativeLLM.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ export interface Spec extends TurboModule {
66
loadLLM(modelSource: string, tokenizerSource: string): Promise<string>;
77
runInference(input: string): Promise<string>;
88
interrupt(): void;
9-
deleteModule(): void;
9+
releaseResources(): void;
1010

1111
readonly onToken: EventEmitter<string>;
1212
}

src/types/llm.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ export interface LLMType {
66
downloadProgress: number;
77
error: string | null;
88
runInference: (input: string) => Promise<void>;
9-
sendMessage: (message: string, tools?: LLMTool[]) => Promise<void>;
9+
sendMessage: (message: string) => Promise<void>;
1010
interrupt: () => void;
1111
}
1212

@@ -35,6 +35,7 @@ export interface ChatConfig {
3535
export interface ToolsConfig {
3636
tools: LLMTool[];
3737
executeToolCallback: (call: ToolCall) => Promise<string | null>;
38+
displayToolCalls?: boolean;
3839
}
3940

4041
export const SPECIAL_TOKENS = [

0 commit comments

Comments (0)