Skip to content

Commit dae9356

Browse files
fix: unify return types to int32_t so token-count and context-length APIs work on Android
1 parent c176e20 commit dae9356

4 files changed

Lines changed: 8 additions & 8 deletions

File tree

packages/react-native-executorch/common/rnexecutorch/models/llm/LLM.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ size_t LLM::getPromptTokenCount() const noexcept {
8686
return runner->stats_.num_prompt_tokens;
8787
}
8888

89-
size_t LLM::countTextTokens(std::string text) const {
89+
int32_t LLM::countTextTokens(std::string text) const {
9090
if (!runner || !runner->is_loaded()) {
9191
throw RnExecutorchError(
9292
RnExecutorchErrorCode::ModuleNotLoaded,
@@ -147,7 +147,7 @@ void LLM::setTopp(float topp) {
147147
runner->set_topp(topp);
148148
}
149149

150-
size_t LLM::getMaxContextLength() const {
150+
int32_t LLM::getMaxContextLength() const {
151151
if (!runner || !runner->is_loaded()) {
152152
throw RnExecutorchError(
153153
RnExecutorchErrorCode::ModuleNotLoaded,

packages/react-native-executorch/common/rnexecutorch/models/llm/LLM.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,13 +25,13 @@ class LLM : public BaseModel {
2525
void unload() noexcept;
2626
size_t getGeneratedTokenCount() const noexcept;
2727
size_t getPromptTokenCount() const noexcept;
28-
size_t countTextTokens(std::string text) const;
28+
int32_t countTextTokens(std::string text) const;
2929
size_t getMemoryLowerBound() const noexcept;
3030
void setCountInterval(size_t countInterval);
3131
void setTemperature(float temperature);
3232
void setTopp(float topp);
3333
void setTimeInterval(size_t timeInterval);
34-
size_t getMaxContextLength() const;
34+
int32_t getMaxContextLength() const;
3535

3636
private:
3737
std::unique_ptr<example::Runner> runner;

packages/react-native-executorch/common/runner/runner.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -342,14 +342,14 @@ void Runner::set_topp(float topp) noexcept {
342342
}
343343
}
344344

345-
size_t Runner::get_max_context_length() const {
345+
int32_t Runner::get_max_context_length() const {
346346
if (!is_loaded()) {
347347
return metadata_.at(kMaxContextLen);
348348
}
349349
return config_.max_context_length;
350350
}
351351

352-
size_t Runner::count_text_tokens(const std::string &text) const {
352+
int32_t Runner::count_text_tokens(const std::string &text) const {
353353
auto encodeResult =
354354
tokenizer_->encode(text, numOfAddedBoSTokens, numOfAddedEoSTokens);
355355

packages/react-native-executorch/common/runner/runner.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -50,8 +50,8 @@ class Runner : public llm::IRunner {
5050
void set_time_interval(size_t time_interval);
5151
void set_temperature(float temperature) noexcept;
5252
void set_topp(float topp) noexcept;
53-
size_t count_text_tokens(const std::string &text) const;
54-
size_t get_max_context_length() const;
53+
int32_t count_text_tokens(const std::string &text) const;
54+
int32_t get_max_context_length() const;
5555

5656
void stop() override;
5757
void reset() override;

0 commit comments

Comments (0)