Skip to content

Commit 8658af1

Browse files
committed
qwen3tts default to cpu unless gpu selected
1 parent da2bde4 commit 8658af1

7 files changed

Lines changed: 140 additions & 18 deletions

File tree

embd_res/klite.embd

Lines changed: 130 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -4355,6 +4355,7 @@ Current version indicated by LITEVER below.
43554355
saved_oai_role: 3, //0=user,1=assistant,2=system, 3=auto
43564356
saved_a1111_url: default_a1111_base,
43574357
saved_comfy_url: default_comfy_base,
4358+
saved_comfy_bearer_token: "",
43584359
saved_xtts_url: default_xtts_base,
43594360
saved_alltalk_url: default_alltalk_base,
43604361
saved_mcp_urls: "",
@@ -8670,8 +8671,23 @@ Current version indicated by LITEVER below.
86708671
{
86718672
console.log("Attempt ComfyUI Connection...");
86728673
//establish initial connection to a1111 api
8673-
fetch(localsettings.saved_comfy_url + comfy_models_endpoint)
8674-
.then(x => x.json())
8674+
let headers = {};
8675+
if (localsettings.saved_comfy_bearer_token != "") {
8676+
headers['Authorization'] = 'Bearer '+localsettings.saved_comfy_bearer_token
8677+
}
8678+
fetch(localsettings.saved_comfy_url + comfy_models_endpoint, {
8679+
headers: headers
8680+
})
8681+
.then(x => {
8682+
if (!silent && !x.ok) {
8683+
msg = "Connection returned "+x.status
8684+
if (x.status == 401) {
8685+
msg +=". Check Bearer Token."
8686+
}
8687+
msgbox(msg)
8688+
}
8689+
return x.json()
8690+
})
86758691
.then(modelsdata => {
86768692

86778693
if (modelsdata == null || modelsdata.length == 0) {
@@ -8690,12 +8706,12 @@ Current version indicated by LITEVER below.
86908706
}).catch((error) => {
86918707
if(!silent)
86928708
{
8693-
msgbox("ComfyUI Connect Error: " + error+"\nPlease make sure ComfyUI is running at "+localsettings.saved_comfy_url+" and properly configured!\n\nIt must be launched with the flags --listen --enable-cors-header '*' to enable API access\n");
8709+
msgbox("ComfyUI Connect Error: " + error+"\nPlease make sure ComfyUI is running at "+localsettings.saved_comfy_url+" and properly configured!\n\nIt must be launched with the flags --listen --enable-cors-header '*' to enable API access. If using ComfyUI-Login, the Bearer Token must match.\n");
86948710
}
86958711
comfyui_is_connected = false;
86968712
});
86978713

8698-
fetch(localsettings.saved_comfy_url + comfy_loras_endpoint)
8714+
fetch(localsettings.saved_comfy_url + comfy_loras_endpoint, {headers:headers})
86998715
.then(x => x.json())
87008716
.then(lorasdata => {
87018717
//repopulate our lora list
@@ -8735,9 +8751,14 @@ Current version indicated by LITEVER below.
87358751
const formData = new FormData();
87368752
formData.append('image', blob, filename);
87378753
formData.append('overwrite',1);
8754+
let headers = {};
8755+
if (localsettings.saved_comfy_bearer_token != "") {
8756+
headers['Authorization'] = 'Bearer '+localsettings.saved_comfy_bearer_token
8757+
}
87388758
return fetch(`${upload_endpoint}`, {
87398759
method: 'POST',
87408760
body: formData,
8761+
headers: headers,
87418762
}).then(response => {
87428763
if (!response.ok) {
87438764
throw new Error(`Upload failed with status ${response.status}`);
@@ -8874,11 +8895,13 @@ Current version indicated by LITEVER below.
88748895
image_db[imgid].type = 0; //0=image, 1=audio
88758896

88768897
uploadBase64ImgToComfy(req_payload["source_image"],comfyimg2imgname).then(() => {
8898+
headers = {'Content-Type': 'application/json'}
8899+
if (localsettings.saved_comfy_bearer_token != "") {
8900+
headers['Authorization'] = 'Bearer '+localsettings.saved_comfy_bearer_token
8901+
}
88778902
fetch(gen_endpoint, {
88788903
method: 'POST',
8879-
headers: {
8880-
'Content-Type': 'application/json',
8881-
},
8904+
headers: headers,
88828905
body: JSON.stringify(genimg_payload),
88838906
})
88848907
.then(x => x.json())
@@ -9033,6 +9056,15 @@ Current version indicated by LITEVER below.
90339056
},false);
90349057
}
90359058

9059+
function set_comfy_key()
9060+
{
9061+
inputBox("Enter ComfyUI API Auth Bearer Token","ComfyUI API Key",localsettings.saved_comfy_bearer_token,"Input ComfyUI API Key", ()=>{
9062+
let userinput = getInputBoxValue();
9063+
userinput = userinput.trim();
9064+
localsettings.saved_comfy_bearer_token = userinput.trim();
9065+
},true);
9066+
}
9067+
90369068
function generate_pollinations_image(req_payload, autoappend)
90379069
{
90389070
let splits = req_payload.prompt.split("###");
@@ -9835,6 +9867,7 @@ Current version indicated by LITEVER below.
98359867
new_save_storyobj.savedsettings.saved_kai_key = "";
98369868
new_save_storyobj.savedsettings.saved_a1111_url = "";
98379869
new_save_storyobj.savedsettings.saved_comfy_url = "";
9870+
new_save_storyobj.savedsettings.saved_comfy_bearer_token = "";
98389871
new_save_storyobj.savedsettings.saved_xtts_url = "";
98399872
new_save_storyobj.savedsettings.saved_mcp_urls = "";
98409873

@@ -9976,6 +10009,22 @@ Current version indicated by LITEVER below.
997610009
console.log("Unzip failed: " + error);
997710010
}
997810011

10012+
//try JSONL
10013+
try
10014+
{
10015+
const lines = text.split('\n');
10016+
let temparrs = lines.filter(line => line.trim()).map(line => JSON.parse(line));
10017+
if(temparrs && temparrs.length>1 && temparrs[temparrs.length-1].mes && temparrs[temparrs.length-1].name)
10018+
{
10019+
load_tavern_jsonl(temparrs);
10020+
return;
10021+
}
10022+
}
10023+
catch(error)
10024+
{
10025+
console.log("JSONL import failed: " + error);
10026+
}
10027+
997910028
// 5. Fallback to plaintext if .txt
998010029
if (selectedFilename.endsWith(".txt")) {
998110030
msgboxYesNo(
@@ -10270,6 +10319,7 @@ Current version indicated by LITEVER below.
1027010319
let tmp_kai2 = localsettings.saved_kai_key;
1027110320
let tmp_a1111 = localsettings.saved_a1111_url;
1027210321
let tmp_comfy = localsettings.saved_comfy_url;
10322+
let tmp_comfy_bearer_token = localsettings.saved_comfy_bearer_token;
1027310323
let tmp_xtts = localsettings.saved_xtts_url;
1027410324
let tmp_imggen = localsettings.generate_images_mode;
1027510325
let tmp_mcp = localsettings.saved_mcp_urls;
@@ -10327,6 +10377,7 @@ Current version indicated by LITEVER below.
1032710377
localsettings.saved_kai_key = tmp_kai2;
1032810378
localsettings.saved_a1111_url = tmp_a1111;
1032910379
localsettings.saved_comfy_url = tmp_comfy;
10380+
localsettings.saved_comfy_bearer_token = tmp_comfy_bearer_token;
1033010381
localsettings.saved_xtts_url = tmp_xtts;
1033110382
localsettings.generate_images_mode = tmp_imggen;
1033210383
localsettings.saved_mcp_urls = tmp_mcp;
@@ -10614,6 +10665,68 @@ Current version indicated by LITEVER below.
1061410665
return false;
1061510666
}
1061610667

10668+
function load_tavern_jsonl(obj)
10669+
{
10670+
if(localsettings.opmode!=3 && localsettings.opmode!=4)
10671+
{
10672+
//force into instruct
10673+
localsettings.opmode = 4;
10674+
}
10675+
gametext_arr = [];
10676+
if(localsettings.opmode==3) //import as chat
10677+
{
10678+
if(localsettings.gui_type_chat!=3)
10679+
{
10680+
localsettings.gui_type_chat = 2;
10681+
}
10682+
for(let i=0;i<obj.length;++i)
10683+
{
10684+
let curr = obj[i];
10685+
if(curr.mes)
10686+
{
10687+
if(curr.name)
10688+
{
10689+
if(curr.is_user)
10690+
{
10691+
localsettings.chatname = curr.name;
10692+
}else{
10693+
localsettings.chatopponent = curr.name;
10694+
}
10695+
}
10696+
gametext_arr.push("\n"+curr.name+": "+curr.mes);
10697+
}
10698+
}
10699+
}
10700+
else //import as instruct
10701+
{
10702+
localsettings.gui_type_instruct = 2;
10703+
localsettings.inject_chatnames_instruct = true;
10704+
for(let i=0;i<obj.length;++i)
10705+
{
10706+
let curr = obj[i];
10707+
if(curr.mes)
10708+
{
10709+
if(curr.is_user)
10710+
{
10711+
if(curr.name)
10712+
{
10713+
localsettings.chatname = curr.name;
10714+
}
10715+
gametext_arr.push(get_instructstartplaceholder()+curr.name+": "+curr.mes);
10716+
}else{
10717+
if(curr.name)
10718+
{
10719+
localsettings.chatopponent = curr.name;
10720+
}
10721+
gametext_arr.push(get_instructendplaceholder()+curr.name+": "+curr.mes);
10722+
}
10723+
}
10724+
}
10725+
}
10726+
update_for_sidepanel();
10727+
render_gametext(true);
10728+
sync_multiplayer(true);
10729+
}
1061710730
function load_tavern_obj(obj)
1061810731
{
1061910732
let selectedgreeting = "";
@@ -22656,11 +22769,15 @@ Current version indicated by LITEVER below.
2265622769
if(comfyid && comfyid!="")
2265722770
{
2265822771
//comfyui polling
22772+
let json_headers = {'Content-Type': 'application/json'};
22773+
let headers={};
22774+
if (localsettings.saved_comfy_bearer_token != "") {
22775+
headers['Authorization'] = 'Bearer '+localsettings.saved_comfy_bearer_token;
22776+
json_headers['Authorization'] = 'Bearer '+localsettings.saved_comfy_bearer_token;
22777+
}
2265922778
fetch(localsettings.saved_comfy_url + comfy_history_endpoint + "/" + comfyid, {
2266022779
method: 'GET',
22661-
headers: {
22662-
'Content-Type': 'application/json',
22663-
}
22780+
headers: json_headers,
2266422781
})
2266522782
.then(x => x.json())
2266622783
.then(resp2 => {
@@ -22670,7 +22787,7 @@ Current version indicated by LITEVER below.
2267022787
img.done = true;
2267122788
let finalfilename = resp2[comfyid].outputs["9"].images[0].filename;
2267222789
//fetch final image
22673-
fetch(localsettings.saved_comfy_url + comfy_results_endpoint + finalfilename)
22790+
fetch(localsettings.saved_comfy_url + comfy_results_endpoint + finalfilename, {headers: headers})
2267422791
.then((response) => {
2267522792
return response.blob(); // Convert the response into a Blob
2267622793
})
@@ -29616,10 +29733,11 @@ Current version indicated by LITEVER below.
2961629733
<div id="generate_images_comfy_container" class="settinglabel hidden settingsbox">
2961729734
<div class="settinglabel" style="display: flex; width: 100%">
2961829735
<div class="justifyleft">Model</div>
29619-
<select title="Select Image Model" class="form-control push-right" id="generate_images_comfy_model" style="width:calc(100% - 58px)">
29736+
<select title="Select Image Model" class="form-control push-right" id="generate_images_comfy_model" style="width:calc(100% - 170px)">
2962029737
<option value="">[None]</option>
2962129738
</select>
2962229739
<button type="button" class="btn btn-primary" onclick="set_comfy_endpoint()" style="width:52px; padding: 2px 2px; margin-left: 3px; font-size:12px;">Set URL</button>
29740+
<button type="button" class="btn btn-primary" onclick="set_comfy_key()" style="width:52px; padding: 2px 2px; margin-left: 3px; font-size:12px;">Set Key</button>
2962329741
</div>
2962429742
<div class="settinglabel" style="display: flex; width: 100%">
2962529743
<div class="justifyleft">LoRA</div>

koboldcpp.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@
7070
extra_images_max = 4 # for kontext/qwen img
7171

7272
# global vars
73-
KcppVersion = "1.109.1"
73+
KcppVersion = "1.109.2"
7474
showdebug = True
7575
kcpp_instance = None #global running instance
7676
global_memory = {"tunnel_url": "", "restart_target":"", "input_to_exit":False, "load_complete":False, "restart_override_config_target":""}

otherarch/qwen3tts/audio_tokenizer_decoder.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -339,7 +339,7 @@ bool AudioTokenizerDecoder::load_model(const std::string & model_path) {
339339
upload_if_present(model_.vq_rest_codebook[i]);
340340
}
341341

342-
state_.backend = init_preferred_backend("AudioTokenizerDecoder", &error_msg_, true);
342+
state_.backend = init_preferred_backend("AudioTokenizerDecoder", &error_msg_, qwen3tts_allowgpu);
343343
if (!state_.backend) {
344344
return false;
345345
}

otherarch/qwen3tts/audio_tokenizer_encoder.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -249,11 +249,11 @@ bool AudioTokenizerEncoder::load_model(const std::string & model_path) {
249249
}
250250

251251
if (!load_tensor_data_from_file(model_path, gguf_ctx, model_.ctx,
252-
model_.tensors, model_.buffer, error_msg_, GGML_BACKEND_DEVICE_TYPE_GPU)) {
252+
model_.tensors, model_.buffer, error_msg_, qwen3tts_allowgpu?GGML_BACKEND_DEVICE_TYPE_GPU:GGML_BACKEND_DEVICE_TYPE_CPU)) {
253253
return false;
254254
}
255255

256-
state_.backend = init_preferred_backend("AudioTokenizerEncoder", &error_msg_, true);
256+
state_.backend = init_preferred_backend("AudioTokenizerEncoder", &error_msg_, qwen3tts_allowgpu);
257257
if (!state_.backend) {
258258
return false;
259259
}

otherarch/qwen3tts/gguf_loader.h

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,8 @@
99
#include <vector>
1010
#include <memory>
1111

12+
extern bool qwen3tts_allowgpu;
13+
1214
namespace qwen3_tts {
1315

1416
// Generic GGUF model loader class

otherarch/qwen3tts/tts_transformer.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,7 @@ bool TTSTransformer::load_model(const std::string & model_path) {
9797
gguf_free(ctx);
9898
if (meta_ctx) ggml_free(meta_ctx);
9999

100-
state_.backend = init_preferred_backend("TTSTransformer", &error_msg_, true);
100+
state_.backend = init_preferred_backend("TTSTransformer", &error_msg_, qwen3tts_allowgpu);
101101
if (!state_.backend) {
102102
return false;
103103
}
@@ -553,7 +553,7 @@ bool TTSTransformer::create_tensors(struct gguf_context * ctx) {
553553
}
554554

555555
bool TTSTransformer::load_tensor_data(const std::string & path, struct gguf_context * ctx) {
556-
ggml_backend_t backend = init_preferred_backend("TTSTransformer", &error_msg_, true);
556+
ggml_backend_t backend = init_preferred_backend("TTSTransformer", &error_msg_, qwen3tts_allowgpu);
557557
if (!backend) {
558558
return false;
559559
}

otherarch/tts_adapter.cpp

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -501,6 +501,7 @@ static std::string detectedarch = "";
501501
//qwen3tts specific
502502
static bool is_qwen3tts_file = false;
503503
static qwen3_tts::Qwen3TTS qwen3tts_runner;
504+
bool qwen3tts_allowgpu = false;
504505

505506
int total_tts_gens = 0;
506507
static std::string tts_executable_path = "";
@@ -576,6 +577,7 @@ bool ttstype_load_model(const tts_load_model_inputs inputs)
576577
}
577578
else if(is_qwen3tts_file)
578579
{
580+
qwen3tts_allowgpu = (inputs.gpulayers>0?true:false);
579581
if (!qwen3tts_runner.load_models(modelfile_ttc,modelfile_cts)) {
580582
printf("\nQwen3TTS Load Error: %s\n", qwen3tts_runner.get_error().c_str());
581583
return false;

0 commit comments

Comments (0)