Skip to content

Commit 5910a1c

Browse files
Fix Kohya LoRA loading: Flux.1-dev LoRAs with text encoder ("lora_te1_" prefix) (#13188)
* Fix text encoder LoRA loading
* Apply follow-up changes from Cursor's review
1 parent 40e9645 commit 5910a1c

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

src/diffusers/loaders/lora_conversion_utils.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -856,7 +856,7 @@ def _convert(original_key, diffusers_key, state_dict, new_state_dict):
856856
)
857857
state_dict = {k: v for k, v in state_dict.items() if not k.startswith("text_encoders.t5xxl.transformer.")}
858858

859-
has_diffb = any("diff_b" in k and k.startswith(("lora_unet_", "lora_te_")) for k in state_dict)
859+
has_diffb = any("diff_b" in k and k.startswith(("lora_unet_", "lora_te_", "lora_te1_")) for k in state_dict)
860860
if has_diffb:
861861
zero_status_diff_b = state_dict_all_zero(state_dict, ".diff_b")
862862
if zero_status_diff_b:
@@ -895,7 +895,7 @@ def _convert(original_key, diffusers_key, state_dict, new_state_dict):
895895
state_dict = {
896896
_custom_replace(k, limit_substrings): v
897897
for k, v in state_dict.items()
898-
if k.startswith(("lora_unet_", "lora_te_"))
898+
if k.startswith(("lora_unet_", "lora_te_", "lora_te1_"))
899899
}
900900

901901
if any("text_projection" in k for k in state_dict):

0 commit comments

Comments (0)