Skip to content

Commit 77f8cf8

Browse files
yadferhad and sayakpaul authored
Fix Flux2 non-diffusers guidance LoRA conversion (#13486)
* Fix Flux2 LoRA guidance conversion
* Handle expanded Flux2 LoRA block names
* Address Flux2 PR review feedback

Co-authored-by: Sayak Paul <spsayakpaul@gmail.com>
1 parent 656da84 commit 77f8cf8

1 file changed

Lines changed: 16 additions & 0 deletions

File tree

src/diffusers/loaders/lora_conversion_utils.py

Lines changed: 16 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -2331,6 +2331,20 @@ def _convert_non_diffusers_flux2_lora_to_diffusers(state_dict):
23312331
temp_state_dict[new_key] = v
23322332
original_state_dict = temp_state_dict
23332333

2334+
# Some Flux2 checkpoints skip the ai-toolkit `single_blocks` / `double_blocks`
2335+
# layout and already store expanded diffusers block names. Accept those
2336+
# directly, and normalize the legacy `sformer_blocks` alias used by some exports.
2337+
possible_expanded_block_prefixes = {
2338+
"single_transformer_blocks.": "single_transformer_blocks.",
2339+
"transformer_blocks.": "transformer_blocks.",
2340+
"sformer_blocks.": "transformer_blocks.",
2341+
}
2342+
for key in list(original_state_dict.keys()):
2343+
for source_prefix, target_prefix in possible_expanded_block_prefixes.items():
2344+
if key.startswith(source_prefix):
2345+
converted_state_dict[target_prefix + key[len(source_prefix) :]] = original_state_dict.pop(key)
2346+
break
2347+
23342348
num_double_layers = 0
23352349
num_single_layers = 0
23362350
for key in original_state_dict.keys():
@@ -2421,6 +2435,8 @@ def _convert_non_diffusers_flux2_lora_to_diffusers(state_dict):
24212435
"txt_in": "context_embedder",
24222436
"time_in.in_layer": "time_guidance_embed.timestep_embedder.linear_1",
24232437
"time_in.out_layer": "time_guidance_embed.timestep_embedder.linear_2",
2438+
"guidance_in.in_layer": "time_guidance_embed.guidance_embedder.linear_1",
2439+
"guidance_in.out_layer": "time_guidance_embed.guidance_embedder.linear_2",
24242440
"final_layer.linear": "proj_out",
24252441
"final_layer.adaLN_modulation.1": "norm_out.linear",
24262442
"single_stream_modulation.lin": "single_stream_modulation.linear",

0 commit comments

Comments (0)