@@ -214,7 +214,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_unet(
             state_dict,
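# Note: nearly every remaining hunk in this commit repeats the same one-line
# message change in the other `load_lora_weights` implementations of this file
# (one variant, at -1659, is noted further down). A minimal, self-contained
# sketch of the check whose error message is being improved — the helper name
# and example keys below are illustrative, not part of the diff:

def _check_lora_keys(state_dict):
    # A LoRA checkpoint is expected to name every parameter with a "lora"
    # substring (e.g. `...lora_A.weight` / `...lora_B.weight`).
    is_correct_format = all("lora" in key for key in state_dict.keys())
    if not is_correct_format:
        raise ValueError(
            "Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring."
        )

valid = {"transformer.blocks.0.attn.to_q.lora_A.weight": None}
invalid = {"transformer.blocks.0.attn.to_q.weight": None}

_check_lora_keys(valid)    # passes silently
try:
    _check_lora_keys(invalid)
except ValueError as err:
    print(err)             # now explains *why* the checkpoint was rejected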
@@ -641,7 +641,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_unet(
             state_dict,
@@ -1081,7 +1081,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -1377,7 +1377,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -1659,7 +1659,7 @@ def load_lora_weights(
         )
 
         if not (has_lora_keys or has_norm_keys):
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         transformer_lora_state_dict = {
             k: state_dict.get(k)
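# The hunk above is the one variant in this commit: this loader also accepts
# checkpoints that ship norm-layer weights, so the checkpoint is valid when it
# has either LoRA keys or norm keys. A hedged sketch of that combined check —
# how `has_lora_keys`/`has_norm_keys` are computed earlier in the file is
# assumed here, not copied from the diff:

def _check_lora_or_norm_keys(state_dict, norm_prefix="transformer.norm"):
    has_lora_keys = any("lora" in key for key in state_dict.keys())
    has_norm_keys = any(key.startswith(norm_prefix) for key in state_dict.keys())
    if not (has_lora_keys or has_norm_keys):
        raise ValueError(
            "Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring."
        )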
@@ -2506,7 +2506,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -2703,7 +2703,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -2906,7 +2906,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -3115,7 +3115,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         transformer_peft_state_dict = {
             k: v for k, v in state_dict.items() if k.startswith(f"{self.transformer_name}.")
@@ -3333,7 +3333,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -3536,7 +3536,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -3740,7 +3740,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -3940,7 +3940,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -4194,7 +4194,7 @@ def load_lora_weights(
         )
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         load_into_transformer_2 = kwargs.pop("load_into_transformer_2", False)
         if load_into_transformer_2:
@@ -4471,7 +4471,7 @@ def load_lora_weights(
         )
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         load_into_transformer_2 = kwargs.pop("load_into_transformer_2", False)
         if load_into_transformer_2:
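# The two hunks at -4194 and -4471 sit next to a `load_into_transformer_2`
# keyword (visible in the diff context) that routes the LoRA weights into a
# pipeline's second transformer. A hypothetical usage sketch — the pipeline
# class and model/LoRA ids below are assumptions, not taken from this commit:
#
#     import torch
#     from diffusers import DiffusionPipeline
#
#     pipe = DiffusionPipeline.from_pretrained("some/model-id", torch_dtype=torch.bfloat16)
#     pipe.load_lora_weights("some/lora-id", load_into_transformer_2=True)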
@@ -4691,7 +4691,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
        if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -4894,7 +4894,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -5100,7 +5100,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -5306,7 +5306,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,
@@ -5509,7 +5509,7 @@ def load_lora_weights(
 
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
-            raise ValueError("Invalid LoRA checkpoint.")
+            raise ValueError("Invalid LoRA checkpoint. Make sure all LoRA param names contain `'lora'` substring.")
 
         self.load_lora_into_transformer(
             state_dict,