Skip to content

Commit 49d98fe

Browse files
authored
Merge branch 'main' into fix-lfs-pointer-problems
2 parents ff1f987 + 015da50 commit 49d98fe

1 file changed

Lines changed: 11 additions & 4 deletions

File tree

examples/dreambooth/train_dreambooth_lora_sd3.py

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1603,17 +1603,24 @@ def compute_text_embeddings(prompt, text_encoders, tokenizers):
16031603
free_memory()
16041604

16051605
# Scheduler and math around the number of training steps.
1606+
# Check the PR https://github.com/huggingface/diffusers/pull/8312 for detailed explanation.
16061607
overrode_max_train_steps = False
1607-
num_update_steps_per_epoch = math.ceil(len(train_dataloader) / args.gradient_accumulation_steps)
1608+
num_warmup_steps_for_scheduler = args.lr_warmup_steps * accelerator.num_processes
16081609
if args.max_train_steps is None:
1609-
args.max_train_steps = args.num_train_epochs * num_update_steps_per_epoch
1610+
len_train_dataloader_after_sharding = math.ceil(len(train_dataloader) / accelerator.num_processes)
1611+
num_update_steps_per_epoch = math.ceil(len_train_dataloader_after_sharding / args.gradient_accumulation_steps)
1612+
num_training_steps_for_scheduler = (
1613+
args.num_train_epochs * accelerator.num_processes * num_update_steps_per_epoch
1614+
)
16101615
overrode_max_train_steps = True
1616+
else:
1617+
num_training_steps_for_scheduler = args.max_train_steps * accelerator.num_processes
16111618

16121619
lr_scheduler = get_scheduler(
16131620
args.lr_scheduler,
16141621
optimizer=optimizer,
1615-
num_warmup_steps=args.lr_warmup_steps * accelerator.num_processes,
1616-
num_training_steps=args.max_train_steps * accelerator.num_processes,
1622+
num_warmup_steps=num_warmup_steps_for_scheduler,
1623+
num_training_steps=num_training_steps_for_scheduler,
16171624
num_cycles=args.lr_num_cycles,
16181625
power=args.lr_power,
16191626
)

0 commit comments

Comments (0)