Skip to content
Snippets Groups Projects
Commit eccaeef8 authored by Riko Corwin Uphoff's avatar Riko Corwin Uphoff
Browse files

Added scheduling

parent 66d40b4e
No related branches found
No related tags found
No related merge requests found
Pipeline #25390 passed
from torch.optim.lr_scheduler import LinearLR
from load_data import load_data
from load_models import get_model
from load_optimizers import get_optimizer
......@@ -85,9 +87,10 @@ if __name__ == "__main__":
# Total optimization steps: for finetuning, one step per batch of the dataset;
# otherwise derive the batch count from the token budget.
num_batches = len(dataloader) if args.mode == "finetuning" else ceil(args.num_training_tokens / args.batch_size)
num_steps = args.num_epochs * num_batches
# Build the LR scheduler from the CLI configuration. NOTE(review): the previous
# code overwrote this with `LinearLR(optimizer, args.lr, args.lr_min, num_steps)`,
# which misuses torch's LinearLR API — its 2nd/3rd positional parameters are
# start_factor/end_factor (multiplicative factors applied to the optimizer's
# base lr, expected in (0, 1]), NOT absolute learning rates. That override (and
# the commented-out duplicate) is removed; the configurable get_scheduler path
# is the single source of truth for the schedule.
scheduler = get_scheduler(
    optimizer, args.lr_scheduler, args.warm_up_fraction, num_steps, args.lr, args.lr_min
)
trained_model = train(device, accelerator, scheduler, model, optimizer, dataloader, num_epochs=args.num_epochs)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment