optimizer

Functions

fastvideo.train.utils.optimizer.build_optimizer_and_scheduler

build_optimizer_and_scheduler(*, params: list[Parameter], optimizer_config: OptimizerConfig, loop_config: TrainingLoopConfig, learning_rate: float, betas: tuple[float, float], scheduler_name: str) -> tuple[Optimizer, object]

Build an AdamW optimizer and LR scheduler.

Returns (optimizer, lr_scheduler) so the caller can store them as instance attributes.

Source code in fastvideo/train/utils/optimizer.py
import torch

# OptimizerConfig, TrainingLoopConfig, and get_scheduler are module-level
# imports in this file; their import paths are omitted in this excerpt.

def build_optimizer_and_scheduler(
    *,
    params: list[torch.nn.Parameter],
    optimizer_config: OptimizerConfig,
    loop_config: TrainingLoopConfig,
    learning_rate: float,
    betas: tuple[float, float],
    scheduler_name: str,
) -> tuple[torch.optim.Optimizer, object]:
    """Build an AdamW optimizer and LR scheduler.

    Returns ``(optimizer, lr_scheduler)`` so the caller can store them
    as instance attributes.
    """
    if not params:
        raise ValueError("No trainable parameters passed to "
                         "build_optimizer_and_scheduler")

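    # AdamW is the only optimizer this helper builds; lr and betas come
    # from the caller, weight decay from the optimizer config.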
    optimizer = torch.optim.AdamW(
        params,
        lr=float(learning_rate),
        betas=betas,
        weight_decay=float(optimizer_config.weight_decay),
        eps=1e-8,
    )

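    # num_cycles, power, and min_lr_ratio are scheduler-specific knobs;
    # a given schedule uses only the ones relevant to it. last_epoch=-1
    # starts the schedule from scratch.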
    scheduler = get_scheduler(
        str(scheduler_name),
        optimizer=optimizer,
        num_warmup_steps=int(optimizer_config.lr_warmup_steps),
        num_training_steps=int(loop_config.max_train_steps),
        num_cycles=int(optimizer_config.lr_num_cycles),
        power=float(optimizer_config.lr_power),
        min_lr_ratio=float(optimizer_config.min_lr_ratio),
        last_epoch=-1,
    )

    return optimizer, scheduler
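
A minimal usage sketch follows, assuming optimizer_config and loop_config are already-constructed OptimizerConfig and TrainingLoopConfig instances and that "cosine" is a name the project's get_scheduler accepts; the model and loss here are stand-ins, not FastVideo code.

import torch

from fastvideo.train.utils.optimizer import build_optimizer_and_scheduler

model = torch.nn.Linear(128, 128)  # stand-in for the real model being trained

optimizer, lr_scheduler = build_optimizer_and_scheduler(
    params=[p for p in model.parameters() if p.requires_grad],
    optimizer_config=optimizer_config,  # supplies weight_decay, lr_warmup_steps, ...
    loop_config=loop_config,            # supplies max_train_steps
    learning_rate=1e-4,
    betas=(0.9, 0.999),
    scheduler_name="cosine",            # assumed name; use one get_scheduler supports
)

for _ in range(int(loop_config.max_train_steps)):
    loss = model(torch.randn(4, 128)).pow(2).mean()  # dummy loss
    loss.backward()
    optimizer.step()
    lr_scheduler.step()  # advance the LR schedule once per optimizer step
    optimizer.zero_grad()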