import torch

# Placeholders: supply a real DataLoader, optimizer, model, and loss function.
loader, optimizer, model, loss_fn = ...

# Running average of the model's parameters (Stochastic Weight Averaging).
swa_model = torch.optim.swa_utils.AveragedModel(model)
# Cosine annealing for the pre-SWA phase of training.
scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=300)
swa_start = 160  # epoch after which we switch to the SWA schedule
# FIX: SWALR was referenced unqualified without an import (NameError);
# qualify it consistently with the other swa_utils usages.
swa_scheduler = torch.optim.swa_utils.SWALR(optimizer, swa_lr=0.05)

for epoch in range(300):
    for batch, target in loader:  # renamed from `input` (shadowed builtin)
        optimizer.zero_grad()
        loss_fn(model(batch), target).backward()
        optimizer.step()
    # FIX: was `if i > swa_start:` — `i` is undefined; the loop variable
    # is `epoch`.
    if epoch > swa_start:
        # In the SWA phase: fold the current weights into the average and
        # step the constant-LR SWA schedule instead of cosine annealing.
        swa_model.update_parameters(model)
        swa_scheduler.step()
    else:
        scheduler.step()

# Update bn statistics for the swa_model at the end: the averaged weights
# need BatchNorm running stats recomputed with a full pass over the data.
torch.optim.swa_utils.update_bn(loader, swa_model)
# Use swa_model to make predictions on test data.
# NOTE(review): `test_input` is not defined anywhere in this snippet —
# it must be supplied by the surrounding code.
preds = swa_model(test_input)