import torch

# learning_rate, dataset, and loss_fn are assumed to be defined elsewhere.
model = [torch.nn.Parameter(torch.randn(2, 2))]  # a list of Parameters, not an nn.Module, so it has no .parameters()
optimizer = torch.optim.AdamW(model, lr=learning_rate, weight_decay=0.01, amsgrad=False)
scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[30, 80], gamma=0.1)
for input, target in dataset:  # one pass (epoch) over the data
    optimizer.zero_grad()
    loss = loss_fn(input @ model[0], target)  # toy forward pass through the lone parameter, then loss
    loss.backward()
    optimizer.step()
scheduler.step()  # call once per epoch: LR is multiplied by gamma=0.1 after epochs 30 and 80
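# A quick check (a sketch, not part of the original): MultiStepLR multiplies the
# LR by gamma at each milestone epoch, so after enough scheduler.step() calls
# the optimizer runs at 10% and then 1% of the starting learning_rate.
print(scheduler.get_last_lr())  # e.g. [learning_rate] until epoch 30 is reached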