Accumulate Gradient Trainer

# Each step computes gradients for both networks. The discriminator is
# trained every step; generator gradients are averaged over config.accumulate
# steps and the averaged update is applied once the accumulator is full.
d_grads, g_grads = self.calculate_gradients()
if accumulated_count == config.accumulate:
    self.train_g(average_g_grads)
    average_g_grads = 0  # reset the running average for the next cycle
    accumulated_count = 0
else:
    self.train_d(d_grads)
    average_g_grads += g_grads / config.accumulate
    accumulated_count += 1
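
The same pattern can be written as a standalone loop in plain PyTorch. The sketch below is illustrative only and is not the HyperGAN implementation: the generator and discriminator modules, the toy data, and the placeholder losses are assumptions, while the optimizer settings and the accumulation logic mirror the pseudocode above and the example configuration below.

import torch
import torch.nn as nn

# Hypothetical stand-ins for a real GAN's generator and discriminator.
generator = nn.Linear(16, 32)
discriminator = nn.Linear(32, 1)

accumulate = 10  # corresponds to "accumulate" in the example config below
d_optimizer = torch.optim.Adam(discriminator.parameters(), lr=1e-4, betas=(0.0, 0.999))
g_optimizer = torch.optim.Adam(generator.parameters(), lr=1e-4, betas=(0.0, 0.999))

accumulated_count = 0
average_g_grads = [torch.zeros_like(p) for p in generator.parameters()]

for step in range(1000):
    z = torch.randn(8, 16)
    real = torch.randn(8, 32)
    fake = generator(z)

    # Placeholder losses for illustration; a real setup would use proper GAN losses.
    d_loss = discriminator(fake.detach()).mean() - discriminator(real).mean()
    g_loss = -discriminator(fake).mean()

    # Generator gradients for this step, kept out of .grad so they can be averaged.
    g_grads = torch.autograd.grad(g_loss, list(generator.parameters()))

    if accumulated_count == accumulate:
        # Apply the averaged generator gradients, then reset the accumulator.
        for p, g in zip(generator.parameters(), average_g_grads):
            p.grad = g
        g_optimizer.step()
        g_optimizer.zero_grad()
        average_g_grads = [torch.zeros_like(p) for p in generator.parameters()]
        accumulated_count = 0
    else:
        # Train the discriminator this step and accumulate generator gradients.
        d_optimizer.zero_grad()
        d_loss.backward()
        d_optimizer.step()
        average_g_grads = [a + g / accumulate for a, g in zip(average_g_grads, g_grads)]
        accumulated_count += 1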

examples

{
  "class": "class:hypergan.trainers.accumulate_gradient_trainer.AccumulateGradientTrainer",
  "accumulate": 10,
  "d_optimizer": {
    "class": "class:torch.optim.Adam",
    "lr": 1e-4,
    "betas":[0.0,0.999]
  },
  "g_optimizer": {
    "class": "class:torch.optim.Adam",
    "lr": 1e-4,
    "betas":[0.0,0.999]
  },
  "hooks": [
    {
      "class": "function:hypergan.train_hooks.adversarial_norm_train_hook.AdversarialNormTrainHook",
      "gamma": 1e3,
      "loss": ["d"]
    }
  ]
}
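
Each "class:" or "function:" entry names the Python object to construct, and the remaining keys in that block appear to be passed to it as arguments. As a rough illustration (the discriminator module here is a hypothetical stand-in), the "d_optimizer" entry above corresponds to:

import torch

d_optimizer = torch.optim.Adam(discriminator.parameters(), lr=1e-4, betas=(0.0, 0.999))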

options
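
The options used in the example above are, approximately: "accumulate", the number of steps over which generator gradients are averaged before a generator update is applied; "d_optimizer" and "g_optimizer", the optimizer configurations for the discriminator and generator; and "hooks", an optional list of train hooks to apply during training. Other options may be supported; see the trainer source for the full list.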
