diff --git a/beginner_source/ptcheat.rst b/beginner_source/ptcheat.rst
index 91a0586618..49f9c9f595 100644
--- a/beginner_source/ptcheat.rst
+++ b/beginner_source/ptcheat.rst
@@ -22,27 +22,12 @@ Neural Network API
     import torch.nn as nn                     # neural networks
     import torch.nn.functional as F           # layers, activations and more
     import torch.optim as optim               # optimizers e.g. gradient descent, ADAM, etc.
-    from torch.jit import script, trace       # hybrid frontend decorator and tracing jit
 
 See `autograd <https://pytorch.org/docs/stable/autograd.html>`__,
 `nn <https://pytorch.org/docs/stable/nn.html>`__,
 `functional <https://pytorch.org/docs/stable/nn.functional.html>`__
 and `optim <https://pytorch.org/docs/stable/optim.html>`__
 
-TorchScript and JIT
--------------------
-
-.. code-block:: python
-
-    torch.jit.trace()       # takes your module or function and an example
-                            # data input, and traces the computational steps
-                            # that the data encounters as it progresses through the model
-
-    @script                 # decorator used to indicate data-dependent
-                            # control flow within the code being traced
-
-See `Torchscript <https://pytorch.org/docs/stable/jit.html>`__
-
 ONNX
 ----
 
@@ -225,8 +210,10 @@ Optimizers
 
     opt = optim.x(model.parameters(), ...)  # create optimizer
     opt.step()                              # update weights
-    optim.X                                 # where X is SGD, Adadelta, Adagrad, Adam,
-                                            # AdamW, SparseAdam, Adamax, ASGD,
+    opt.zero_grad()                         # clear the gradients
+    optim.X                                 # where X is SGD, AdamW, Adam,
+                                            # Adafactor, NAdam, RAdam, Adadelta,
+                                            # Adagrad, SparseAdam, Adamax, ASGD,
                                             # LBFGS, RMSprop or Rprop
 
 See `optimizers <https://pytorch.org/docs/stable/optim.html>`__
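
Reviewer note: a minimal sketch of the training-step pattern the updated snippet documents, showing where ``opt.zero_grad()`` fits relative to ``backward()`` and ``opt.step()``. The model, data, and hyperparameters below are made-up placeholders, not part of the cheat sheet:

.. code-block:: python

    import torch
    import torch.nn as nn
    import torch.nn.functional as F
    import torch.optim as optim

    model = nn.Linear(10, 1)                        # placeholder: any nn.Module works
    opt = optim.AdamW(model.parameters(), lr=1e-3)  # any optim.X from the list above

    x, y = torch.randn(32, 10), torch.randn(32, 1)  # dummy batch

    for _ in range(5):
        opt.zero_grad()                 # clear gradients; backward() accumulates them,
                                        # so skipping this mixes old and new gradients
        loss = F.mse_loss(model(x), y)  # forward pass and loss
        loss.backward()                 # compute gradients
        opt.step()                      # update weights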