#!/usr/bin/env python
# coding: utf-8

# # Automatic Differentiation

# In[1]:


from mxnet import autograd, np, npx
npx.set_np()  # switch MXNet to NumPy-compatible behavior

x = np.arange(4)
x


# Allocate space to store the gradient with respect to `x`.

# In[2]:


x.attach_grad()


# Record the computation within the `autograd.record` scope so that MXNet builds the graph needed to compute gradients.

# In[3]:


with autograd.record():
    y = 2.0 * np.dot(x, x)
y


# Since the gradient of $\mathbf{x}^{\top}\mathbf{x}$ with respect to $\mathbf{x}$ is $2\mathbf{x}$, the gradient of $y = 2\mathbf{x}^{\top}\mathbf{x}$ should be $4\mathbf{x}$. The difference computed below should therefore be an array of zeros.

# In[4]:


y.backward()
x.grad - 4 * x
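

# As an optional sanity check (an addition, not part of the original notebook), the analytic gradient $4\mathbf{x}$ can be compared against a central finite-difference estimate. The helper `numerical_grad` below is a hypothetical name introduced only for this sketch.

# In[ ]:


def numerical_grad(f, v, eps=1e-2):
    """Central finite-difference estimate of the gradient of a scalar-valued f at v."""
    grad = np.zeros_like(v)
    for i in range(v.size):
        e = np.zeros_like(v)
        e[i] = eps
        # Perturb the i-th coordinate in both directions and take the symmetric difference quotient.
        grad[i] = (f(v + e) - f(v - e)) / (2 * eps)
    return grad

# The estimate should approximately match the autograd result stored in x.grad (i.e. 4 * x),
# so this difference should be close to zero.
numerical_grad(lambda v: 2.0 * np.dot(v, v), x) - x.grad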