Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2019/07/22 20:43:03 UTC

[GitHub] [incubator-mxnet] larroy commented on a change in pull request #15611: [MXNET-978] n-th order gradient test support

URL: https://github.com/apache/incubator-mxnet/pull/15611#discussion_r306023572
 
 

 ##########
 File path: tests/python/unittest/test_higher_order_grad.py
 ##########
 @@ -148,30 +165,48 @@ def grad_grad_op(x):
         shape = rand_shape_nd(dim)
         array = random_arrays(shape)
         check_second_order_unary(array, sigmoid, grad_grad_op)
+        # TODO(kshitij12345): Remove
+        check_nth_order_unary(array, sigmoid, [grad_op, grad_grad_op], [1, 2])
+        check_nth_order_unary(array, sigmoid, grad_grad_op, 2)
 
 
 def check_second_order_unary(x, op, grad_grad_op):
+    check_nth_order_unary(x, op, grad_grad_op, 2)
+
+
+def check_nth_order_unary(x, op, grad_ops, orders):
+    if isinstance(orders, int):
+        orders = [orders]
+        grad_ops = [grad_ops]
+
     x = nd.array(x)
-    grad_grad_x = grad_grad_op(x)
     x.attach_grad()
 
-    # Manual head_grads.
-    y_grad = nd.random.normal(shape=x.shape)
-    head_grad_grads = nd.random.normal(shape=x.shape)
+    order = max(orders)
+    expected_grads = [grad_op(x) for grad_op in grad_ops]
+    computed_grads = []
+    head_grads = []
 
     # Perform compute.
     with autograd.record():
         y = op(x)
-        x_grad = autograd.grad(heads=y, variables=x, head_grads=y_grad,
-                               create_graph=True, retain_graph=True)[0]
-    x_grad.backward(head_grad_grads)
-
-    # Compute expected values.
-    expected_grad_grad = grad_grad_x.asnumpy() * head_grad_grads.asnumpy() * \
-        y_grad.asnumpy()
-
-    # Validate the gradients.
-    assert_almost_equal(expected_grad_grad, x.grad.asnumpy())
+        for current_order in range(1, order+1):
+            head_grad = nd.random.normal(shape=x.shape)
+            y = autograd.grad(heads=y, variables=x, head_grads=head_grad,
+                              create_graph=True, retain_graph=True)[0]
+            if current_order in orders:
 
 Review comment:
   If current_order is not in orders, might we have a problem zipping? Is there a case where you would want the 1st and 3rd order but not the 2nd?
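   
   To make the concern concrete, here is a standalone sketch in plain Python (no MXNet needed). The helper name collect_computed and the placeholder strings are illustrative only, not the PR's code; it assumes the elided body appends to computed_grads only when current_order is in orders. Since zip() silently truncates to the shorter sequence, a mismatch between the requested orders and the collected gradients would skip assertions rather than fail, unless the lengths are checked explicitly:
   
       # Standalone sketch: collect gradients only for the requested orders.
       # With grad_ops and orders supplied pairwise by the caller, even a
       # sparse request such as orders=[1, 3] keeps the lists equal length.
       def collect_computed(orders, max_order):
           computed = []
           for current_order in range(1, max_order + 1):
               if current_order in orders:
                   computed.append('gradient of order %d' % current_order)
           return computed
   
       orders = [1, 3]  # deliberately skip the 2nd order
       expected = ['expected order %d' % o for o in orders]
       computed = collect_computed(orders, max(orders))
       # An explicit length check catches what zip() would hide.
       assert len(expected) == len(computed)
       for exp, com in zip(expected, computed):
           print(exp, '<->', com)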

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services