Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2020/01/27 18:57:41 UTC

[GitHub] [incubator-mxnet] ChaiBapchya commented on a change in pull request #17449: Implemented large tensor flag for opperf testing

ChaiBapchya commented on a change in pull request #17449: Implemented large tensor flag for opperf testing
URL: https://github.com/apache/incubator-mxnet/pull/17449#discussion_r371420014
 
 

 ##########
 File path: benchmark/opperf/nd_operations/nn_activation_operators.py
 ##########
 @@ -55,55 +55,106 @@ def run_activation_operators_benchmarks(ctx=mx.cpu(), dtype='float32', profiler=
     Dictionary of results. Key -> Name of the operator, Value -> Benchmark results.
 
     """
-    # Relu and its variation
-    relu_benchmark_res = run_performance_test([getattr(MX_OP_MODULE, "LeakyReLU")],
-                                              run_backward=True,
-                                              dtype=dtype,
-                                              ctx=ctx,
-                                              profiler=profiler,
-                                              inputs=[{"data": (1024, 1024), "act_type": "leaky", "slope": 0.1},
-                                                      {"data": (10000, 1), "act_type": "leaky", "slope": 0.1},
-                                                      {"data": (10000, 100), "act_type": "leaky", "slope": 0.1},
-                                                      {"data": (1024, 1024), "act_type": "elu", "slope": 0.1},
-                                                      {"data": (10000, 1), "act_type": "elu", "slope": 0.1},
-                                                      {"data": (10000, 100), "act_type": "elu", "slope": 0.1},
-                                                      {"data": (1024, 1024), "act_type": "selu"},
-                                                      {"data": (10000, 1), "act_type": "selu"},
-                                                      {"data": (10000, 100), "act_type": "selu"},
-                                                      {"data": (1024, 1024), "act_type": "prelu", "gamma": (1, 1024)},
-                                                      {"data": (10000, 1), "act_type": "prelu", "gamma": (1, 1)},
-                                                      {"data": (10000, 100), "act_type": "prelu", "gamma": (1, 100)}
-                                                      ],
-                                              warmup=warmup,
-                                              runs=runs)
+    if large_tensor == 'on':
+        # Relu and its variation
+        relu_benchmark_res = run_performance_test([getattr(MX_OP_MODULE, "LeakyReLU")],
+                                                run_backward=True,
+                                                dtype=dtype,
+                                                ctx=ctx,
+                                                profiler=profiler,
+                                                inputs=[{"data": (2**16, 2**16), "act_type": "leaky", "slope": 0.1},
+                                                        {"data": (2**4, 2**28), "act_type": "leaky", "slope": 0.1},
+                                                        {"data": (4, 2**30), "act_type": "leaky", "slope": 0.1},
+                                                        {"data": (2**16, 2**16), "act_type": "elu", "slope": 0.1},
+                                                        {"data": (2**4, 2**28), "act_type": "elu", "slope": 0.1},
+                                                        {"data": (4, 2**30), "act_type": "elu", "slope": 0.1},
+                                                        {"data": (2**16, 2**16), "act_type": "selu"},
+                                                        {"data": (2**4, 2**28), "act_type": "selu"},
+                                                        {"data": (4, 2**30), "act_type": "selu"},
+                                                        {"data": (2**16, 2**16), "act_type": "prelu", "gamma": (1, 2**16)},
+                                                        {"data": (2**4, 2**28), "act_type": "prelu", "gamma": (1, 2**28)},
+                                                        {"data": (4, 2**30), "act_type": "prelu", "gamma": (1, 2**30)}
+                                                        ],
+                                                warmup=warmup,
+                                                runs=runs)
 
-    # Sigmoid => Covered as part of Unary ops
-    # Hard_Sigmoid
-    hard_sigmoid_benchmark_res = run_performance_test([getattr(MX_OP_MODULE, "hard_sigmoid")],
-                                                      run_backward=True,
-                                                      dtype=dtype,
-                                                      ctx=ctx,
-                                                      profiler=profiler,
-                                                      inputs=[{"data": (1024, 1024), "alpha": 0.25, "beta": 0.5},
-                                                              {"data": (10000, 1), "alpha": 0.25, "beta": 0.5},
-                                                              {"data": (10000, 100), "alpha": 0.25, "beta": 0.5}
-                                                              ],
-                                                      warmup=warmup,
-                                                      runs=runs)
+        # Sigmoid => Covered as part of Unary ops
+        # Hard_Sigmoid
+        hard_sigmoid_benchmark_res = run_performance_test([getattr(MX_OP_MODULE, "hard_sigmoid")],
+                                                        run_backward=True,
 
 Review comment:
   nitpick: fix the indentation of these continuation lines
   
   run `make pylint` locally to catch these issues
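   
   For illustration only, a minimal sketch of the continuation alignment the nitpick points at, assuming the standard pylint bad-continuation check: once the call is nested under the new `if large_tensor == 'on':` branch, each keyword argument should line up with the column just after the opening parenthesis of `run_performance_test(`. The input list is shortened here; the full list is in the diff above.
   
       if large_tensor == 'on':
           # Relu and its variation (continuation lines aligned with the opening paren)
           relu_benchmark_res = run_performance_test([getattr(MX_OP_MODULE, "LeakyReLU")],
                                                     run_backward=True,
                                                     dtype=dtype,
                                                     ctx=ctx,
                                                     profiler=profiler,
                                                     inputs=[{"data": (2**16, 2**16), "act_type": "leaky", "slope": 0.1}],
                                                     warmup=warmup,
                                                     runs=runs)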

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services