You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@tvm.apache.org by GitBox <gi...@apache.org> on 2021/11/16 17:19:45 UTC

[GitHub] [tvm] ekalda commented on a change in pull request #9515: [microNPU] Allow constants to be given as input to an operator

ekalda commented on a change in pull request #9515:
URL: https://github.com/apache/tvm/pull/9515#discussion_r750487693



##########
File path: tests/python/contrib/test_ethosu/test_codegen.py
##########
@@ -435,6 +435,56 @@ def representative_dataset():
     infra.verify_source(compiled_models, accel_type)
 
 
+@pytest.mark.parametrize("accel_type", ACCEL_TYPES)
+def test_binary_add_from_constant_scalar(accel_type):
+    dtype = "uint8"
+    ifm_shape = (1, 4, 4, 8)
+
+    def create_relay_graph():
+        inp = relay.var("input", shape=ifm_shape, dtype=dtype)
+        scalar = relay.const(np.ones((1, 1, 1, 1), dtype=dtype), dtype=dtype)
+        add = relay.qnn.op.add(
+            inp,
+            scalar,
+            relay.const(1.0, dtype="float32"),
+            relay.const(0, dtype="int32"),
+            relay.const(1.0, dtype="float32"),
+            relay.const(0, dtype="int32"),
+            relay.const(1.0, dtype="float32"),
+            relay.const(0, dtype="int32"),
+        )
+        func = relay.Function(relay.analysis.free_vars(add), add)

Review comment:
       Is there a reason we start from Relay here instead of TFLite? 

##########
File path: tests/python/contrib/test_ethosu/test_encode_constants.py
##########
@@ -270,5 +273,47 @@ def _get_func():
     assert reference_const_sizes == test_const_sizes
 
 
+def test_constant_as_input():
+    """Test to check that constants specified as inputs aren't
+    interpreted as an encoded constant."""
+
+    def get_graph():
+        dtype = "uint8"

Review comment:
       Why does the constant need to be `uint8`? (just asking for enlightenment)

##########
File path: tests/python/contrib/test_ethosu/test_codegen.py
##########
@@ -435,6 +435,56 @@ def representative_dataset():
     infra.verify_source(compiled_models, accel_type)
 
 
+@pytest.mark.parametrize("accel_type", ACCEL_TYPES)
+def test_binary_add_from_constant_scalar(accel_type):
+    dtype = "uint8"
+    ifm_shape = (1, 4, 4, 8)
+
+    def create_relay_graph():
+        inp = relay.var("input", shape=ifm_shape, dtype=dtype)
+        scalar = relay.const(np.ones((1, 1, 1, 1), dtype=dtype), dtype=dtype)
+        add = relay.qnn.op.add(
+            inp,
+            scalar,
+            relay.const(1.0, dtype="float32"),
+            relay.const(0, dtype="int32"),
+            relay.const(1.0, dtype="float32"),
+            relay.const(0, dtype="int32"),
+            relay.const(1.0, dtype="float32"),
+            relay.const(0, dtype="int32"),
+        )
+        func = relay.Function(relay.analysis.free_vars(add), add)
+        return tvm.IRModule.from_expr(func)
+
+    mod = create_relay_graph()
+    partitioned_mod = partition_for_ethosu(mod)
+
+    # Generate reference data
+    input_data = {"input": np.random.randint(low=0, high=255, size=ifm_shape, dtype=dtype)}
+    output_data = generate_ref_data(mod, input_data)
+
+    compiled_models = infra.build_source(
+        partitioned_mod,
+        input_data,
+        output_data,
+        accel_type,
+        output_tolerance=0,
+    )
+
+    # Assumes only two runtime.Modules are created -- i.e. single offload module
+    imported_modules = compiled_models[0].executor_factory.lib.imported_modules
+    assert len(imported_modules) == 2
+    ethosu_module = imported_modules[0]
+
+    # Verify generated C source
+    get_cs = tvm._ffi.get_global_func("runtime.module.ethosu.getcs")

Review comment:
       ```suggestion
       get_cs = tvm._ffi.get_global_func("runtime.module.ethos-u.getcs")
       ```
   Looks like 'ethos-u' is all the rage now :) 




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscribe@tvm.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org