You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@tvm.apache.org by zh...@apache.org on 2020/09/05 22:06:50 UTC
[incubator-tvm] branch master updated: [TARGET] Add
layout_transform, clip and expand_dims in onnx converter (#6366)
This is an automated email from the ASF dual-hosted git repository.
zhic pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-tvm.git
The following commit(s) were added to refs/heads/master by this push:
new 9aa69e2 [TARGET] Add layout_transform, clip and expand_dims in onnx converter (#6366)
9aa69e2 is described below
commit 9aa69e2e14a75962faaf352b639c9daf53fc2222
Author: Xingyu Zhou <zh...@amazon.com>
AuthorDate: Sat Sep 5 15:06:35 2020 -0700
[TARGET] Add layout_transform, clip and expand_dims in onnx converter (#6366)
* Add layout_transform, clip and expand_dims in onnx converter
* remove _add_input and address comments
* address comments
---
python/tvm/contrib/target/onnx.py | 148 +++++++++++++++++++++++++++++++-------
tests/python/contrib/test_onnx.py | 35 +++++++++
2 files changed, 157 insertions(+), 26 deletions(-)
diff --git a/python/tvm/contrib/target/onnx.py b/python/tvm/contrib/target/onnx.py
index 7f6945a..25e9fd4 100644
--- a/python/tvm/contrib/target/onnx.py
+++ b/python/tvm/contrib/target/onnx.py
@@ -64,12 +64,14 @@ def call_node_infer_type(node):
return types
-def add_input(data, name, model_container):
+def add_input(data, name, prefix, model_container):
+ input_name = '{}_{}'.format(prefix, name)
dtype = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[data.dtype]
- tensor_value_info = onnx.helper.make_tensor_value_info(name, dtype, shape=data.shape)
+ tensor_value_info = onnx.helper.make_tensor_value_info(input_name, dtype, shape=data.shape)
model_container.add_inputs([tensor_value_info])
- data_tensor = numpy_helper.from_array(data, name)
+ data_tensor = numpy_helper.from_array(data, input_name)
model_container.add_initializers([data_tensor])
+ return input_name
class OpConverter(object):
@@ -111,14 +113,16 @@ class Reshape(object):
Relay operator accepts shape as attribute but ONNX operator
accepts it as an input.
"""
-
+ name = node_entry['name']
shape = numpy.asarray([a.value for a in node_entry['relay_node'].attrs.newshape],
dtype=numpy.int64)
- input_name = 'shape{}'.format(node_entry['name'])
- node = onnx.helper.make_node(cls.__name__, [node_entry['input_names'][0], input_name],
+
+ input_names = [node_entry['input_names'][0],
+ add_input(shape, name, 'shape', model_container)]
+
+ node = onnx.helper.make_node(cls.__name__, input_names,
node_entry['output_names'])
model_container.add_nodes([node])
- add_input(shape, input_name, model_container)
class Conv(OpConverter):
@@ -349,13 +353,12 @@ class Pad(OpConverter):
name = node_entry['name']
data = numpy.asarray(attrs['pads'], dtype=attrs['pads'][0].dtype).astype(numpy.int64)
- input_name = 'pads_{}'.format(name)
value = numpy.dtype(node_entry['types'][0].dtype).type(attrs['constant_value'])
- input_value_name = 'value_{}'.format(name)
- add_input(data, input_name, model_container)
- add_input(value, input_value_name, model_container)
- input_names = [node_entry['input_names'][0], input_name, input_value_name]
+ input_names = [node_entry['input_names'][0],
+ add_input(data, name, 'pads', model_container),
+ add_input(value, name, 'value', model_container)]
+
node = onnx.helper.make_node(cls.__name__, input_names, node_entry['output_names'])
model_container.add_nodes([node])
@@ -440,17 +443,16 @@ class Slice(OpConverter):
else:
steps += [1] * (len(shape) - len(steps))
- def _add_input(val, input_name):
- val_arr = numpy.asarray(val).astype(numpy.int64)
- input_name = '{}_{}'.format(name, input_name)
- add_input(val_arr, input_name, model_container)
- return input_name
+ starts = numpy.asarray(starts).astype(numpy.int64)
+ ends = numpy.asarray(ends).astype(numpy.int64)
+ axes = numpy.asarray(axes).astype(numpy.int64)
+ steps = numpy.asarray(steps).astype(numpy.int64)
input_names = []
- input_names.append(_add_input(starts, 'starts'))
- input_names.append(_add_input(ends, 'ends'))
- input_names.append(_add_input(axes, 'axes'))
- input_names.append(_add_input(steps, 'steps'))
+ input_names.append(add_input(starts, name, 'starts', model_container))
+ input_names.append(add_input(ends, name, 'ends', model_container))
+ input_names.append(add_input(axes, name, 'axes', model_container))
+ input_names.append(add_input(steps, name, 'steps', model_container))
input_names = [node_entry['input_names'][0]] + input_names
@@ -511,6 +513,94 @@ class Split(OpConverter):
model_container.add_nodes([slice_node])
+class LayoutTransform(OpConverter):
+ """ Operator converter for LayoutTransform
+ """
+
+ @classmethod
+ def convert_attributes(cls, attrs):
+ src_layout = attrs.get_str("src_layout")
+ dst_layout = attrs.get_str("dst_layout")
+
+ perm = [src_layout.index(c) for c in dst_layout]
+ return {'perm': tuple(perm)}
+
+ @classmethod
+ def convert(cls, node_entry, model_container, node_dict):
+ attrs = cls.convert_attributes(node_entry['relay_node'].attrs)
+ onnx_node = onnx.helper.make_node("Transpose",
+ node_entry['input_names'],
+ node_entry['output_names'],
+ **attrs)
+ model_container.add_nodes([onnx_node])
+
+
+class Clip(OpConverter):
+ """ Operator converter for Clip.
+ """
+
+ @classmethod
+ def convert_attributes(cls, attrs):
+ return {
+ 'min': attrs.a_min,
+ 'max': attrs.a_max
+ }
+
+ @classmethod
+ def convert(cls, node_entry, model_container, node_dict):
+ attrs = cls.convert_attributes(node_entry['relay_node'].attrs)
+
+ name = node_entry['name']
+
+ min_val = numpy.asarray(attrs['min']).astype(numpy.float32)
+ max_val = numpy.asarray(attrs['max']).astype(numpy.float32)
+
+ input_names = []
+ input_names.append(add_input(min_val, name, 'min', model_container))
+ input_names.append(add_input(max_val, name, 'max', model_container))
+
+ input_names = [node_entry['input_names'][0]] + input_names
+
+ node = onnx.helper.make_node(cls.__name__, input_names, node_entry['output_names'])
+ model_container.add_nodes([node])
+
+
+class Expand(OpConverter):
+ """ Operator converter for Expand_dims.
+ """
+
+ @classmethod
+ def convert_attributes(cls, attrs):
+ return {
+ 'axis': attrs.axis,
+ 'num_newaxis': attrs.num_newaxis
+ }
+
+ @classmethod
+ def convert(cls, node_entry, model_container, node_dict):
+ attrs = cls.convert_attributes(node_entry['relay_node'].attrs)
+
+ name = node_entry['name']
+
+ input_node = node_dict[node_entry['inputs'][0]]
+ assert len(input_node) == 1, "input node_entry can not be a Tuple"
+ input_node = input_node[0]
+ data_shape = input_node['types'][0].shape
+ new_shape = list(data_shape)
+
+ for _ in range(attrs['num_newaxis']):
+ new_shape.insert(attrs['axis'], 1)
+
+ new_shape = numpy.asarray(new_shape).astype(numpy.int64)
+ input_names = []
+ input_names.append(add_input(new_shape, name, 'shape', model_container))
+
+ input_names = [node_entry['input_names'][0]] + input_names
+
+ node = onnx.helper.make_node(cls.__name__, input_names, node_entry['output_names'])
+ model_container.add_nodes([node])
+
+
class ConstantOfShapeZeros(OpConverter):
""" Operator converter for ConstantOfShape.
"""
@@ -528,17 +618,20 @@ class ConstantOfShapeZeros(OpConverter):
assert len(input_node) == 1, "input node can not be a Tuple"
input_node = input_node[0]
dtype = input_node['types'][0].dtype
- input_shape_name = 'shape_{}'.format(node_entry['name'])
+
+ name = node_entry['name']
shape = [val.value for val in input_node['types'][0].shape]
shape = numpy.asarray(shape).astype(numpy.int64)
- add_input(shape, input_shape_name, model_container)
+
+ input_names = []
+ input_names.append(add_input(shape, name, 'shape', model_container))
dtype = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[numpy.dtype(dtype)]
tensor_value = onnx.helper.make_tensor("value", dtype,
[1], [attrs['value']])
node = onnx.helper.make_node('ConstantOfShape',
- [input_shape_name],
+ input_names,
node_entry['output_names'],
value=tensor_value)
model_container.add_nodes([node])
@@ -584,7 +677,10 @@ relay_to_onnx_op_mapping = {
'ones_like': ConstantOfShapeOnes,
'subtract': rename('Sub'),
'split': Split,
- 'exp': rename('Exp')
+ 'exp': rename('Exp'),
+ 'layout_transform': LayoutTransform,
+ 'clip': Clip,
+ 'expand_dims': Expand
}
@@ -670,7 +766,7 @@ class RelayToONNXConverter(ExprVisitor):
"input_names": [name], # input names in case of call nodes else self name
"output_names": [name], # output names in case of call nodes else self name
"op": None, # op name in case of call node else None
- }
+ }
def convert_to_onnx(self, func):
""" Traverse Relay graph and generate a ONNX model"""
diff --git a/tests/python/contrib/test_onnx.py b/tests/python/contrib/test_onnx.py
index 76b6bab..ccc122f 100644
--- a/tests/python/contrib/test_onnx.py
+++ b/tests/python/contrib/test_onnx.py
@@ -448,6 +448,38 @@ def test_tuple_types():
verify_tuple_types((5, 5, 2, 2), [1, 3, 4], axis=0)
verify_tuple_types((5, 5, 2, 2), [1, 3, 4], axis=1)
+def test_layout_transform():
+ def verify_layout_transform(dshape, src_layout, dst_layout, dtype="float32"):
+ x = relay.var("x", relay.ty.TensorType(dshape, dtype))
+ y = relay.layout_transform(x, src_layout, dst_layout)
+ func = relay.Function([x], y)
+ x_data = np.random.uniform(size=dshape).astype(dtype)
+ verify_results(func, [x_data], 'test_layout_transform', rtol=1e-5, atol=1e-5)
+
+ verify_layout_transform((1, 3, 8, 8), 'NCHW', 'NHWC')
+ verify_layout_transform((1, 8, 8, 3), 'NHWC', 'NCHW')
+
+def test_clip():
+ def verify_clip(dshape, a_min, a_max, dtype="float32"):
+ x = relay.var("x", relay.ty.TensorType(dshape, dtype))
+ y = relay.clip(x, a_min, a_max)
+ func = relay.Function([x], y)
+ x_data = np.random.uniform(size=dshape).astype(dtype)
+ verify_results(func, [x_data], 'test_clip', rtol=1e-5, atol=1e-5)
+
+ verify_clip((5, 5, 2, 5), 0, 0.2)
+ verify_clip((5, 5, 2, 5), 0.2, 0.5)
+
+def test_expand_dims():
+ def verify_expand_dims(dshape, axis, num_newaxis, dtype="float32"):
+ x = relay.var("x", relay.ty.TensorType(dshape, dtype))
+ y = relay.expand_dims(x, axis, num_newaxis)
+ func = relay.Function([x], y)
+ x_data = np.random.uniform(size=dshape).astype(dtype)
+ verify_results(func, [x_data], 'test_expand_dims', rtol=1e-5, atol=1e-5)
+
+ verify_expand_dims((1, 1001), 0, 2)
+ verify_expand_dims((1, 1, 1001), 2, 2)
if __name__ == '__main__':
test_add()
@@ -469,3 +501,6 @@ if __name__ == '__main__':
test_cmp_type()
test_binary_op()
test_tuple_types()
+ test_layout_transform()
+ test_clip()
+ test_expand_dims()