From 85d0169c28e67bb101ce0d31a1e5c56c13e8b1b6 Mon Sep 17 00:00:00 2001 From: qmpzzpmq <405691733@qq.com> Date: Thu, 16 Feb 2023 22:08:16 +0800 Subject: [PATCH 1/9] bug fix --- oneflow_onnx/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/oneflow_onnx/util.py b/oneflow_onnx/util.py index 387d37d..92f5360 100644 --- a/oneflow_onnx/util.py +++ b/oneflow_onnx/util.py @@ -71,7 +71,7 @@ onnx_pb.TensorProto.UINT16: np.uint16, onnx_pb.TensorProto.INT64: np.int64, onnx_pb.TensorProto.UINT64: np.uint64, - onnx_pb.TensorProto.BOOL: np.bool, + onnx_pb.TensorProto.BOOL: np.bool_, } # From 8d14da0ed496ed91a1c25068b6f0d4a0cf22419e Mon Sep 17 00:00:00 2001 From: qmpzzpmq <405691733@qq.com> Date: Fri, 17 Feb 2023 12:30:20 +0800 Subject: [PATCH 2/9] first implement --- oneflow_onnx/oneflow2onnx/handlers/array.py | 63 +++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/oneflow_onnx/oneflow2onnx/handlers/array.py b/oneflow_onnx/oneflow2onnx/handlers/array.py index a22d49f..3501fa9 100644 --- a/oneflow_onnx/oneflow2onnx/handlers/array.py +++ b/oneflow_onnx/oneflow2onnx/handlers/array.py @@ -241,6 +241,69 @@ def Version_11(cls, ctx, node, **kwargs): # Opset 11 supports negative axis, but core logic is same cls.Version_1(ctx, node, **kwargs) +@flow_op("concat", "Concat") +class Concat: + @classmethod + def Version_1(cls, ctx, node, **kwargs): + # old concat op has axis as input[0] + axis_val = node.attrs.get("axis", None) + + if axis_val < 0: + input_shape = ctx.get_shape(node.input_tensor_names[0]) + axis_val = len(input_shape) + axis_val + node.attrs["axis"] = axis_val + + if ctx.opset < 8: + # opset < 8: might need to wrap concat in casts since only float is supported + _WrapConcatWithCast(ctx, node) + return + + @classmethod + def Version_11(cls, ctx, node, **kwargs): + # Opset 11 supports negative axis, but core logic is same + cls.Version_1(ctx, node, **kwargs) + +@flow_op("stack", "ConcatFromSequence") +class Stack: + @classmethod + def 
Version_11(cls, ctx, node, **kwargs): + axis_val = node.attrs.get("axis", None) + dtypes = node.output_dtypes + ctx.RemoveNode(node.name) + node1 = ctx.MakeNode( + "ConcatFromSequence", + node.input_tensor_names, + outputs=[node.output_tensor_names[0]], + op_name_scope=node.name, name="stack", + dtypes=dtypes, attr={"new_axis": 1, "axis": axis_val} + ) + + @classmethod + def Version_1(cls, ctx, node, **kwargs): + axis_val = node.attrs.get("axis", None) + dtypes = node.output_dtypes + output_shape = node.output_shapes[0] + node_concat = ctx.MakeNode( + "Concat", + node.input_tensor_names, + op_name_scope=node.name, name="concat", + dtypes=dtypes, attr={"axis": axis_val} + ) + node_unsqueeze = ctx.MakeNode( + "Unsqueeze", + node_concat.output_tensor_names, + op_name_scope=node.name, name="unsqueeze", + dtypes=dtypes, attr={"axes": [axis_val]}, + ) + ctx.RemoveNode(node.name) + node_reshape = ctx.MakeNode( + "Reshape", + node_unsqueeze.output_tensor_names, + outputs=node.output_tensor_names, + op_name_scope=node.name, name="reshape", + dtypes=dtypes, attr={"shape": output_shape}, + ) + @flow_op("slice", "Slice") class Slice: From 0f12276cb79a468c54533c49b8294a3130703fd0 Mon Sep 17 00:00:00 2001 From: qmpzzpmq <405691733@qq.com> Date: Fri, 17 Feb 2023 12:40:05 +0800 Subject: [PATCH 3/9] del --- oneflow_onnx/oneflow2onnx/handlers/array.py | 22 --------------------- 1 file changed, 22 deletions(-) diff --git a/oneflow_onnx/oneflow2onnx/handlers/array.py b/oneflow_onnx/oneflow2onnx/handlers/array.py index 3501fa9..7e446b5 100644 --- a/oneflow_onnx/oneflow2onnx/handlers/array.py +++ b/oneflow_onnx/oneflow2onnx/handlers/array.py @@ -219,28 +219,6 @@ def Version_1(cls, ctx, node, **kwargs): pass -@flow_op("concat", "Concat") -class Concat: - @classmethod - def Version_1(cls, ctx, node, **kwargs): - # old concat op has axis as input[0] - axis_val = node.attrs.get("axis", None) - - if axis_val < 0: - input_shape = ctx.get_shape(node.input_tensor_names[0]) - axis_val = 
len(input_shape) + axis_val - node.attrs["axis"] = axis_val - - if ctx.opset < 8: - # opset < 8: might need to wrap concat in casts since only float is supported - _WrapConcatWithCast(ctx, node) - return - - @classmethod - def Version_11(cls, ctx, node, **kwargs): - # Opset 11 supports negative axis, but core logic is same - cls.Version_1(ctx, node, **kwargs) - @flow_op("concat", "Concat") class Concat: @classmethod From 708323a947f30cee617184c49cf9fb3c1cbe5736 Mon Sep 17 00:00:00 2001 From: qmpzzpmq <405691733@qq.com> Date: Fri, 17 Feb 2023 12:41:51 +0800 Subject: [PATCH 4/9] add test --- examples/oneflow2onnx/nodes/CPU/test_stack.py | 50 +++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 examples/oneflow2onnx/nodes/CPU/test_stack.py diff --git a/examples/oneflow2onnx/nodes/CPU/test_stack.py b/examples/oneflow2onnx/nodes/CPU/test_stack.py new file mode 100644 index 0000000..65c0f60 --- /dev/null +++ b/examples/oneflow2onnx/nodes/CPU/test_stack.py @@ -0,0 +1,50 @@ +""" +Copyright 2020 The OneFlow Authors. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import tempfile +import oneflow as flow +from oneflow_onnx.oneflow2onnx.util import convert_to_onnx_and_check + + +class Stack(flow.nn.Module): + def __init__(self) -> None: + super(Stack, self).__init__() + + def forward(self, x: flow.Tensor) -> flow.Tensor: + return flow.stack([x, x, x], dim=1) + + +stack = Stack() + + +class StackOpGraph(flow.nn.Graph): + def __init__(self): + super().__init__() + self.m = stack + + def build(self, x): + out = self.m(x) + return out + + +def test_stack(): + + stack_graph = StackOpGraph() + stack_graph._compile(flow.randn(1, 3, 224, 224)) + + convert_to_onnx_and_check(stack_graph, onnx_model_path="./temp") + + +test_stack() From f06672a5065fffe63d7f9fafb19f6775404ed282 Mon Sep 17 00:00:00 2001 From: qmpzzpmq <405691733@qq.com> Date: Mon, 20 Feb 2023 09:39:04 +0800 Subject: [PATCH 5/9] done --- oneflow_onnx/oneflow2onnx/handlers/array.py | 53 ++++++++++++++------- 1 file changed, 37 insertions(+), 16 deletions(-) diff --git a/oneflow_onnx/oneflow2onnx/handlers/array.py b/oneflow_onnx/oneflow2onnx/handlers/array.py index 7e446b5..56fec73 100644 --- a/oneflow_onnx/oneflow2onnx/handlers/array.py +++ b/oneflow_onnx/oneflow2onnx/handlers/array.py @@ -23,8 +23,10 @@ import logging import sys +from pdb import set_trace import numpy as np +from onnx import helper from onnx import numpy_helper from onnx import onnx_pb from onnx.onnx_pb import TensorProto @@ -245,10 +247,11 @@ def Version_11(cls, ctx, node, **kwargs): class Stack: @classmethod def Version_11(cls, ctx, node, **kwargs): + print("version_11") axis_val = node.attrs.get("axis", None) dtypes = node.output_dtypes ctx.RemoveNode(node.name) - node1 = ctx.MakeNode( + ctx.MakeNode( "ConcatFromSequence", node.input_tensor_names, outputs=[node.output_tensor_names[0]], @@ -258,6 +261,7 @@ def Version_11(cls, ctx, node, **kwargs): @classmethod def Version_1(cls, ctx, node, **kwargs): + print(f"version_1: {ctx.opset}") axis_val = node.attrs.get("axis", None) dtypes = 
node.output_dtypes output_shape = node.output_shapes[0] @@ -265,23 +269,40 @@ def Version_1(cls, ctx, node, **kwargs): "Concat", node.input_tensor_names, op_name_scope=node.name, name="concat", - dtypes=dtypes, attr={"axis": axis_val} - ) - node_unsqueeze = ctx.MakeNode( - "Unsqueeze", - node_concat.output_tensor_names, - op_name_scope=node.name, name="unsqueeze", - dtypes=dtypes, attr={"axes": [axis_val]}, + dtypes=dtypes, attr={"axis": axis_val}, ) ctx.RemoveNode(node.name) - node_reshape = ctx.MakeNode( - "Reshape", - node_unsqueeze.output_tensor_names, - outputs=node.output_tensor_names, - op_name_scope=node.name, name="reshape", - dtypes=dtypes, attr={"shape": output_shape}, - ) - + # since opset 5 + # set_trace() + if ctx.opset > 4: + output_shape = np.array(output_shape) + output_shape_tensor = helper.make_tensor( + name='const_tensor', + data_type=TensorProto.INT64, + dims=output_shape.shape, + vals=output_shape.flatten(), + ) + node_constant = ctx.MakeNode( + "Constant", + [], + op_name_scope=node.name, name="constant", + dtypes=dtypes, attr={"value": output_shape_tensor}, + ) + node_reshape = ctx.MakeNode( + "Reshape", + node_concat.output_tensor_names + node_constant.output_tensor_names, + outputs=node.output_tensor_names, + op_name_scope=node.name, name="reshape", + dtypes=dtypes, + ) + else: + node_reshape = ctx.MakeNode( + "Reshape", + node_concat.output_tensor_names, + outputs=node.output_tensor_names, + op_name_scope=node.name, name="reshape", + dtypes=dtypes, attr={"shape": output_shape}, + ) @flow_op("slice", "Slice") class Slice: From d26e21e75cc164d1796c59b6839cef265cc385ca Mon Sep 17 00:00:00 2001 From: qmpzzpmq <405691733@qq.com> Date: Mon, 20 Feb 2023 09:43:13 +0800 Subject: [PATCH 6/9] format --- oneflow_onnx/oneflow2onnx/handlers/array.py | 73 ++++++++++++++------- 1 file changed, 50 insertions(+), 23 deletions(-) diff --git a/oneflow_onnx/oneflow2onnx/handlers/array.py b/oneflow_onnx/oneflow2onnx/handlers/array.py index 56fec73..762165f 
100644 --- a/oneflow_onnx/oneflow2onnx/handlers/array.py +++ b/oneflow_onnx/oneflow2onnx/handlers/array.py @@ -243,6 +243,7 @@ def Version_11(cls, ctx, node, **kwargs): # Opset 11 supports negative axis, but core logic is same cls.Version_1(ctx, node, **kwargs) + @flow_op("stack", "ConcatFromSequence") class Stack: @classmethod @@ -251,13 +252,7 @@ def Version_11(cls, ctx, node, **kwargs): axis_val = node.attrs.get("axis", None) dtypes = node.output_dtypes ctx.RemoveNode(node.name) - ctx.MakeNode( - "ConcatFromSequence", - node.input_tensor_names, - outputs=[node.output_tensor_names[0]], - op_name_scope=node.name, name="stack", - dtypes=dtypes, attr={"new_axis": 1, "axis": axis_val} - ) + ctx.MakeNode("ConcatFromSequence", node.input_tensor_names, outputs=[node.output_tensor_names[0]], op_name_scope=node.name, name="stack", dtypes=dtypes, attr={"new_axis": 1, "axis": axis_val}) @classmethod def Version_1(cls, ctx, node, **kwargs): @@ -268,8 +263,10 @@ def Version_1(cls, ctx, node, **kwargs): node_concat = ctx.MakeNode( "Concat", node.input_tensor_names, - op_name_scope=node.name, name="concat", - dtypes=dtypes, attr={"axis": axis_val}, + op_name_scope=node.name, + name="concat", + dtypes=dtypes, + attr={"axis": axis_val}, ) ctx.RemoveNode(node.name) # since opset 5 @@ -277,7 +274,7 @@ def Version_1(cls, ctx, node, **kwargs): if ctx.opset > 4: output_shape = np.array(output_shape) output_shape_tensor = helper.make_tensor( - name='const_tensor', + name="const_tensor", data_type=TensorProto.INT64, dims=output_shape.shape, vals=output_shape.flatten(), @@ -285,14 +282,17 @@ def Version_1(cls, ctx, node, **kwargs): node_constant = ctx.MakeNode( "Constant", [], - op_name_scope=node.name, name="constant", - dtypes=dtypes, attr={"value": output_shape_tensor}, + op_name_scope=node.name, + name="constant", + dtypes=dtypes, + attr={"value": output_shape_tensor}, ) node_reshape = ctx.MakeNode( "Reshape", node_concat.output_tensor_names + node_constant.output_tensor_names, 
outputs=node.output_tensor_names, - op_name_scope=node.name, name="reshape", + op_name_scope=node.name, + name="reshape", dtypes=dtypes, ) else: @@ -300,26 +300,41 @@ def Version_1(cls, ctx, node, **kwargs): "Reshape", node_concat.output_tensor_names, outputs=node.output_tensor_names, - op_name_scope=node.name, name="reshape", - dtypes=dtypes, attr={"shape": output_shape}, + op_name_scope=node.name, + name="reshape", + dtypes=dtypes, + attr={"shape": output_shape}, ) + @flow_op("slice", "Slice") class Slice: @classmethod def Version_1(cls, ctx, node, **kwargs): - starts = ctx.MakeConst(oneflow._oneflow_internal.UniqueStr("start"), np.array(node.attrs["start"]).astype(np.int64),) + starts = ctx.MakeConst( + oneflow._oneflow_internal.UniqueStr("start"), + np.array(node.attrs["start"]).astype(np.int64), + ) node.input_tensor_names.append(starts.output_tensor_names[0]) - ends = ctx.MakeConst(oneflow._oneflow_internal.UniqueStr("stop"), np.array(node.attrs["stop"]).astype(np.int64),) + ends = ctx.MakeConst( + oneflow._oneflow_internal.UniqueStr("stop"), + np.array(node.attrs["stop"]).astype(np.int64), + ) node.input_tensor_names.append(ends.output_tensor_names[0]) slice_axes = [] input_shape = ctx.get_shape(node.input_tensor_names[0]) for i in range(len(input_shape)): slice_axes.append(i) - axes = ctx.MakeConst(oneflow._oneflow_internal.UniqueStr("axes"), np.array(slice_axes).astype(np.int64),) + axes = ctx.MakeConst( + oneflow._oneflow_internal.UniqueStr("axes"), + np.array(slice_axes).astype(np.int64), + ) node.input_tensor_names.append(axes.output_tensor_names[0]) - steps = ctx.MakeConst(oneflow._oneflow_internal.UniqueStr("steps"), np.array(node.attrs["step"]).astype(np.int64),) + steps = ctx.MakeConst( + oneflow._oneflow_internal.UniqueStr("steps"), + np.array(node.attrs["step"]).astype(np.int64), + ) node.input_tensor_names.append(steps.output_tensor_names[0]) @classmethod @@ -350,13 +365,25 @@ def Version_1(cls, ctx, node, **kwargs): slice_starts.append(0) 
slice_ends.append(input_shape[i]) - starts = ctx.MakeConst(oneflow._oneflow_internal.UniqueStr("narrow_start"), np.array(slice_starts).astype(np.int64),) + starts = ctx.MakeConst( + oneflow._oneflow_internal.UniqueStr("narrow_start"), + np.array(slice_starts).astype(np.int64), + ) node.input_tensor_names.append(starts.output_tensor_names[0]) - ends = ctx.MakeConst(oneflow._oneflow_internal.UniqueStr("narrow_length"), np.array(slice_ends).astype(np.int64),) + ends = ctx.MakeConst( + oneflow._oneflow_internal.UniqueStr("narrow_length"), + np.array(slice_ends).astype(np.int64), + ) node.input_tensor_names.append(ends.output_tensor_names[0]) - axes = ctx.MakeConst(oneflow._oneflow_internal.UniqueStr("narrow_axes"), np.array(slice_axes).astype(np.int64),) + axes = ctx.MakeConst( + oneflow._oneflow_internal.UniqueStr("narrow_axes"), + np.array(slice_axes).astype(np.int64), + ) node.input_tensor_names.append(axes.output_tensor_names[0]) - steps = ctx.MakeConst(oneflow._oneflow_internal.UniqueStr("narrow_steps"), np.array(slice_steps).astype(np.int64),) + steps = ctx.MakeConst( + oneflow._oneflow_internal.UniqueStr("narrow_steps"), + np.array(slice_steps).astype(np.int64), + ) node.input_tensor_names.append(steps.output_tensor_names[0]) @classmethod From 8aa1b021de15dad69542c510bbf7b911a30f69cc Mon Sep 17 00:00:00 2001 From: qmpzzpmq <405691733@qq.com> Date: Tue, 21 Feb 2023 08:30:54 +0800 Subject: [PATCH 7/9] clean --- oneflow_onnx/oneflow2onnx/handlers/array.py | 18 ++---------------- 1 file changed, 2 insertions(+), 16 deletions(-) diff --git a/oneflow_onnx/oneflow2onnx/handlers/array.py b/oneflow_onnx/oneflow2onnx/handlers/array.py index 762165f..01b0d70 100644 --- a/oneflow_onnx/oneflow2onnx/handlers/array.py +++ b/oneflow_onnx/oneflow2onnx/handlers/array.py @@ -23,10 +23,8 @@ import logging import sys -from pdb import set_trace import numpy as np -from onnx import helper from onnx import numpy_helper from onnx import onnx_pb from onnx.onnx_pb import TensorProto @@ 
-272,20 +270,8 @@ def Version_1(cls, ctx, node, **kwargs): # since opset 5 # set_trace() if ctx.opset > 4: - output_shape = np.array(output_shape) - output_shape_tensor = helper.make_tensor( - name="const_tensor", - data_type=TensorProto.INT64, - dims=output_shape.shape, - vals=output_shape.flatten(), - ) - node_constant = ctx.MakeNode( - "Constant", - [], - op_name_scope=node.name, - name="constant", - dtypes=dtypes, - attr={"value": output_shape_tensor}, + node_constant = ctx.MakeConst( + oneflow._oneflow_internal.UniqueStr("shape"), np.array(output_shape).astype(np.int64) ) node_reshape = ctx.MakeNode( "Reshape", From 6aa0d8aab40f32ba92f7d7eb654869ceca44fba6 Mon Sep 17 00:00:00 2001 From: qmpzzpmq <405691733@qq.com> Date: Tue, 21 Feb 2023 08:32:00 +0800 Subject: [PATCH 8/9] format --- oneflow_onnx/oneflow2onnx/handlers/array.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/oneflow_onnx/oneflow2onnx/handlers/array.py b/oneflow_onnx/oneflow2onnx/handlers/array.py index 01b0d70..93c554c 100644 --- a/oneflow_onnx/oneflow2onnx/handlers/array.py +++ b/oneflow_onnx/oneflow2onnx/handlers/array.py @@ -270,9 +270,7 @@ def Version_1(cls, ctx, node, **kwargs): # since opset 5 # set_trace() if ctx.opset > 4: - node_constant = ctx.MakeConst( - oneflow._oneflow_internal.UniqueStr("shape"), np.array(output_shape).astype(np.int64) - ) + node_constant = ctx.MakeConst(oneflow._oneflow_internal.UniqueStr("shape"), np.array(output_shape).astype(np.int64)) node_reshape = ctx.MakeNode( "Reshape", node_concat.output_tensor_names + node_constant.output_tensor_names, From fd58d873eb48f3e31e0201890dbd369d822411f4 Mon Sep 17 00:00:00 2001 From: qmpzzpmq <405691733@qq.com> Date: Wed, 22 Feb 2023 15:05:57 +0800 Subject: [PATCH 9/9] update op_list.md --- docs/oneflow2onnx/op_list.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/oneflow2onnx/op_list.md b/docs/oneflow2onnx/op_list.md index a19e16f..4bf4b70 100644 --- a/docs/oneflow2onnx/op_list.md +++ 
b/docs/oneflow2onnx/op_list.md @@ -30,3 +30,4 @@ | 90 | ScalarLogicalLess| 91| ScalarLogicalGreater| 92| Gather | 93 | Expand | | 94 | fill_ | 95 | GeLU | 96 | LayerNorm | 97 | AmpIdentity | | 98 | fast_gelu | 99 | quick_gelu | 100 | fused_self_attention |101 |RMSLayerNorm | +| 102 | stack |