From 2a96704454e8870c04765af5284932a117f062e5 Mon Sep 17 00:00:00 2001
From: Jared Roesch <roeschinc@gmail.com>
Date: Mon, 17 Dec 2018 16:18:43 -0800
Subject: [PATCH] [RELAY] Add broadcast_to operator (#2276)

---
 docs/langref/relay_op.rst             |  1 +
 python/tvm/relay/op/_transform.py     |  1 +
 python/tvm/relay/op/transform.py      | 18 +++++++++++
 src/relay/op/nn/pooling.cc            |  3 +-
 src/relay/op/tensor/transform.cc      | 46 +++++++++++++++++++++++++++
 tests/python/relay/test_op_level10.py | 18 +++++++++++
 6 files changed, 85 insertions(+), 2 deletions(-)

diff --git a/docs/langref/relay_op.rst b/docs/langref/relay_op.rst
index f4a65023e..5c3ab8b1f 100644
--- a/docs/langref/relay_op.rst
+++ b/docs/langref/relay_op.rst
@@ -124,6 +124,7 @@ This level enables additional math and transform operators.
    tvm.relay.mean
    tvm.relay.prod
    tvm.relay.strided_slice
+   tvm.relay.broadcast_to
 
 
 **Level 5: Vision/Image Operators**
diff --git a/python/tvm/relay/op/_transform.py b/python/tvm/relay/op/_transform.py
index c1e71e913..085a8ceed 100644
--- a/python/tvm/relay/op/_transform.py
+++ b/python/tvm/relay/op/_transform.py
@@ -11,6 +11,7 @@
 schedule_broadcast = _reg.schedule_injective
 
 _reg.register_schedule("collapse_sum_like", _schedule_reduce)
+_reg.register_schedule("broadcast_to", schedule_broadcast)
 _reg.register_schedule("broadcast_to_like", schedule_broadcast)
 _reg.register_schedule("expand_dims", schedule_broadcast)
 _reg.register_schedule("squeeze", schedule_injective)
diff --git a/python/tvm/relay/op/transform.py b/python/tvm/relay/op/transform.py
index f536e75fd..2791eaf7d 100644
--- a/python/tvm/relay/op/transform.py
+++ b/python/tvm/relay/op/transform.py
@@ -267,6 +267,24 @@ def where(condition, x, y):
     """
     return _make.where(condition, x, y)
 
+def broadcast_to(data, shape):
+    """Return a tensor broadcast to the provided shape,
+    with the same dtype as the input.
+
+    Parameters
+    ----------
+    data : relay.Expr
+        The input tensor.
+
+    shape : tuple of int
+        The target shape to broadcast to.
+
+    Returns
+    -------
+    result : relay.Expr
+        The resulting tensor.
+    """
+    return _make.broadcast_to(data, shape)
 
 def broadcast_to_like(data, broadcast_type):
     """Return an scalar value array with the same shape and type as the input array.
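For illustration, the Python API added above can be exercised as in the short sketch below. This is not part of the patch: the variable names are illustrative, and it assumes a TVM build that already includes this change.

    from tvm import relay

    # Build a Relay expression that broadcasts a (4, 1, 6) tensor to (3, 4, 5, 6).
    x = relay.Var("x", relay.ty.TensorType((4, 1, 6), "float32"))
    z = relay.broadcast_to(x, shape=(3, 4, 5, 6))

    # Type inference should report the broadcast output shape.
    zz = relay.ir_pass.infer_type(z)
    print(zz.checked_type)  # Tensor[(3, 4, 5, 6), float32]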
diff --git a/src/relay/op/nn/pooling.cc b/src/relay/op/nn/pooling.cc
index 6233e6d51..6cf37668c 100644
--- a/src/relay/op/nn/pooling.cc
+++ b/src/relay/op/nn/pooling.cc
@@ -258,8 +258,7 @@ bool GlobalPool2DRel(const Array<Type>& types,
                      const TypeReporter& reporter) {
   CHECK_EQ(types.size(), 2);
   const auto* data = types[0].as<TensorTypeNode>();
-
-  CHECK(data != nullptr);
+  if (data == nullptr) { return false; }
   const auto dshape = data->shape;
   CHECK_NE(dshape.size(), 0);
   CHECK_GE(dshape.size(), 2U)
diff --git a/src/relay/op/tensor/transform.cc b/src/relay/op/tensor/transform.cc
index fcf7f6fe3..eb8b4f13f 100644
--- a/src/relay/op/tensor/transform.cc
+++ b/src/relay/op/tensor/transform.cc
@@ -1084,6 +1084,52 @@ RELAY_REGISTER_OP("collapse_sum_like")
 .set_attr<FTVMCompute>("FTVMCompute", CollapseSumLikeCompute)
 .set_attr<TOpPattern>("TOpPattern", kCommReduce);
 
+// BroadCastTo: <A> -> B where B is the shape attribute and BroadCast(A, B) = B
+bool BroadCastToRel(const Array<Type>& types,
+                    int num_inputs,
+                    const Attrs& attrs,
+                    const TypeReporter& reporter) {
+  CHECK_EQ(types.size(), 2);
+  auto ioattrs = attrs.as<InitOpAttrs>();
+  CHECK(ioattrs);
+  auto intt = types[0].as<TensorTypeNode>();
+  if (intt == nullptr) { return false; }
+  auto type = TensorTypeNode::make(ioattrs->shape, intt->dtype);
+  reporter->Assign(types[1], type);
+  return true;
+}
+
+Expr MakeBroadCastTo(Expr data, Array<IndexExpr> shape) {
+  static const Op& op = Op::Get("broadcast_to");
+  auto attrs = make_node<InitOpAttrs>();
+  attrs->shape = std::move(shape);
+  return CallNode::make(op, {data}, Attrs(attrs), {});
+}
+
+Array<Tensor> BroadCastToCompute(const Attrs& attrs,
+                                 const Array<Tensor>& inputs,
+                                 const Type& out_type,
+                                 const Target& target) {
+  auto ioattrs = attrs.as<InitOpAttrs>();
+  CHECK(ioattrs != nullptr);
+  return { topi::broadcast_to(inputs[0], ioattrs->shape) };
+}
+
+TVM_REGISTER_API("relay.op._make.broadcast_to")
+.set_body([](const TVMArgs& args, TVMRetValue* rv) {
+    runtime::detail::unpack_call<Expr, 2>(MakeBroadCastTo, args, rv);
+  });
+
+RELAY_REGISTER_OP("broadcast_to")
+.describe(R"code(Broadcast the first input to match the shape argument.
+)code" TVM_ADD_FILELINE) +.set_num_inputs(1) +.add_argument("data", "Tensor", "The input tensor.") +.set_support_level(4) +.add_type_rel("BroadCastTo", BroadCastToRel) +.set_attr<FTVMCompute>("FTVMCompute", BroadCastToCompute) +.set_attr<TOpPattern>("TOpPattern", kBroadcast); + // BroadCastToLike: <A, B> -> B where BroadCast(A, B) = B bool BroadCastToLikeRel(const Array<Type>& types, int num_inputs, diff --git a/tests/python/relay/test_op_level10.py b/tests/python/relay/test_op_level10.py index 5d65691a2..2c0ed73a7 100644 --- a/tests/python/relay/test_op_level10.py +++ b/tests/python/relay/test_op_level10.py @@ -25,6 +25,24 @@ def test_collapse_sum_like(): op_res = intrp.evaluate(func)(x, y) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) +def test_broadcast_to(): + shape = (4, 1, 6) + shape_like = (3, 4, 5, 6) + dtype = "float32" + x = relay.Var("x", relay.ty.TensorType(shape , dtype)) + z = relay.broadcast_to(x, shape=shape_like) + zz = relay.ir_pass.infer_type(z) + assert zz.checked_type == relay.ty.TensorType(shape_like, dtype) + + func = relay.Function([x], z) + x = np.random.uniform(size=shape).astype(dtype) + ref_res = np.broadcast_to(x, shape_like) + for target, ctx in ctx_list(): + for kind in ["graph", "debug"]: + intrp = relay.create_executor(kind, ctx=ctx, target=target) + op_res = intrp.evaluate(func)(x) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) + def test_broadcast_to_like(): shape = (4, 1, 6) shape_like = (3, 4, 5, 6) -- GitLab