diff --git a/nnvm/python/nnvm/frontend/tensorflow.py b/nnvm/python/nnvm/frontend/tensorflow.py index 143d995803d8f..ac47f9c3435f0 100644 --- a/nnvm/python/nnvm/frontend/tensorflow.py +++ b/nnvm/python/nnvm/frontend/tensorflow.py @@ -607,6 +607,26 @@ def _impl(inputs, in_state_c, in_state_h, attr, params): return _impl +def _pad(name): + def _impl(inputs, attr, params): + padlist_key = inputs[1].list_output_names()[0] + if padlist_key in params: + padlist = params.pop(padlist_key).asnumpy() + else: + raise RuntimeError("Required parameter {} not found.".format(padlist_key)) + paddings = tuple([tuple(l) for l in padlist]) + attr['pad_width'] = paddings + attr['pad_value'] = 0 + new_inputs = [inputs[0]] + if name == 'PadV2': + constant_values = params.pop(inputs[2].list_output_names()[0]).asnumpy() + attr['pad_value'] = constant_values[0] + return AttrCvt( + op_name='pad', + ignores=['Tpaddings'],)(new_inputs, attr) + return _impl + + # compatible operators that do NOT require any conversion. 
_identity_list = [] @@ -649,6 +669,8 @@ def _impl(inputs, in_state_c, in_state_h, attr, params): 'GatherV2' : _gather_v2(), 'StridedSlice' : _stridedSlice(), 'LRN' : _lrn(), + 'Pad' : _pad('Pad'), + 'PadV2' : _pad('PadV2'), } # _convert_map_rnn defines maps of rnn operator name to diff --git a/nnvm/tests/python/frontend/tensorflow/test_forward.py b/nnvm/tests/python/frontend/tensorflow/test_forward.py index 0642e46b78637..96b472e8b3811 100644 --- a/nnvm/tests/python/frontend/tensorflow/test_forward.py +++ b/nnvm/tests/python/frontend/tensorflow/test_forward.py @@ -704,6 +704,45 @@ def test_forward_resize_bilinear(): _test_resize_bilinear((4, 16, 32, 32), [50, 50], False) _test_resize_bilinear((6, 32, 64, 64), [20, 20], True) +####################################################################### +# Pad +# --- +def _test_pad(input_shape, paddings, mode, **kwargs): + """ Run one tf.pad conversion test: build a TF graph padding an input of the given shape with the given paddings/mode, then compare TF output against the TVM-converted graph's output. """ + + x = np.arange(np.prod(input_shape), dtype=np.float32).reshape(input_shape) + + with tf.Graph().as_default(): + in_data = constant_op.constant(x, shape=input_shape, dtype='float32') + pad_values = constant_op.constant(paddings) + pad = tf.pad(in_data, paddings=pad_values, mode=mode, **kwargs) + + if mode == 'CONSTANT': + if 'constant_values' in kwargs: + out_node = 'PadV2' + out_name = 'PadV2:0' + else: + out_node = 'Pad' + out_name = 'Pad:0' + + with tf.Session() as sess: + graph_def = tf.graph_util.convert_variables_to_constants( + sess, + sess.graph.as_graph_def(add_shapes=True), + [out_node], + ) + + tf_output = run_tf_graph(sess, x, 'Const:0', out_name) + tvm_output = run_tvm_graph(graph_def, x.astype('float32'), + "Const", tf_output.shape, 'float32') + np.testing.assert_allclose(tf_output, tvm_output) + sess.close() + +def test_forward_pad(): + """ Pad / PadV2 conversion tests: CONSTANT mode without and with an explicit constant_values fill. """ + _test_pad((2, 3), [[1,1], [2,2]], mode="CONSTANT") + _test_pad((2, 3), [[1,1], [2,2]], mode="CONSTANT", constant_values=1.0) + 
####################################################################### # Inception V3 @@ -936,6 +975,7 @@ def test_forward_lrn(): test_forward_mobilenet() test_forward_variable() test_forward_resize_bilinear() + test_forward_pad() test_forward_lstm() test_forward_stridedslice() test_forward_gather()