
Commit

fix tests
tkonolige committed Nov 11, 2020
1 parent 8cf300f commit 9ec74c6
Showing 6 changed files with 17 additions and 16 deletions.
2 changes: 1 addition & 1 deletion python/tvm/topi/cuda/scatter.py
@@ -575,7 +575,7 @@ def gen_ir(data_ptr, indices_ptr, out_ptr):
             fused_indices_dimension *= i

         fused_data_dimension = 1
-        for i in data_ptr.shape[indices_ptr.shape[0].value :]:
+        for i in data_ptr.shape[len(indices_ptr.shape)-1 :]:
             fused_data_dimension *= i

         fused_shape = 1
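The one-line change above, repeated in the generic and x86 kernels below, moves the start of the data-shape slice from M (the number of index tensors, indices_ptr.shape[0]) to K (the number of shared batch dimensions, len(indices_ptr.shape) - 1). A rough NumPy-shape sketch of the difference, assuming the scatter_nd layout named in the kernel comments, indices: (M, Y_0, .., Y_{K-1}) and data: (Y_0, .., Y_{K-1}, X_M, .., X_{N-1}); in TVM the shape entries are PrimExprs, hence the .value in the original line, while plain ints are used here:

import numpy as np

indices = np.zeros((2, 5), dtype="int64")   # (M, Y_0): M = 2, K = 1
data = np.zeros((5, 3), dtype="float64")    # (Y_0, X_2)

old_trailing = data.shape[indices.shape[0]:]        # data.shape[2:] == ()   -> fused extent 1
new_trailing = data.shape[len(indices.shape) - 1:]  # data.shape[1:] == (3,) -> fused extent 3

fused_data_dimension = 1
for extent in new_trailing:  # mirrors the fixed loop in gen_ir
    fused_data_dimension *= extent
print(old_trailing, new_trailing, fused_data_dimension)

When M and K differ, as in the new test case at the end of this commit, the old slice skips real X_M .. X_{N-1} extents, so the fused data extent comes out too small.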
6 changes: 3 additions & 3 deletions python/tvm/topi/scatter.py
@@ -280,11 +280,11 @@ def gen_ir(data_ptr, indices_ptr, out_ptr):
             fused_indices_dimension *= i

         fused_data_dimension = 1
-        for i in data_ptr.shape[indices_ptr.shape[0].value :]:
+        for i in data_ptr.shape[len(indices_ptr.shape)-1 :]:
             fused_data_dimension *= i

-        with ib.for_range(0, fused_indices_dimension) as i:
-            with ib.for_range(0, fused_data_dimension) as j:
+        with ib.for_range(0, fused_indices_dimension, name='i') as i:
+            with ib.for_range(0, fused_data_dimension, name='j') as j:
                 offset = fused_data_dimension
                 index = j # This is x_M, .. x_{N-1} part of the index into out.
                 # Build up the indices[0, y_0, .. y_{K-1}], .. indices[M-1, y_0, .. y_{K-1}] part
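Besides the same slice fix, this file gives the IRBuilder loop variables explicit names. A minimal, standalone IRBuilder sketch (not the scatter kernel itself, and only an assumption about intent): for_range defaults every loop variable name to "i", so passing name= gives the two nested loops distinct, stable names in the generated TIR. Requires a TVM installation to run:

import tvm

ib = tvm.tir.ir_builder.create()
buf = ib.allocate("float32", 12, name="buf", scope="global")
with ib.for_range(0, 4, name="i") as i:
    with ib.for_range(0, 3, name="j") as j:
        buf[i * 3 + j] = 0.0
print(ib.get())  # two nested tir.For loops whose loop variables are named "i" and "j"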
2 changes: 1 addition & 1 deletion python/tvm/topi/testing/common.py
@@ -107,4 +107,4 @@ def compare_numpy_tvm(inputs, output, target, ctx, compute, schedule):
     func = tvm.build(s, te_inputs + [out])
     arys = [tvm.nd.array(x, ctx=ctx) for x in inputs]
     func(*(arys + [te_out]))
-    assert_allclose(output, te_out.asnumpy(), atol=1e-4, rtol=1e-4)
+    assert_allclose(te_out.asnumpy(), output, atol=1e-4, rtol=1e-4)
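After this change the TVM result is passed first and the NumPy reference second, which matches numpy's assert_allclose(actual, desired) convention: the relative tolerance is taken against the second argument, and the mismatch report treats the first array as the actual value. A small standalone illustration of the asymmetry (not project code, values chosen only to make the point):

import numpy as np
from numpy.testing import assert_allclose

reference = np.array([1.0])  # pretend NumPy reference
result = np.array([2.0])     # pretend TVM output, deliberately far off

assert_allclose(reference, result, rtol=0.6)    # passes: |1.0 - 2.0| <= 0.6 * |2.0|
# assert_allclose(result, reference, rtol=0.6)  # would raise: |2.0 - 1.0| > 0.6 * |1.0|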
2 changes: 1 addition & 1 deletion python/tvm/topi/x86/scatter.py
@@ -71,7 +71,7 @@ def gen_ir(data_ptr, indices_ptr, out_ptr):
             fused_indices_dimension *= i

         fused_data_dimension = 1
-        for i in data_ptr.shape[indices_ptr.shape[0].value :]:
+        for i in data_ptr.shape[len(indices_ptr.shape)-1 :]:
             fused_data_dimension *= i

         fused_shape = 1
2 changes: 1 addition & 1 deletion src/relay/op/tensor/transform.cc
@@ -1016,7 +1016,7 @@ bool ScatterNDRel(const Array<Type>& types, int num_inputs, const Attrs& attrs,

   // data: (Y_0, .. Y_{K-1}, X_M, .. X_{N-1}) out: (X_0, .. X_{N-1}), verify X_M to X_{N-1}
   for (size_t i = mdim->value; i < ndim; i++) {
-    reporter->AssertEQ(data->shape[i - mdim + kdim], oshape[i]);
+    reporter->AssertEQ(data->shape[i - mdim->value + kdim], oshape[i]);
   }

   reporter->Assign(types[2], TensorType(oshape, data->dtype));
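Here mdim is an IntImm node, so the index arithmetic needs its integer payload (mdim->value); the loop checks that the trailing X_M .. X_{N-1} extents of data line up with the output shape. A hedged Python rendering of that index math, assuming kdim is K, the number of shared Y dimensions, and using the shapes from the new test case below:

data_shape = (5, 3)      # (Y_0, X_2): K = 1
indices_shape = (2, 5)   # (M, Y_0): M = 2
out_shape = (2, 7, 3)    # (X_0, X_1, X_2): N = 3

mdim = indices_shape[0]        # M
kdim = len(indices_shape) - 1  # K
ndim = len(out_shape)          # N
for i in range(mdim, ndim):
    # data_shape[i - M + K] is X_i; here data_shape[1] == out_shape[2] == 3
    assert data_shape[i - mdim + kdim] == out_shape[i]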
19 changes: 10 additions & 9 deletions tests/python/topi/python/test_topi_scatter.py
@@ -26,8 +26,8 @@ def test_scatter_nd(ctx, target):
     def check_scatter_nd(data, indices, shape, out):
         implementations = {
             "generic": (lambda x, y: topi.scatter_nd(x, y, shape), topi.generic.schedule_extern),
-            "cuda": (lambda x, y: topi.cuda.scatter_nd(x, y, shape), topi.generic.schedule_extern),
-            "llvm": (lambda x, y: topi.x86.scatter_nd(x, y, shape), topi.generic.schedule_extern),
+            "gpu": (lambda x, y: topi.cuda.scatter_nd(x, y, shape), topi.generic.schedule_extern),
+            "cpu": (lambda x, y: topi.x86.scatter_nd(x, y, shape), topi.generic.schedule_extern),
         }
         fcompute, fschedule = tvm.topi.testing.dispatch(target, implementations)
         tvm.topi.testing.compare_numpy_tvm([data, indices], out, target, ctx, fcompute, fschedule)
@@ -41,7 +41,7 @@ def check_scatter_nd(data, indices, shape, out):
     data = np.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]])
     indices = np.array([[0, 1], [1, 1]])
     shape = (2, 2, 2, 2)
-    out = np.array([[[[0, 0], [1, 2]], [[0, 0], [3, 4]]], [[[0, 0], [0, 0]], [[0, 0], [0, 0]]]])
+    out = np.array([[[[0, 0], [0, 0]], [[1, 2], [3, 4]]], [[[0, 0], [0, 0]], [[5, 6], [7, 8]]]])
     check_scatter_nd(data, indices, shape, out)

     data = np.reshape(np.arange(1560 * 3), (3, 1560)).astype("float32")
@@ -53,12 +53,13 @@ def check_scatter_nd(data, indices, shape, out):
     out[0, :] += data[2, :]
     check_scatter_nd(data, indices, shape, out)

-    data = np.random.rand((40, 768))
-    indices = np.stack((np.random.randint(40, size=40), np.random.randint(768, size=40)))
-    shape = (8, 50, 768)
-    out = np.zeros(shape).astype("float32")
-    for i in range(40):
-        out[indices[0, i], indices[1, i], :] += data[i, :]
+    data = np.ones((5, 3)).astype("float64")
+    indices = np.stack((np.random.randint(2, size=5), np.random.randint(7, size=5))).astype("int64")
+    shape = (2, 7, 3)
+    out = np.zeros(shape).astype("float64")
+    for i in range(indices.shape[1]):
+        for j in range(data.shape[1]):
+            out[indices[0, i], indices[1, i], j] += data[i, j]
     check_scatter_nd(data, indices, shape, out)
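For reference, the corrected expected tensor in the first test case of this file can be reproduced with a plain NumPy loop over the index columns; a sanity-check sketch, not part of the commit:

import numpy as np

data = np.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]])  # (Y_0, X_2, X_3)
indices = np.array([[0, 1], [1, 1]])                   # (M, Y_0) = (2, 2)
ref = np.zeros((2, 2, 2, 2), dtype=data.dtype)         # (X_0, X_1, X_2, X_3)

for y in range(indices.shape[1]):
    ref[tuple(indices[:, y])] += data[y]  # column y of indices picks the (X_0, X_1) slot for data[y]

# ref[0, 1] == [[1, 2], [3, 4]] and ref[1, 1] == [[5, 6], [7, 8]], matching the updated out above.
print(ref.tolist())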

