tvm.reduce_axis range end cannot come from a tensor

I wrote the following DSL:

import tvm

shape = (2, 1)
reduce_end = tvm.placeholder(shape, name="reduce_end", dtype="int32")
fm = tvm.placeholder((5, 4, 16), name="fm")
reduce_2 = tvm.compute(shape, lambda *indice: reduce_end(*indice) * tvm.const(2, dtype="int32"), name="reduce_2")
bin_w = reduce_2[0, 0]
# bin_w = 4
var_reduce_axis = tvm.reduce_axis((0, bin_w), name="var_reduce_axis")
res = tvm.compute((5, 16), lambda i1, i2:
                  tvm.max(fm[i1, var_reduce_axis, i2], axis=var_reduce_axis), name="res")
s = tvm.create_schedule(res.op)
red_ub = s.cache_read(reduce_end, "local.UB", [reduce_2])
fm_ub = s.cache_read(fm, "local.UB", [res])
print(tvm.lower(s, [reduce_end, fm, res], simple_mode=True))

When I run it, I get the following error:
raise TVMError(py_str(_LIB.TVMGetLastError()))
tvm._ffi.base.TVMError: [09:40:44] …/src/schedule/bound.cc:135: Check failed: it != rmap->end()

I think the main reason is that the right end of the reduce_axis range comes from a tensor:
bin_w = reduce_2[0, 0]
# bin_w = 4
var_reduce_axis = tvm.reduce_axis((0, bin_w), name="var_reduce_axis")

Can anyone give me some advice?
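One workaround I am considering (just a sketch, assuming the reduce length can be passed in as a scalar argument instead of being read out of a tensor) is to make the extent a symbolic tvm.var:

import tvm

bin_w = tvm.var("bin_w")   # symbolic reduce length, supplied at call time
fm = tvm.placeholder((5, 4, 16), name="fm")
k = tvm.reduce_axis((0, bin_w), name="k")
res = tvm.compute((5, 16), lambda i1, i2: tvm.max(fm[i1, k, i2], axis=k), name="res")
s = tvm.create_schedule(res.op)
print(tvm.lower(s, [fm, res, bin_w], simple_mode=True))

But this changes the semantics: bin_w is then fixed for the whole kernel call rather than computed from reduce_end inside the graph, which is not really what I want.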


I tried to use a variable reduce_axis length and got the same error as you.

@etaf When did you try this? Something like:

from topi.util import get_const_tuple
import tvm

def func(Elements, Lengths):
    def f(n, d):
        rg = tvm.reduce_axis((0, Lengths[n]))
        return tvm.sum(Elements[rg, d], axis=rg)

    (N,) = get_const_tuple(Lengths.shape)
    (_, D) = get_const_tuple(Elements.shape)
    return tvm.compute((N, D), f, name="Y")

def run(N, I, D):
    Elements = tvm.placeholder(shape=(I, D), dtype="float32", name="Elements")
    Lengths = tvm.placeholder(shape=(N,), dtype="int32", name="Lengths")
    Y = func(Elements, Lengths)
    s = tvm.create_schedule([Y.op])
    print(tvm.lower(s, [Elements, Lengths, Y], simple_mode=True))
    print(tvm.save_json(Y))
    f = tvm.build(s, [Elements, Lengths, Y], target="llvm")

run(N=10, I=10, D=128)

works on master after the PR in https://github.com/dmlc/tvm/pull/2208 was merged (see issue https://github.com/dmlc/tvm/issues/2207).
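For completeness, a quick runtime sanity check might look like this (a sketch only; it assumes run() is changed to end with return f, and the numpy reference is mine):

import numpy as np

f = run(N=10, I=10, D=128)            # assumes run() ends with `return f`
ctx = tvm.cpu(0)
elements_np = np.random.rand(10, 128).astype("float32")
lengths_np = np.random.randint(1, 11, size=(10,)).astype("int32")
y_nd = tvm.nd.array(np.zeros((10, 128), dtype="float32"), ctx)
f(tvm.nd.array(elements_np, ctx), tvm.nd.array(lengths_np, ctx), y_nd)

# row n should sum the first Lengths[n] rows of Elements
expected = np.stack([elements_np[:l].sum(axis=0) for l in lengths_np])
np.testing.assert_allclose(y_nd.asnumpy(), expected, rtol=1e-5)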


@ajtulloch Cool, I failed because the PR had not been merged into my local TVM.

@ajtulloch
Cool. But I used master after the PR and changed the code a little, adding one line: elements_ub = s.cache_read(Elements, "local.UB", [Y]).
Now tvm.build() fails with "/src/pass/make_api.cc:184: Not all Vars are passed in api_args: 'n' does not appeared in api_args".

from topi.util import get_const_tuple
import tvm

def func(Elements, Lengths):
    def f(n, d):
        rg = tvm.reduce_axis((0, Lengths[n]))
        return tvm.sum(Elements[rg, d], axis=rg)

    (N,) = get_const_tuple(Lengths.shape)
    (_, D) = get_const_tuple(Elements.shape)
    return tvm.compute((N, D), f, name="Y")

def run(N, I, D):
    Elements = tvm.placeholder(shape=(I, D), dtype="float32", name="Elements")
    Lengths = tvm.placeholder(shape=(N,), dtype="int32", name="Lengths")
    Y = func(Elements, Lengths)
    s = tvm.create_schedule([Y.op])
    elements_ub = s.cache_read(Elements, "local.UB", [Y])  # added code
    print(tvm.lower(s, [Elements, Lengths, Y], simple_mode=True))
    print(tvm.save_json(Y))
    f = tvm.build(s, [Elements, Lengths, Y], target="llvm")

run(N=10, I=10, D=128)
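For reference, the next thing I plan to try is attaching the cache stage under the consumer, although I have not verified that this avoids the free 'n' (untested sketch, added right after the cache_read line):

# untested: compute the cached read per output row of Y instead of globally
s[elements_ub].compute_at(s[Y], Y.op.axis[0])
print(tvm.lower(s, [Elements, Lengths, Y], simple_mode=True))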