[Error]"TVMError: Not all Vars are passed in api_args..." whenever I set target to Mali GPU

I'm trying to run ResNet50 on the Mali GPU of my Tinker Board S (an SBC with a Rockchip RK3288 chip), but every time I set target='opencl -device=mali', I get:
"TVMError: Not all Vars are passed in api_args: 'threadIdx.x' does not appear in api_args".

For example, when I run this code on my Tinker Board S,

import tvm
import tvm.relay as relay
from tvm.contrib import graph_runtime
import mxnet as mx
import numpy as np
from tvm.contrib.download import download_testdata
from mxnet.gluon.model_zoo.vision import get_model
from PIL import Image
from matplotlib import pyplot as plt

# Load a pretrained ResNet50 v2 from the Gluon model zoo and convert it to Relay.
block = get_model('resnet50_v2', pretrained=True)
input_data = np.random.randn(1, 3, 224, 224)
shape_dict = {'data': input_data.shape}
mod, params = relay.frontend.from_mxnet(block, shape_dict)

# Compile for the Mali GPU, with an ARM host.
target = tvm.target.create('opencl -device=mali')
target_host = 'llvm -target=arm-linux-gnueabihf'

with relay.build_config(opt_level=0):
    graph, lib, params = relay.build_module.build(
        mod=mod, params=params, target=target, target_host=target_host)

# Run the compiled module on the OpenCL device.
ctx = tvm.cl(0)
dtype = 'float32'

m = graph_runtime.create(graph, lib, ctx)
m.set_input('data', tvm.nd.array(input_data.astype(dtype)))
m.set_input(**params)
m.run()

tvm_output = m.get_output(0)
print('input shape:', input_data.shape)
print('output shape:', tvm_output.asnumpy().shape)

the following error shows up:

[04:48:16] /home/linaro/tvm/src/pass/loop_partition.cc:541: Cannot prove: ((((((((((((blockIdx.x*8) + threadIdx.x) % 16)/4)*4) + (threadIdx.x % 4)) + 1) - (((blockIdx.x*8) + threadIdx.x) % 16)) - 1) - 1) + 1) >= 0), when generating the post doubt loop
Traceback (most recent call last):
  File "RN50MaliTest.py", line 24, in <module>
    graph,lib,params = relay.build_module.build(mod=mod,target=target,params=params,target_host=target_host)
  File "/home/linaro/tvm/python/tvm/relay/build_module.py", line 207, in build
    graph_json, mod, params = bld_mod.build(func, target, target_host, params)
  File "/home/linaro/tvm/python/tvm/relay/build_module.py", line 108, in build
    self._build(func, target, target_host)
  File "/home/linaro/tvm/python/tvm/_ffi/_ctypes/function.py", line 209, in __call__
    raise get_last_ffi_error()
tvm._ffi.base.TVMError: Traceback (most recent call last):
[bt] (8) /home/linaro/tvm/build/libtvm.so(tvm::relay::backend::GraphRuntimeCodegen::VisitExpr(tvm::relay::Expr const&)+0x3d1) [0xae5e0f56]
[bt] (7) /home/linaro/tvm/build/libtvm.so(tvm::relay::backend::GraphRuntimeCodegen::VisitExpr_(tvm::relay::CallNode const*)+0x677) [0xae5e3204]
[bt] (6) /home/linaro/tvm/build/libtvm.so(tvm::relay::backend::GraphRuntimeCodegen::VisitExpr(tvm::relay::Expr const&)+0x3d1) [0xae5e0f56]
[bt] (5) /home/linaro/tvm/build/libtvm.so(tvm::relay::backend::GraphRuntimeCodegen::VisitExpr_(tvm::relay::CallNode const*)+0x677) [0xae5e3204]
[bt] (4) /home/linaro/tvm/build/libtvm.so(tvm::relay::backend::GraphRuntimeCodegen::VisitExpr(tvm::relay::Expr const&)+0x3d1) [0xae5e0f56]
[bt] (3) /home/linaro/tvm/build/libtvm.so(tvm::relay::backend::GraphRuntimeCodegen::VisitExpr_(tvm::relay::CallNode const*)+0x431) [0xae5e2fbe]
[bt] (2) /home/linaro/tvm/build/libtvm.so(+0x962854) [0xae5ca854]
[bt] (1) /home/linaro/tvm/build/libtvm.so(tvm::relay::CompileEngineImpl::LowerInternal(tvm::relay::CCacheKey const&)+0x78d) [0xae5cfbde]
[bt] (0) /home/linaro/tvm/build/libtvm.so(+0xbcacc0) [0xae832cc0]
  File "/home/linaro/tvm/python/tvm/relay/backend/_backend.py", line 51, in lower
    f = _build.lower(sch, inputs, name=func_name)
  File "/home/linaro/tvm/python/tvm/build_module.py", line 416, in lower
    return ir_pass.MakeAPI(stmt, name, arg_list, 0, cfg.restricted_func)
  File "/home/linaro/tvm/python/tvm/_ffi/_ctypes/function.py", line 209, in __call__
    raise get_last_ffi_error()
[bt] (3) /home/linaro/tvm/build/libtvm.so(TVMFuncCall+0x27) [0xae835e84]
[bt] (2) /home/linaro/tvm/build/libtvm.so(std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), void tvm::runtime::TypedPackedFunc<tvm::LoweredFunc (HalideIR::Internal::Stmt, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Array<tvm::NodeRef, void>, int, bool)>::AssignTypedLambda<tvm::LoweredFunc (*)(HalideIR::Internal::Stmt, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Array<tvm::NodeRef, void>, int, bool)>(tvm::LoweredFunc (*)(HalideIR::Internal::Stmt, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Array<tvm::NodeRef, void>, int, bool))::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)+0x91) [0xae3c997a]
[bt] (1) /home/linaro/tvm/build/libtvm.so(tvm::ir::MakeAPI(HalideIR::Internal::Stmt, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Array<tvm::NodeRef, void>, int, bool)+0x2963) [0xae5207a4]
[bt] (0) /home/linaro/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x1d) [0xae39ee2a]
  File "/home/linaro/tvm/src/pass/make_api.cc", line 188
  File "/home/linaro/tvm/python/tvm/_ffi/_ctypes/function.py", line 71, in cfun
    rv = local_pyfunc(*pyargs)
  File "/home/linaro/tvm/python/tvm/relay/backend/_backend.py", line 59, in lower
    raise RuntimeError(msg)
  File "/home/linaro/tvm/python/tvm/relay/backend/_backend.py", line 51, in lower
    f = _build.lower(sch, inputs, name=func_name)
  File "/home/linaro/tvm/python/tvm/build_module.py", line 416, in lower
    return ir_pass.MakeAPI(stmt, name, arg_list, 0, cfg.restricted_func)
  File "/home/linaro/tvm/python/tvm/_ffi/_ctypes/function.py", line 209, in __call__
    raise get_last_ffi_error()
[bt] (3) /home/linaro/tvm/build/libtvm.so(TVMFuncCall+0x27) [0xae835e84]
[bt] (2) /home/linaro/tvm/build/libtvm.so(std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), void tvm::runtime::TypedPackedFunc<tvm::LoweredFunc (HalideIR::Internal::Stmt, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Array<tvm::NodeRef, void>, int, bool)>::AssignTypedLambda<tvm::LoweredFunc (*)(HalideIR::Internal::Stmt, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Array<tvm::NodeRef, void>, int, bool)>(tvm::LoweredFunc (*)(HalideIR::Internal::Stmt, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Array<tvm::NodeRef, void>, int, bool))::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)+0x91) [0xae3c997a]
[bt] (1) /home/linaro/tvm/build/libtvm.so(tvm::ir::MakeAPI(HalideIR::Internal::Stmt, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Array<tvm::NodeRef, void>, int, bool)+0x2963) [0xae5207a4]
[bt] (0) /home/linaro/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x1d) [0xae39ee2a]
  File "/home/linaro/tvm/src/pass/make_api.cc", line 188
TVMError: Not all Vars are passed in api_args: 'threadIdx.x' does not appear in api_args
During handling of the above exception, another exception occurred:

TVMError: Not all Vars are passed in api_args: 'threadIdx.x' does not appear in api_args
Error during compile function

v0.0.1
fn (%p0: Tensor[(1, 512, 7, 7), float32], %p1: Tensor[(512, 512, 3, 3), float32], __dict__=meta[StrMap][0]) -> Tensor[(1, 512, 7, 7), float32] {
  nn.conv2d(%p0, %p1, padding=[1, 1], channels=512, kernel_size=[3, 3]) /* ty=Tensor[(1, 512, 7, 7), float32] */
}
/* meta data */
{
  "root": 1,
  "nodes": [
    {
      "type_key": ""
    },
    {
      "type_key": "StrMap",
      "keys": [
        "StrMap"
      ],
      "data": [2]
    },
    {
      "type_key": "Array",
      "data": [3]
    },
    {
      "type_key": "StrMap",
      "keys": [
        "Primitive"
      ],
      "data": [4]
    },
    {
      "type_key": "IntImm",
      "attrs": {
        "dtype": "int32",
        "value": "1"
      }
    }
  ],
  "b64ndarrays": [],
  "attrs": {"tvm_version": "0.6.dev"}
}
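
Judging from the dumped function above, the failure seems to come from a single 3x3 conv2d on a (1, 512, 7, 7) input. Here is an untested sketch of how I think that one op could be reproduced in isolation (the variable names and the use of relay.Module.from_expr are my own guess, not taken from my script above):

import tvm
import tvm.relay as relay

# Build only the conv2d that appears in the error dump, with the same Mali target.
data = relay.var('data', shape=(1, 512, 7, 7), dtype='float32')
weight = relay.var('weight', shape=(512, 512, 3, 3), dtype='float32')
out = relay.nn.conv2d(data, weight, padding=(1, 1), channels=512, kernel_size=(3, 3))
func = relay.Function([data, weight], out)

target = tvm.target.create('opencl -device=mali')
target_host = 'llvm -target=arm-linux-gnueabihf'
with relay.build_config(opt_level=0):
    graph, lib, params = relay.build_module.build(
        relay.Module.from_expr(func), target=target, target_host=target_host)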

However, when I tried a simple computation on the Mali GPU by binding a tensor axis to GPU threads,

s[B].bind(xi, tvm.thread_axis("threadIdx.x"))

like what is done in that page, it worked (a rough sketch of that test is below).
So it seems I CAN use the Mali GPU for simple kernels.
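
For reference, this is roughly the kind of test that worked, reconstructed from memory as a minimal sketch (the shapes and the split factor here are made up, not the exact ones I used):

import numpy as np
import tvm

# Minimal element-wise kernel, with its axes bound to GPU block/thread indices.
n = 1024
A = tvm.placeholder((n,), name='A')
B = tvm.compute((n,), lambda i: A[i] + 1.0, name='B')

s = tvm.create_schedule(B.op)
xo, xi = s[B].split(B.op.axis[0], factor=64)
s[B].bind(xo, tvm.thread_axis("blockIdx.x"))
s[B].bind(xi, tvm.thread_axis("threadIdx.x"))

target = tvm.target.create('opencl -device=mali')
target_host = 'llvm -target=arm-linux-gnueabihf'
fadd = tvm.build(s, [A, B], target=target, target_host=target_host)

ctx = tvm.cl(0)
a = tvm.nd.array(np.random.uniform(size=n).astype('float32'), ctx)
b = tvm.nd.array(np.zeros(n, dtype='float32'), ctx)
fadd(a, b)
np.testing.assert_allclose(b.asnumpy(), a.asnumpy() + 1.0)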

I have no idea what is causing this error.

Can anyone figure out the cause?
Is it a bug, or am I doing something wrong?

Tracked in https://github.com/dmlc/tvm/issues/3980