Hi,
While compiling a TensorFlow model, I hit an error related to the Halide call type:
Traceback (most recent call last):
File "/home/tvm//model_evaluation/tvm_eval.py", line 252, in <module>
mod, params = tvm_eval_non_tuned()
File "/home/tvm/model_evaluation/tvm_eval.py", line 107, in tvm_eval_non_tuned
params=params)
File "/home/tvm/tvm/python/tvm/relay/build_module.py", line 207, in build
graph_json, mod, params = bld_mod.build(func, target, target_host, params)
File "/home/tvm/tvm/python/tvm/relay/build_module.py", line 108, in build
self._build(func, target, target_host)
File "/home/tvm/tvm/python/tvm/_ffi/_ctypes/function.py", line 210, in __call__
raise get_last_ffi_error()
tvm._ffi.base.TVMError: Traceback (most recent call last):
[bt] (8) /home/tvm/tvm/build/libtvm.so(tvm::relay::ScheduleGetter::VisitExpr_(tvm::relay::CallNode const*)+0x650) [0x7f6d758e64a0]
[bt] (7) /home/tvm/tvm/build/libtvm.so(std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), tvm::runtime::TypedPackedFunc<tvm::Array<tvm::Tensor, void> (tvm::Attrs const&, tvm::Array<tvm::Tensor, void> const&, tvm::relay::Type const&, tvm::Target const&)>::AssignTypedLambda<tvm::Array<tvm::Tensor, void> (*)(tvm::Attrs const&, tvm::Array<tvm::Tensor, void> const&, tvm::relay::Type const&, tvm::Target const&)>(tvm::Array<tvm::Tensor, void> (*)(tvm::Attrs const&, tvm::Array<tvm::Tensor, void> const&, tvm::relay::Type const&, tvm::Target const&))::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)+0xe3) [0x7f6d759e1813]
[bt] (6) /home/tvm/tvm/build/libtvm.so(tvm::relay::TakeCompute(tvm::Attrs const&, tvm::Array<tvm::Tensor, void> const&, tvm::relay::Type const&, tvm::Target const&)+0x164) [0x7f6d75a91544]
[bt] (5) /home/tvm/tvm/build/libtvm.so(topi::take(tvm::Tensor const&, tvm::Tensor const&, int, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >)+0x468) [0x7f6d75aaba28]
[bt] (4) /home/tvm/tvm/build/libtvm.so(tvm::compute(tvm::Array<tvm::Expr, void>, std::function<tvm::Expr (tvm::Array<tvm::Var, void> const&)>, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::NodeRef, void, void>)+0x496) [0x7f6d7584a456]
[bt] (3) /home/tvm/tvm/build/libtvm.so(std::_Function_handler<tvm::Expr (tvm::Array<tvm::Var, void> const&), topi::take(tvm::Tensor const&, tvm::Tensor const&, int, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >)::{lambda(tvm::Array<tvm::Var, void> const&)#1}>::_M_invoke(std::_Any_data const&, tvm::Array<tvm::Var, void> const&)+0xa4d) [0x7f6d75ab8dad]
[bt] (2) /home/tvm/tvm/build/libtvm.so(tvm::Tensor::operator()(tvm::Array<tvm::Expr, void>) const+0x155) [0x7f6d75725225]
[bt] (1) /home/tvm/tvm/build/libtvm.so(tvm::ir::Call::make(tvm::DataType, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::Array<tvm::Expr, void>, tvm::ir::Call::CallType, tvm::ir::FunctionRef, int)+0x441) [0x7f6d756f6011]
[bt] (0) /home/tvm/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x43) [0x7f6d7554a223]
File "/home/tvm/tvm/src/lang/ir.cc", line 196
TVMError: Check failed: args[i].type().is_int():
Where should I look in my model to identify the float32 args that trigger this exception?
Thanks!