Relay.frontend.from_tflite fails with Cannot resolve type of Var(dense_4_input) at (nullptr)

I have a rather simple tflite model which I’m trying to make use of:

import os
import pprint
import numpy as np
import tvm
import tvm.micro as micro
from tvm.contrib import graph_runtime, util
from tvm import relay
from PIL import Image
from tvm import te
from tvm.runtime import container
from tvm.runtime import vm as vm_rt
from tvm.relay import testing
from tvm.relay import vm
from tvm.contrib.download import download_testdata

TARGET = 'c -device=micro_dev'

model_dir = "/home/tgall/tvm/utvm-exp/"
tflite_model_file = os.path.join(model_dir, "sine_model.tflite")
tflite_model_buf = open(tflite_model_file, "rb").read()

# Get TFLite model from buffer
try:
    import tflite
    tflite_model = tflite.Model.GetRootAsModel(tflite_model_buf, 0)
except AttributeError:
    import tflite.Model
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model_buf, 0)


input_tensor = "input"
input_shape = (1,)
input_dtype = "float32"

dev_config = micro.device.arm.stm32f746xx.generate_config("127.0.0.1", 6666)

mod, params = relay.frontend.from_tflite(tflite_model,
                                         shape_dict={input_tensor: input_shape},
                                         dtype_dict={input_tensor: input_dtype})

with micro.Session(dev_config) as sess:
    ctx = tvm.micro_dev(0)

    disable_vectorize = tvm.target.build_config(disable_vectorize=True)
    disable_fusion = relay.build_config(disabled_pass={'FuseOps'})
    with disable_vectorize, disable_fusion:
        graph, c_mod, params = relay.build(mod, target=TARGET, params=params)
    micro_mod = micro.create_micro_mod(c_mod, dev_config)
    mod = graph_runtime.create(graph, micro_mod, ctx)
    mod.set_input(**params)

This fails at the relay.frontend.from_tflite(tflite_model, shape_dict={input_tensor: input_shape}, dtype_dict={input_tensor: input_dtype}) call with:

Traceback (most recent call last):

  File "./working-micro-st-tflite.py", line 38, in <module>
    dtype_dict={input_tensor: input_dtype})

  File "/home/tgall/tvm/tvm/python/tvm/relay/frontend/tflite.py", line 2582, in from_tflite
    mod = IRModule.from_expr(func)

  File "/home/tgall/tvm/tvm/python/tvm/ir/module.py", line 222, in from_expr
    return _ffi_api.Module_FromExpr(expr, funcs, defs)

  File "/home/tgall/tvm/tvm/python/tvm/_ffi/_ctypes/packed_func.py", line 225, in __call__
    raise get_last_ffi_error()

tvm._ffi.base.TVMError: Traceback (most recent call last):
  [bt] (8) /home/tgall/tvm/tvm/build/libtvm.so(tvm::RelayExpr tvm::relay::TypeInferencer::Resolver::AttachCheckedType<tvm::relay::FunctionNode>(tvm::relay::FunctionNode const*)+0x1bf) [0x7f7b82c375bf]
  [bt] (7) /home/tgall/tvm/tvm/build/libtvm.so(tvm::relay::ExprMutator::VisitExpr_(tvm::relay::FunctionNode const*)+0x659) [0x7f7b82ceefb9]
  [bt] (6) /home/tgall/tvm/tvm/build/libtvm.so(tvm::relay::ExprMutator::VisitExpr(tvm::RelayExpr const&)+0x96) [0x7f7b82cf1a06]
  [bt] (5) /home/tgall/tvm/tvm/build/libtvm.so(tvm::relay::ExprFunctor<tvm::RelayExpr (tvm::RelayExpr const&)>::VisitExpr(tvm::RelayExpr const&)+0x82) [0x7f7b82cf71b2]
  [bt] (4) /home/tgall/tvm/tvm/build/libtvm.so(tvm::relay::ExprFunctor<tvm::RelayExpr (tvm::RelayExpr const&)>::InitVTable()::{lambda(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<tvm::RelayExpr (tvm::RelayExpr const&)>*)#3}::_FUN(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<tvm::RelayExpr (tvm::RelayExpr const&)>*)+0x2c) [0x7f7b82cf3e0c]
  [bt] (3) /home/tgall/tvm/tvm/build/libtvm.so(tvm::relay::TypeInferencer::Resolver::VisitExpr_(tvm::relay::VarNode const*)+0x87) [0x7f7b82c3af27]
  [bt] (2) /home/tgall/tvm/tvm/build/libtvm.so(tvm::relay::TypeInferencer::Resolver::VisitVar(tvm::relay::Var const&)+0xe2) [0x7f7b82c3acf2]
  [bt] (1) /home/tgall/tvm/tvm/build/libtvm.so(tvm::RelayExpr tvm::relay::TypeInferencer::Resolver::AttachCheckedType<tvm::relay::VarNode>(tvm::relay::VarNode const*)+0x1b3) [0x7f7b82c34663]
  [bt] (0) /home/tgall/tvm/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x79) [0x7f7b8235bf19]
  File "/home/tgall/tvm/tvm/src/relay/transforms/type_infer.cc", line 689
TVMError: Check failed: checked_type.as<IncompleteTypeNode>() == nullptr: Cannot resolve type of Var(dense_4_input) at (nullptr)

I’ve dropped the model into http://people.linaro.org/~tom.gall/sine_model.tflite

Any suggestions on how best to debug this?

Thanks!

I’ve done a bit more debugging, which is useful for trying to understand how things are put together. The n00b hat is firmly in place, so again, pointers on how best to debug this dark back alleyway of libtvm.so are deeply appreciated.

If I dump the tflite model using TensorFlow’s tools I get: https://people.linaro.org/~tom.gall/visualized_model.html

The func object that is passed into Module_FromExpr has the following:

FunctionNode([Var(dense_4_input), Var(_param_1, ty=TensorType([16, 1], float32)), Var(_param_2, ty=TensorType([16], float32)), Var(_param_3, ty=TensorType([16, 16], float32)), Var(_param_4, ty=TensorType([16], float32)), Var(_param_5, ty=TensorType([1, 16], float32)), Var(_param_6, ty=TensorType([1], float32))], (nullptr), CallNode(Op(nn.bias_add), [CallNode(Op(nn.dense), [CallNode(Op(reshape), [CallNode(Op(nn.relu), [CallNode(Op(nn.bias_add), [CallNode(Op(nn.dense), [CallNode(Op(reshape), [CallNode(Op(nn.relu), [CallNode(Op(nn.bias_add), [CallNode(Op(nn.dense), [CallNode(Op(reshape), [Var(dense_4_input)], relay.attrs.ReshapeAttrs(0x173c4e8), []), Var(_param_1, ty=TensorType([16, 1], float32))], relay.attrs.DenseAttrs(0x16a3088), []), Var(_param_2, ty=TensorType([16], float32))], relay.attrs.BiasAddAttrs(0x173d818), [])], (nullptr), [])], relay.attrs.ReshapeAttrs(0x16c4ce8), []), Var(_param_3, ty=TensorType([16, 16], float32))], relay.attrs.DenseAttrs(0x16c1418), []), Var(_param_4, ty=TensorType([16], float32))], relay.attrs.BiasAddAttrs(0x16541d8), [])], (nullptr), [])], relay.attrs.ReshapeAttrs(0x16c18f8), []), Var(_param_5, ty=TensorType([1, 16], float32))], relay.attrs.DenseAttrs(0x16c1c68), []), Var(_param_6, ty=TensorType([1], float32))], relay.attrs.BiasAddAttrs(0x17359e8), []), [], (nullptr))

The “crash” is: TVMError: Check failed: checked_type.as<IncompleteTypeNode>() == nullptr: Cannot resolve type of Var(dense_4_input) at (nullptr)

So I guess the question is: instead of (nullptr), shouldn’t that be something to the tune of a float32 TensorType?
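For reference, the dump above came from dropping into pdb inside from_tflite, right before the IRModule.from_expr(func) call, and printing func. A rough sketch of what I poked at from the debugger (just hackery, nothing official):

# inside pdb, stopped just before `mod = IRModule.from_expr(func)` in
# python/tvm/relay/frontend/tflite.py
for p in func.params:
    # the _param_N vars print with a concrete TensorType; dense_4_input
    # is the one whose annotation comes back empty, matching the error
    print(p.name_hint, p.type_annotation)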

Hi Tom,

It looks like you are hitting a bad combination of 3 issues.

  1. The name of your input in the shape dictionary doesn’t match the one in the TFLite model. Unfortunately TF uses strings to match inputs, and the names must match exactly or you will have issues.
  2. The TFLite frontend should warn you that you didn’t provide a shape for the input, but it doesn’t.
  3. Finally, the actual stack trace you received is a real error caused by the input not having shape information, but the message just doesn’t clearly state that.

I will make sure 2 and 3 are followed up on with some bug fixes. In the meantime, can you try just doing input_tensor = "dense_4_input"? I believe that should fix your problem. It also looks like your input shape might be wrong: (1,) would be a single scalar number in tensor form (or a 1-length vector if you will).

Thanks! That makes complete sense.

Though on #2, if the input isn’t named in the shape dictionary, I kinda wonder whether that couldn’t be figured out algorithmically. tensorflow/lite/tools:visualize from the TensorFlow project seems to be able to do it. Might see what I can come up with.
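As a rough first pass, something along these lines (using the same tflite flatbuffer bindings imported above, newer-style API) seems to list a model’s graph inputs, which is how I’d double-check the name next time. Treat it as a sketch, not gospel:

import tflite

buf = open("/home/tgall/tvm/utvm-exp/sine_model.tflite", "rb").read()
model = tflite.Model.GetRootAsModel(buf, 0)
subgraph = model.Subgraphs(0)

for i in range(subgraph.InputsLength()):
    tensor = subgraph.Tensors(subgraph.Inputs(i))
    name = tensor.Name().decode("utf-8")
    shape = [tensor.Shape(j) for j in range(tensor.ShapeLength())]
    # for the sine model this prints the dense_4_input name and its shape
    print(name, shape)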

For my fellow newbies out there, here’s the final working solution which uses microTVM.

import os
import numpy as np
import tvm
import tvm.micro as micro
from tvm.contrib import graph_runtime, util

from tvm import relay
from tvm.contrib.download import download_testdata

TARGET = 'c -device=micro_dev'

model_dir ="/home/tgall/tvm/utvm-exp/"
tflite_model_file = os.path.join(model_dir, "sine_model.tflite")
tflite_model_buf = open(tflite_model_file, "rb").read()

# Get TFLite model from buffer
try:
    import tflite
    tflite_model = tflite.Model.GetRootAsModel(tflite_model_buf, 0)
    version = tflite_model.Version()
    print ("Model Version: " + version)
except AttributeError:
    import tflite.Model
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model_buf, 0)
    version = tflite_model.Version()
    print ("Model Version: " + str(version))


# input_tensor = "input"
input_tensor = "dense_4_input"
input_shape = (1,)
input_dtype = "float32"

dev_config = micro.device.arm.stm32f746xx.generate_config("127.0.0.1", 6666)

# pdb.set_trace()
mod, params = relay.frontend.from_tflite(tflite_model,
                                         shape_dict={input_tensor: input_shape},
                                         dtype_dict={input_tensor: input_dtype})

with micro.Session(dev_config) as sess:
    ctx = tvm.micro_dev(0)

    disable_vectorize = tvm.target.build_config(disable_vectorize=True)
    disable_fusion = relay.build_config(disabled_pass={'FuseOps'})
    with disable_vectorize, disable_fusion:
        graph, c_mod, params = relay.build(mod, target=TARGET, params=params)
    micro_mod = micro.create_micro_mod(c_mod, dev_config)
    mod = graph_runtime.create(graph, micro_mod, ctx)
    mod.set_input(**params)
    
    # throw a simple single bogus number at the model
    mod.set_input(input_tensor, tvm.nd.array(np.array([0.5], dtype="float32")))

    mod.run()

    # Get output
    tvm_output = mod.get_output(0).asnumpy()

    print("result is: "+str(tvm_output))