How to compile a model to Hexagon?

I tried to compile a model to Hexagon using the following code:

import tvm
from tvm import relay

with tvm.transform.PassContext(opt_level=3):
    with tvm.target.hexagon() as target:
        graph, lib, params = relay.build(mod, target, params=params)

But I got the following error:

---------------------------------------------------------------------------
TVMError                                  Traceback (most recent call last)
<ipython-input-6-ee7412e11cb1> in <module>
      1 with tvm.transform.PassContext(opt_level=3):
      2     with tvm.target.hexagon() as target:
----> 3         graph, lib, params = relay.build(mod, target, params=params)

/tvm/python/tvm/relay/build_module.py in build(mod, target, target_host, params)
    249     with tophub_context:
    250         bld_mod = BuildModule()
--> 251         graph_json, mod, params = bld_mod.build(mod, target, target_host, params)
    252     return graph_json, mod, params
    253 

/tvm/python/tvm/relay/build_module.py in build(self, mod, target, target_host, params)
    118             self._set_params(params)
    119         # Build the IR module
--> 120         self._build(mod, target, target_host)
    121         # Get artifacts
    122         graph_json = self.get_json()

/tvm/python/tvm/_ffi/_ctypes/packed_func.py in __call__(self, *args)
    223                 self.handle, values, tcodes, ctypes.c_int(num_args),
    224                 ctypes.byref(ret_val), ctypes.byref(ret_tcode)) != 0:
--> 225             raise get_last_ffi_error()
    226         _ = temp_args
    227         _ = args

TVMError: Traceback (most recent call last):
  [bt] (8) /tvm/build/libtvm.so(tvm::relay::backend::MemoizedExprTranslator<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > >::VisitExpr(tvm::RelayExpr const&)+0x139) [0x7f38c4b20699]
  [bt] (7) /tvm/build/libtvm.so(tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>::VisitExpr(tvm::RelayExpr const&)+0x170) [0x7f38c4b204c0]
  [bt] (6) /tvm/build/libtvm.so(tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>::InitVTable()::{lambda(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*)#6}::_FUN(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*)+0x26) [0x7f38c4b0a516]
  [bt] (5) /tvm/build/libtvm.so(tvm::relay::backend::GraphRuntimeCodegen::VisitExpr_(tvm::relay::CallNode const*)+0xecb) [0x7f38c4b1ce4b]
  [bt] (4) /tvm/build/libtvm.so(+0x133cbf3) [0x7f38c4aedbf3]
  [bt] (3) /tvm/build/libtvm.so(tvm::relay::CompileEngineImpl::LowerInternal(tvm::relay::CCacheKey const&)+0x66d) [0x7f38c4af9acd]
  [bt] (2) /tvm/build/libtvm.so(tvm::relay::ScheduleGetter::Create(tvm::relay::Function const&)+0xdf7) [0x7f38c4af67d7]
  [bt] (1) /tvm/build/libtvm.so(tvm::relay::OpImplementation::Schedule(tvm::Attrs const&, tvm::runtime::Array<tvm::te::Tensor, void> const&, tvm::Target const&)+0xb0) [0x7f38c4bb3080]
  [bt] (0) /tvm/build/libtvm.so(+0xa492f9) [0x7f38c41fa2f9]
  File "/tvm/python/tvm/_ffi/_ctypes/packed_func.py", line 78, in cfun
    rv = local_pyfunc(*pyargs)
  File "/tvm/python/tvm/relay/op/strategy/generic.py", line 86, in schedule_concatenate
    return topi.generic.schedule_injective(outs)
  File "/tvm/topi/python/topi/generic/injective.py", line 58, in schedule_injective
    raise RuntimeError("schedule_injective not registered for '%s'" % target)
RuntimeError: schedule_injective not registered for 'hexagon llvm -target=hexagon -mcpu=hexagonv66 -mattr=+hvxv66,+hvx-length128b'
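
For reference, the traceback points at the actual gap: topi has no injective schedule registered under the "hexagon" target key, which is what schedule_concatenate dispatches through. Below is a minimal sketch of what such a registration could look like, assuming topi's generic-function dispatch mechanism of that era; the "hexagon" key and the name schedule_injective_hexagon are my guesses for illustration, not upstream code:

# Hypothetical sketch, not upstream TVM code: register a naive fallback
# injective schedule under the assumed "hexagon" target key, mirroring how
# topi registers injective schedules for other targets (e.g. "cpu", "cuda").
from tvm import te
from topi import generic

@generic.schedule_injective.register(["hexagon"])
def schedule_injective_hexagon(outs):
    """Naive fallback schedule for injective ops such as concatenate."""
    outs = [outs] if isinstance(outs, te.tensor.Tensor) else outs
    s = te.create_schedule([x.op for x in outs])
    # Inline all injective stages; no Hexagon-specific tuning is attempted.
    te.schedule.AutoInlineInjective(s)
    return s

(This would only silence the scheduling error; it does not by itself provide working Hexagon codegen.)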

Is this not supported yet, or is there another way to compile and run the model (e.g. with the Hexagon simulator runtime)?

Thanks

The codegen for Hexagon is not upstreamed yet, but I will put up a PR within the next couple of weeks.


It was more than a couple of weeks, but here is the PR:
