Something goes wrong when my model runs

I converted my MXNet model to a Relay graph, and something went wrong when running it.

  File "test_code_05.py", line 40, in <module>
    m.run()
  File "/mnt/ebs0/lst/tvm/tvm/python/tvm/contrib/graph_runtime.py", line 168, in run
    self._run()
  File "/mnt/ebs0/lst/tvm/tvm/python/tvm/_ffi/_ctypes/function.py", line 209, in __call__
    raise get_last_ffi_error()
tvm._ffi.base.TVMError: Traceback (most recent call last):
  [bt] (3) /mnt/ebs0/lst/tvm/tvm/build/libtvm.so(TVMFuncCall+0x46) [0x2abe560a7556]
  [bt] (2) /mnt/ebs0/lst/tvm/tvm/build/libtvm.so(tvm::runtime::GraphRuntime::Run()+0x47) [0x2abe560f6a37]
  [bt] (1) /mnt/ebs0/lst/tvm/tvm/build/libtvm.so(+0x10c9b72) [0x2abe560f8b72]
  [bt] (0) /mnt/ebs0/lst/tvm/tvm/build/libtvm.so(+0x1086a51) [0x2abe560b5a51]
  File "/mnt/ebs0/lst/tvm/tvm/src/runtime/module_util.cc", line 73
TVMError: Check failed: ret == 0 (-1 vs. 0) : Assert fail: (num_args == 4), fused_nn_softmax: num_args should be 4

And here is my code:

# -*- coding: utf-8 -*-
import mxnet as mx
from mxnet.gluon.model_zoo import vision
import tvm
import tvm.relay as relay
import numpy as np
import time
import os
from tvm.contrib import graph_runtime
from tvm.contrib.download import download_testdata
from mxnet.gluon.model_zoo.vision import get_model

batch_shape = (1, 3, 1600, 1600)
shape_dict = {'data': batch_shape}
img_url = 'https://github.com/dmlc/mxnet.js/blob/master/data/cat.png?raw=true'
img_name = 'cat.png'

x = np.zeros(batch_shape)
sym, arg_params, aux_params = mx.model.load_checkpoint('pixellink--', 0)
# arg_params.pop('dense0_bias')
# arg_params.pop('dense0_weight')
mod, params = relay.frontend.from_mxnet(sym, shape_dict,
                                        arg_params=arg_params, aux_params=aux_params)
func = mod["main"]
func = relay.Function(func.params, relay.nn.softmax(func.body), None, func.type_params, func.attrs)
print('start build config...')
target = 'cuda'
print(func, type(mod))
with relay.build_config(opt_level=3):
    graph, lib, params = relay.build(func, target, params=params)

ctx = tvm.gpu(0)
dtype = 'float32'
print('graph create...')
m = graph_runtime.create(graph, lib, ctx)
print('set input...')
m.set_input('data', tvm.nd.array(x.astype(dtype)))
m.set_input(**params)
m.run()
tvm_output = m.get_output(0)
# top1 = np.argmax(tvm_output.asnumpy()[0])


sym, arg_params, aux_params = mx.model.load_checkpoint('pixellink--', 0)
# now run the same checkpoint natively with MXNet, for comparison
os.environ['MXNET_USE_TENSORRT'] = '0'
executor = sym.simple_bind(ctx=mx.gpu(
    0), data=batch_shape, grad_req='null', force_rebind=True)
executor.copy_params_from(arg_params, aux_params)

y_gen = executor.forward(is_train=False, data=x)
y_gen[0].wait_to_read()
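
(Once m.run() succeeds, one way to sanity-check the TVM result against the MXNet executor is a simple element-wise comparison. This is only a sketch and assumes both graphs end with the same final layer, i.e. without the extra softmax discussed below.)

# sketch (assumption): compare the TVM output with the MXNet executor output,
# assuming both forward passes above finished and compute the same tensor
tvm_out = m.get_output(0).asnumpy()
mx_out = y_gen[0].asnumpy()
np.testing.assert_allclose(tvm_out, mx_out, rtol=1e-3, atol=1e-3)
print('TVM and MXNet outputs match within tolerance')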

Have you solved this problem? I have the same problem. How did you solve it? Thanks!

After removing this line

func = relay.Function(func.params, relay.nn.softmax(func.body), None, func.type_params, func.attrs)

the bug is fixed
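
If you still need the probabilities, a workaround is to leave the Relay function exactly as from_mxnet produced it and apply the softmax on the host after get_output. A minimal sketch, assuming the logits come out of output 0 with the class axis at dimension 1:

# sketch (assumption): apply softmax on the host instead of wrapping the
# Relay function, so the built graph stays exactly as from_mxnet produced it
import numpy as np

def softmax_np(logits, axis=1):
    # numerically stable softmax along the class axis
    e = np.exp(logits - logits.max(axis=axis, keepdims=True))
    return e / e.sum(axis=axis, keepdims=True)

m.run()  # runs without the extra Relay softmax
probs = softmax_np(m.get_output(0).asnumpy())

This avoids touching the compiled graph at all, at the cost of doing the softmax in NumPy on the CPU.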