Does Relay VM executor support gradients?

I want to know whether the Relay VM executor supports gradients.
I tried to use the VM executor to test op gradients (tvm/tests/python/relay/test_op_level1.py),
but it could not be carried out.

def test_unary_op():
    def check_single_op(opfunc, ref):
        """Build y = opfunc(x), take its gradient, run it on the VM executor,
        and compare against the NumPy reference gradient `ref`."""
        shape = (10, 4)
        dtype = 'float32'
        tp = relay.TensorType(shape, dtype)
        x = relay.var("x", tp)
        y = opfunc(x)

        if ref is not None:
            data = np.random.rand(*shape).astype(dtype)
            ref_grad = ref(data)
            fwd_func = relay.Function([x], y)
            fwd_func = run_infer_type(fwd_func)
            bwd_func = run_infer_type(gradient(fwd_func))

            for target, ctx in ctx_list():
                intrp = relay.create_executor('vm', ctx=ctx, target=target)
                # evaluate returns (forward result, tuple of gradients w.r.t. inputs)
                op_res, (op_grad, ) = intrp.evaluate(bwd_func)(data)
                np.testing.assert_allclose(op_grad.asnumpy(), ref_grad, rtol=0.01)

    # NOTE: this loop must live INSIDE test_unary_op — check_single_op is a
    # nested function and is not visible at module scope (the original paste
    # had it dedented to the top level, which raises NameError).
    for opfunc, ref in [(tvm.relay.log, lambda x: 1 / x),
                        (tvm.relay.exp, np.exp),
                        (tvm.relay.sigmoid, lambda x: sigmoid(x) * (1 - sigmoid(x))),
                        (tvm.relay.tanh, lambda x: 1 - np.tanh(x) * np.tanh(x)),
                        (tvm.relay.sqrt, lambda x: 0.5 * np.power(x, -0.5)),
                        (tvm.relay.abs, lambda x: np.where(x < 0, -np.ones_like(x), np.ones_like(x))),
                        (relay.nn.relu, lambda x: np.where(x < 0, np.zeros_like(x), np.ones_like(x))),
                        (tvm.relay.cos, lambda x: -1.0 * np.sin(x)),
                        (tvm.relay.sin, lambda x: np.cos(x)),
                        (tvm.relay.atan, lambda x: 1 / (1 + np.power(x, 2.0)))]:
        check_single_op(opfunc, ref)

below is the log information

/usr/bin/python3.6 /home/tvm/tests/python/relay/test_op_grad_level1.py
Traceback (most recent call last):

  File "/home/tvm/tests/python/relay/test_op_grad_level1.py", line 122, in <module>
    test_unary_op()

  File "/home/tvm/tests/python/relay/test_op_grad_level1.py", line 67, in test_unary_op
    check_single_op(opfunc, ref)

  File "/home/tvm/tests/python/relay/test_op_grad_level1.py", line 54, in check_single_op
    op_res, (op_grad, ) = intrp.evaluate(bwd_func)(data)

  File "/home/tvm/python/tvm/relay/backend/interpreter.py", line 251, in evaluate
    return self._make_executor(expr)

  File "/home/tvm/python/tvm/relay/backend/vm.py", line 429, in _make_executor
    main = self.mod["main"]

  File "/home/tvm/python/tvm/relay/module.py", line 115, in __getitem__
    return _module.Module_Lookup_str(self, var)

  File "/home/tvm/python/tvm/_ffi/_ctypes/function.py", line 207, in __call__
    raise get_last_ffi_error()

tvm._ffi.base.TVMError: Traceback (most recent call last):
  [bt] (4) /home/tvm/build/libtvm.so(TVMFuncCall+0x65) [0x7fe08bd201e5]
  [bt] (3) /home/tvm/build/libtvm.so(+0xb28b9a) [0x7fe08bc29b9a]
  [bt] (2) /home/tvm/build/libtvm.so(tvm::relay::ModuleNode::Lookup(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) const+0x29) [0x7fe08bc29ac9]
  [bt] (1) /home/tvm/build/libtvm.so(tvm::relay::ModuleNode::GetGlobalVar(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) const+0x217) [0x7fe08bc29a57]
  [bt] (0) /home/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x43) [0x7fe08b4ea4b3]
  File "/home/tvm/src/relay/ir/module.cc", line 70
TVMError: Check failed: it != global_var_map_.end(): Cannot find global var main in the Module

@MarisaKirisame
@wweic
If anyone has any suggestions, please tell me — thanks very much.

@Ruinhuang This is merely some internal API thing.
When you do create_executor with vm, you must have a module with a main defined.
You can create a module, set its "main" to be that function, and everything will work.

@MarisaKirisame
Thanks for reply. I learned from your tvm-jit branch. And i tried to create a module. but it seems that something is wrong. here is my code

def test_unary_op():
    def check_single_op(opfunc, ref):
        """Build y = opfunc(x), take its gradient, run it on the VM executor,
        and compare against the NumPy reference gradient `ref`."""
        shape = (10, 4)
        dtype = 'float32'
        tp = relay.TensorType(shape, dtype)
        x = relay.var("x", tp)
        y = opfunc(x)

        if ref is not None:
            mod = relay.Module()

            data = np.random.rand(*shape).astype(dtype)
            ref_grad = ref(data)
            fwd_func = relay.Function([x], y)
            fwd_func = run_infer_type(fwd_func)
            bwd_func = run_infer_type(gradient(fwd_func))

            # The VM executor looks up "main" in the module it is given.
            mod["main"] = bwd_func

            for target, ctx in ctx_list():
                # BUG FIX: `mod` must be passed to create_executor; otherwise
                # the executor builds its own empty module and fails with
                # "Cannot find global var main in the Module".
                intrp = relay.create_executor('vm', mod=mod, ctx=ctx, target=target)
                op_res, (op_grad, ) = intrp.evaluate()(data)
                np.testing.assert_allclose(op_grad.asnumpy(), ref_grad, rtol=0.01)

    for opfunc, ref in [(tvm.relay.log, lambda x: 1 / x),
                        (tvm.relay.exp, np.exp),
                        (tvm.relay.sigmoid, lambda x: sigmoid(x) * (1 - sigmoid(x))),
                        (tvm.relay.tanh, lambda x: 1 - np.tanh(x) * np.tanh(x)),
                        (tvm.relay.sqrt, lambda x: 0.5 * np.power(x, -0.5)),
                        (tvm.relay.abs, lambda x: np.where(x < 0, -np.ones_like(x), np.ones_like(x))),
                        (relay.nn.relu, lambda x: np.where(x < 0, np.zeros_like(x), np.ones_like(x))),
                        (tvm.relay.cos, lambda x: -1.0 * np.sin(x)),
                        (tvm.relay.sin, lambda x: np.cos(x))]:
        check_single_op(opfunc, ref)

and here is error log:

Traceback (most recent call last):

  File "/home/tvm/tests/python/relay/test_op_grad_level1.py", line 101, in <module>
    test_unary_op()

  File "/home/tvm/tests/python/relay/test_op_grad_level1.py", line 66, in test_unary_op
    check_single_op(opfunc, ref)

  File "/home/tvm/tests/python/relay/test_op_grad_level1.py", line 54, in check_single_op
    op_res, (op_grad, ) = intrp.evaluate()(data)

  File "/home/tvm/python/tvm/relay/backend/interpreter.py", line 240, in evaluate
    return self._make_executor()

  File "/home/tvm/python/tvm/relay/backend/vm.py", line 215, in _make_executor
    main = self.mod["main"]

  File "/home/tvm/python/tvm/relay/module.py", line 106, in __getitem__
    return _module.Module_Lookup_str(self, var)

  File "/home/tvm/python/tvm/_ffi/_ctypes/function.py", line 210, in __call__
    raise get_last_ffi_error()

tvm._ffi.base.TVMError: Traceback (most recent call last):
  [bt] (4) /home/tvm/build/libtvm.so(TVMFuncCall+0x65) [0x7f2c11cb1e05]
  [bt] (3) /home/tvm/build/libtvm.so(+0x83de0d) [0x7f2c11997e0d]
  [bt] (2) /home/tvm/build/libtvm.so(tvm::relay::ModuleNode::Lookup(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) const+0x29) [0x7f2c11997d09]
  [bt] (1) /home/tvm/build/libtvm.so(tvm::relay::ModuleNode::GetGlobalVar(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) const+0x1fb) [0x7f2c11997c9b]
  [bt] (0) /home/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x43) [0x7f2c1153b2f3]
  File "/home/tvm/src/relay/ir/module.cc", line 66
TVMError: Check failed: it != global_var_map_.end(): Cannot find global var main in the Module

Am I passing the wrong parameter to intrp.evaluate()(data)? I also tried intrp.evaluate(mod)(data) and intrp.evaluate(bwd_func)(data), but none of them work.

create_executor takes a mod — just pass mod=mod.

@MarisaKirisame i have tried, but it can not work :joy: :joy:

 def test_unary_op():
    def check_single_op(opfunc, ref):
        shape = (10, 4)
        dtype = 'float32'
        tp = relay.TensorType(shape, dtype)
        x = relay.var("x", tp)
        y = opfunc(x)

    if ref is not None:
        mod = relay.Module()

        data = np.random.rand(*shape).astype(dtype)
        ref_grad = ref(data)
        fwd_func = relay.Function([x], y)
        fwd_func = run_infer_type(fwd_func)
        bwd_func = run_infer_type(gradient(fwd_func))

        mod["main"] = bwd_func

        for target, ctx in ctx_list():
            intrp = relay.create_executor(kind='vm', mod=mod, ctx=ctx, target=target)
            op_res, (op_grad, ) = intrp.evaluate()(data)
            np.testing.assert_allclose(op_grad.asnumpy(), ref_grad, rtol=0.01)

for opfunc, ref in [(tvm.relay.log, lambda x: 1 / x),
                    (tvm.relay.exp, np.exp),
                    (tvm.relay.sigmoid, lambda x: sigmoid(x) * (1 - sigmoid(x))),
                    (tvm.relay.tanh, lambda x: 1 - np.tanh(x) * np.tanh(x)),
                    (tvm.relay.sqrt, lambda x: 0.5 * np.power(x, -0.5)),
                    (tvm.relay.abs, lambda x: np.where(x < 0, -np.ones_like(x), np.ones_like(x))),
                    (relay.nn.relu, lambda x: np.where(x < 0, np.zeros_like(x), np.ones_like(x))),
                    (tvm.relay.cos, lambda x: -1.0 * np.sin(x)),
                    (tvm.relay.sin, lambda x: np.cos(x)),
                    (tvm.relay.atan, lambda x: 1 / (1 + np.power(x, 2.0)))]:
    check_single_op(opfunc, ref)

this is the error log:

Traceback (most recent call last):

  File "/home/tvm/tests/python/relay/test_op_grad_level1.py", line 100, in <module>
    test_unary_op()

  File "/home/tvm/tests/python/relay/test_op_grad_level1.py", line 65, in test_unary_op
    check_single_op(opfunc, ref)

  File "/home/tvm/tests/python/relay/test_op_grad_level1.py", line 52, in check_single_op
    intrp = relay.create_executor(kind='vm', mod=mod, ctx=ctx, target=target)

  File "/home/tvm/python/tvm/relay/build_module.py", line 296, in create_executor
    return VMExecutor(mod, ctx, target)

  File "/home/tvm/python/tvm/relay/backend/vm.py", line 211, in __init__
    self.vm = compiler.compile(mod, target)

  File "/home/tvm/python/tvm/relay/backend/vm.py", line 180, in compile
    self._compile(mod, target, target_host)

  File "/home/tvm/python/tvm/_ffi/_ctypes/function.py", line 210, in __call__
    raise get_last_ffi_error()

tvm._ffi.base.TVMError: TVMError: Do not have a default for relay.RefCreate

@Ruinhuang So, the Relay VM does not support references. However, you can run partial evaluation followed by dead-code elimination to remove all references from your program. You can look at test_pass_partial_eval and test_dead_code_elimination to see how they work. Another thing you can do is use the first-order gradient instead.

@MarisaKirisame ok,got it. thanks anyway~ :+1::+1: