TensorFlow LSTM: an internal invariant was violated while typechecking

I am converting a TensorFlow pb model to TVM (my conversion script is roughly the sketch at the end of this post), but the conversion fails with an error. Here is the tail of the converted Relay program:

  %318 = @tensor_array_read_float32_1_256(%87, %317);
  %319 = @tensor_get_data_float32_1_256(%318);
  %320 = expand_dims(%319, axis=0);
  %321 = take(%88, 58);
  %322 = @tensor_array_read_float32_1_256(%87, %321);
  %323 = @tensor_get_data_float32_1_256(%322);
  %324 = expand_dims(%323, axis=0);
  %325 = (%92, %96, %100, %104, %108, %112, %116, %120, %124, %128, %132, %136, %140, %144, %148, %152, %156, %160, %164, %168, %172, %176, %180, %184, %188, %192, %196, %200, %204, %208, %212, %216, %220, %224, %228, %232, %236, %240, %244, %248, %252, %256, %260, %264, %268, %272, %276, %280, %284, %288, %292, %296, %300, %304, %308, %312, %316, %320, %324);
  %326 = concatenate(%325);
  strided_slice(%326, meta[relay.Constant][28], meta[relay.Constant][29], meta[relay.Constant][30], begin=[58, 0, 0], end=[0, 1, 256], strides=[1, 1, 1])
}
// meta data omitted. you can use show_meta_data=True to include meta data
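The check that fails corresponds to the last strided_slice in the dump above: begin=[58, 0, 0] versus end=[0, 1, 256] on axis 0 with a positive stride. As a minimal sketch of my own (a hand-written reduction, not taken from the converted model), I believe the same StridedSliceRel check can be triggered in isolation like this:

    import tvm
    from tvm import relay

    # Hypothetical standalone reduction: begin (58) > end (0) on axis 0
    # with a positive stride, like the op in the converted graph.
    x = relay.var("x", shape=(59, 1, 256), dtype="float32")
    y = relay.strided_slice(x, begin=[58, 0, 0], end=[0, 1, 256], strides=[1, 1, 1])
    mod = tvm.IRModule.from_expr(relay.Function([x], y))
    # Type inference should hit the same
    # "Check failed: begin_v <= end_v (58 vs. 0)" error here.
    mod = relay.transform.InferType()(mod)

The full error output from the conversion: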

[15:35:18] /workspace/tvm/src/printer/doc.cc:55: text node: ' an internal invariant was violated while typechecking your program [15:35:18] /workspace/tvm/src/relay/op/tensor/transform.cc:1950: Check failed: begin_v <= end_v (58 vs. 0) : strided_slice get invalid slice at axis 0
Stack trace:
  [bt] (0) /workspace/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x67) [0x7f0b41bf5b27]
  [bt] (1) /workspace/tvm/build/libtvm.so(tvm::relay::StridedSliceRel(tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&)+0xd67) [0x7f0b422344e7]
  [bt] (2) /workspace/tvm/build/libtvm.so(void tvm::runtime::TypedPackedFunc<bool (tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&)>::AssignTypedLambda<bool (*)(tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&)>(bool (*)(tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&))::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}::operator()(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*) const+0x210) [0x7f0b420af300]
  [bt] (3) /workspace/tvm/build/libtvm.so(tvm::relay::TypeSolver::Solve()+0x3c5) [0x7f0b4231bbe5]
  [bt] (4) /workspace/tvm/build/libtvm.so(tvm::relay::TypeInferencer::Infer(tvm::RelayExpr)+0x55) [0x7f0b42468e85]
  [bt] (5) /workspace/tvm/build/libtvm.so(tvm::relay::InferType(tvm::relay::Function const&, tvm::IRModule const&, tvm::GlobalVar const&)+0x1f1) [0x7f0b42469671]
  [bt] (6) /workspace/tvm/build/libtvm.so(tvm::RunTypeCheck(tvm::IRModule const&, tvm::GlobalVar const&, tvm::relay::Function)+0x287) [0x7f0b41d0e297]
  [bt] (7) /workspace/tvm/build/libtvm.so(tvm::IRModuleNode::Add(tvm::GlobalVar const&, tvm::BaseFunc const&, bool)+0xda) [0x7f0b41d11bea]
  [bt] (8) /workspace/tvm/build/libtvm.so(+0xc3acb6) [0x7f0b41d13cb6]

; ' should not has tab or newline.
Traceback (most recent call last):
  File "convert2tvm.py", line 163, in <module>
    convert(in_path.format('dga_lstm_test'), out_path.format('dga_lstm_test'), shape_dict, output_or_dtype)
  File "convert2tvm.py", line 78, in pb2tvm
    outputs=output_name
  File "/workspace/tvm/python/tvm/relay/frontend/tensorflow.py", line 3574, in from_tensorflow
    mod, params = g.from_tensorflow(graph, layout, shape, outputs)
  File "/workspace/tvm/python/tvm/relay/frontend/tensorflow.py", line 2967, in from_tensorflow
    func = self._get_relay_func(graph, layout=layout, shape=shape, outputs=outputs)
  File "/workspace/tvm/python/tvm/relay/frontend/tensorflow.py", line 2926, in _get_relay_func
    self._backtrack_construct(node.name)
  File "/workspace/tvm/python/tvm/relay/frontend/tensorflow.py", line 3508, in _backtrack_construct
    op = self._convert_operator(node.op, inputs, attr, self._graph)
  File "/workspace/tvm/python/tvm/relay/frontend/tensorflow.py", line 3365, in _convert_operator
    sym = convert_map[op_name](inputs, attrs, self._params, self._mod)
  File "/workspace/tvm/python/tvm/relay/frontend/tensorflow.py", line 1529, in _impl
    out_shape = _infer_shape(out, mod=mod)
  File "/workspace/tvm/python/tvm/relay/frontend/common.py", line 487, in infer_shape
    out_type = infer_type(inputs, mod=mod)
  File "/workspace/tvm/python/tvm/relay/frontend/common.py", line 461, in infer_type
    mod["main"] = _function.Function([], node)
  File "/workspace/tvm/python/tvm/ir/module.py", line 74, in __setitem__
    return self._add(var, val)
  File "/workspace/tvm/python/tvm/ir/module.py", line 83, in _add
    _ffi_api.Module_Add(self, var, val, update)
  File "/workspace/tvm/python/tvm/_ffi/_ctypes/packed_func.py", line 225, in __call__
    raise get_last_ffi_error()
tvm._ffi.base.TVMError: Traceback (most recent call last):
  [bt] (8) /workspace/tvm/build/libtvm.so(TVMFuncCall+0x61) [0x7f0b42619621]
  [bt] (7) /workspace/tvm/build/libtvm.so(+0xc3b6d4) [0x7f0b41d146d4]
  [bt] (6) /workspace/tvm/build/libtvm.so(+0xc3acb6) [0x7f0b41d13cb6]
  [bt] (5) /workspace/tvm/build/libtvm.so(tvm::IRModuleNode::Add(tvm::GlobalVar const&, tvm::BaseFunc const&, bool)+0xda) [0x7f0b41d11bea]
  [bt] (4) /workspace/tvm/build/libtvm.so(tvm::RunTypeCheck(tvm::IRModule const&, tvm::GlobalVar const&, tvm::relay::Function)+0x287) [0x7f0b41d0e297]
  [bt] (3) /workspace/tvm/build/libtvm.so(tvm::relay::InferType(tvm::relay::Function const&, tvm::IRModule const&, tvm::GlobalVar const&)+0x1f1) [0x7f0b42469671]
  [bt] (2) /workspace/tvm/build/libtvm.so(tvm::relay::TypeInferencer::Infer(tvm::RelayExpr)+0x86) [0x7f0b42468eb6]
  [bt] (1) /workspace/tvm/build/libtvm.so(tvm::ErrorReporter::RenderErrors(tvm::IRModule const&, bool)+0x2172) [0x7f0b41cfc342]
  [bt] (0) /workspace/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x67) [0x7f0b41bf5b27]
  [bt] (8) /workspace/tvm/build/libtvm.so(+0xc3acb6) [0x7f0b41d13cb6]
  [bt] (7) /workspace/tvm/build/libtvm.so(tvm::IRModuleNode::Add(tvm::GlobalVar const&, tvm::BaseFunc const&, bool)+0xda) [0x7f0b41d11bea]
  [bt] (6) /workspace/tvm/build/libtvm.so(tvm::RunTypeCheck(tvm::IRModule const&, tvm::GlobalVar const&, tvm::relay::Function)+0x287) [0x7f0b41d0e297]
  [bt] (5) /workspace/tvm/build/libtvm.so(tvm::relay::InferType(tvm::relay::Function const&, tvm::IRModule const&, tvm::GlobalVar const&)+0x1f1) [0x7f0b42469671]
  [bt] (4) /workspace/tvm/build/libtvm.so(tvm::relay::TypeInferencer::Infer(tvm::RelayExpr)+0x55) [0x7f0b42468e85]
  [bt] (3) /workspace/tvm/build/libtvm.so(tvm::relay::TypeSolver::Solve()+0x3c5) [0x7f0b4231bbe5]
  [bt] (2) /workspace/tvm/build/libtvm.so(void tvm::runtime::TypedPackedFunc<bool (tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&)>::AssignTypedLambda<bool (*)(tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&)>(bool (*)(tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&))::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}::operator()(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*) const+0x210) [0x7f0b420af300]
  [bt] (1) /workspace/tvm/build/libtvm.so(tvm::relay::StridedSliceRel(tvm::runtime::Array<tvm::Type, void> const&, int, tvm::Attrs const&, tvm::TypeReporter const&)+0xd67) [0x7f0b422344e7]
  [bt] (0) /workspace/tvm/build/libtvm.so(dmlc::LogMessageFatal::~LogMessageFatal()+0x67) [0x7f0b41bf5b27]
  File "/workspace/tvm/src/ir/error.cc", line 132
TVMError:
Error(s) have occurred. The program has been annotated with them:

Has anyone met the same error? Why does it happen, and how can I fix it? Does TVM not support TensorFlow's LSTM? Thanks very much!
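For context, my conversion script is roughly the simplified sketch below. The pb path, the shape_dict values, and the output name are placeholders here, not the real ones from convert2tvm.py:

    import tensorflow as tf
    from tvm import relay

    def pb2tvm(pb_path, shape_dict, output_names):
        # Load the frozen TensorFlow graph.
        with tf.io.gfile.GFile(pb_path, "rb") as f:
            graph_def = tf.compat.v1.GraphDef()
            graph_def.ParseFromString(f.read())
        # Import into Relay; the error above is raised from inside this call.
        return relay.frontend.from_tensorflow(
            graph_def, shape=shape_dict, outputs=output_names
        )

    # Placeholder inputs; the real script reads these from its own config.
    mod, params = pb2tvm(
        "dga_lstm_test.pb",
        shape_dict={"input": (1, 59)},
        output_names=["output"],
    )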