The following combination of operators (max_pool3d + reshape + matmul) with input shape (1, 8, 2, 2, 512) causes relay.build to hang for the llvm target.
"""Reproduction script: relay.build hangs on the llvm target when compiling a
TensorFlow graph of max_pool3d + reshape + matmul with input shape
(1, 8, 2, 2, 512).

NOTE(review): the pasted original had all block indentation stripped, which is
a SyntaxError; the indentation below is restored to the only structure that
parses and matches the described repro. Uses the TF1 session/graph API.
"""
import tensorflow as tf
import numpy as np
from tvm import relay

dtype = 'float32'
input_name = "input"
dshape = (1, 8, 2, 2, 512)   # NDHWC-style 5-D input for max_pool3d
mm_shape = (16384, 32)       # 16384 = 8 * 2 * 2 * 512, the flattened pool output
mm_weights = np.random.random_sample(mm_shape).astype(dtype)

with tf.Session() as sess:
    x = tf.placeholder(shape=dshape, dtype=dtype, name=input_name)
    # Pool over the full first spatial axis (ksize 8) with unit strides.
    mp1 = tf.nn.max_pool3d(x, ksize=[1, 8, 1, 1, 1], padding="SAME",
                           strides=[1, 1, 1, 1, 1])
    rsh1 = tf.reshape(mp1, [-1, 16384])
    mm1 = tf.matmul(rsh1, mm_weights, name="matmul")
    graph_def = sess.graph_def
    # Freeze the graph so relay's TF frontend can consume it.
    output_graph_def = tf.graph_util.convert_variables_to_constants(
        sess,
        graph_def,
        ["matmul"])

mod, params = relay.frontend.from_tensorflow(
    output_graph_def, layout='NCHW', shape={input_name: dshape})

target = "llvm"
with relay.build_config(opt_level=3):
    # Hangs here for target="llvm" (the bug being reported).
    graph, lib, params = relay.build(mod, target, params=params)
print("Compilation done")