diff --git a/llvm/projects/keras/frontend/approxhpvm_translator.py b/llvm/projects/keras/frontend/approxhpvm_translator.py
index f6f989ef85d2ac05adeff5c34fa14c6e2ee3ad8e..13f61876206a63333689758dd1cc41af2dc0767f 100644
--- a/llvm/projects/keras/frontend/approxhpvm_translator.py
+++ b/llvm/projects/keras/frontend/approxhpvm_translator.py
@@ -351,6 +351,8 @@ class TensorRtTranslator:
     inst_str = "void* " + output_var + " = "
     inst_str += "tensor" + func_name + "(" + input_var + "); \n"
 
+    print ("***** inst_str = ", inst_str, "\n")
+
     return inst_str
 
 
@@ -392,7 +394,6 @@ class TensorRtTranslator:
       else:
         inst_str += "1); \n"
 
-
     self.program_str += inst_str
 
 
@@ -421,21 +422,22 @@ class TensorRtTranslator:
 
       # NOTE: Changing output variable
       out_var_name1 = out_var_name2
-
-    if self.hasActivation(cur_node):
-      activation_type = cur_node.activation_type
-      out_var_name3 = self.getVariableName(cur_node)
-
-      inst_str = self.genActivationCall(out_var_name1, out_var_name3, activation_type)
-      self.program_str += inst_str
-
 
     if layer_type == "Activation":
       input_var_name = self.getSingleInputName(cur_node)
 
       inst_str = self.genActivationCall(input_var_name, out_var_name1, cur_node.activation_type)
+      self.program_str += inst_str
+
+
+    if self.hasActivation(cur_node) and layer_type != "Activation":
+      activation_type = cur_node.activation_type
+      out_var_name3 = self.getVariableName(cur_node)
+
+      inst_str = self.genActivationCall(out_var_name1, out_var_name3, activation_type)
       self.program_str += inst_str
 
+
     if layer_type == "BatchNormalization":
       input_var_name = self.getSingleInputName(cur_node)
 
@@ -482,7 +484,9 @@ class TensorRtTranslator:
 
     # Skip visited nodes
     if cur_node.layer_name in visited_nodes:
       return
-
+
+    print ("-visiting = ", cur_node.layer_name, "\n")
+
     if dfg.predVisited(cur_node, visited_nodes):
       visited_nodes[cur_node.layer_name] = True