Skip to content
Snippets Groups Projects
Commit 9525804b authored by Hashim Sharif's avatar Hashim Sharif
Browse files

Removing stray code comments

parent 2a771b57
No related branches found
No related tags found
No related merge requests found
......@@ -80,7 +80,6 @@ class DFG:
self.add_dfg_edge(layer.input[j].op.name, dfg_node)
else:
#if self.hasSingleInput(layer):
print (layer.input.name)
self.add_dfg_edge(layer.input.name, dfg_node)
......@@ -183,21 +182,13 @@ class DFGNode:
print ("padding = ", self.padding);
if layer_type == "BatchNormalization":
#print (layer.__dir__())
self.epsilon = layer.epsilon
self.beta = layer.beta
self.gamma = layer.gamma
self.moving_mean = layer.moving_mean
self.moving_variance = layer.moving_variance
#print("moving mean = ", self.moving_mean, "\n")
#print (self.moving_mean.__dir__())
#print("moving variance = ", self.moving_variance, "\n")
#print (self.moving_variance.__dir__())
#print("gamma = ", self.gamma, "\n")
#print("beta = ", self.beta, "\n")
......@@ -280,9 +271,8 @@ class TensorRtTranslator:
print ("Input_type = ", cur_node.inputs[0].layer_type)
# NOTE: Assuming the 'inference' phase - hence skipping Dropout
pred_layer_type = cur_node.inputs[0].layer_type
# FIXME: Assuming the 'inference' phase - hence skipping Dropout
#if pred_layer_type == "Flatten" or pred_layer_type == "Dropout":
if self.isSkipLayer(pred_layer_type):
cur_node = self.getPrevActiveLayer(cur_node)
......@@ -726,9 +716,7 @@ class TensorRtTranslator:
if test_data is not None and self.dfg.last_node is not None:
last_node = self.dfg.last_node
output_var = self.output_map[last_node.layer_name]
#accuracy_call = "\ncomputeAccuracy2(labels," + str(len(test_data)) + "," + output_var + "); \n"
#self.program_str += accuracy_call
destructors = "\nllvm_hpvm_cleanupTensorRt(); \n"
self.program_str += destructors
......@@ -804,8 +792,7 @@ class TensorRtTranslator:
def endBatchLoop(self):
end_loop_str = ""
#end_loop_str += "\nuint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); \n"
#end_loop_str += "\nuint32_t* labels = readLabelsBatch2(labels_path.c_str(),start,end); \n"
end_loop_str += "\nuint32_t* labels = readLabelsBatch3(labels_path.c_str(),start,end); \n"
last_node = self.dfg.last_node
......
......@@ -65,8 +65,9 @@ class HPVMTranslator:
return "input"
print ("Input_type = ", cur_node.inputs[0].layer_type)
pred_layer_type = cur_node.inputs[0].layer_type
# FIXME: Assuming the 'inference' phase - hence skipping Dropout
# NOTE: Assuming the 'inference' phase - hence skipping Dropout
#if pred_layer_type == "Flatten" or pred_layer_type == "Dropout":
if self.isSkipLayer(pred_layer_type):
cur_node = self.getPrevActiveLayer(cur_node)
......@@ -228,7 +229,6 @@ class HPVMTranslator:
hpvm_edge_str += " __visc__bindIn(" + out_var_name + ", " + str(index2) + ", 1, 0); \n"
elif input_var_name in self.hpvm_node_names:
#input_index = self.output_map[input_var_name]
hpvm_edge_str += " __visc__edge(" + input_var_name + ", " + out_var_name + ", 1, 0, 0, 0); \n"
hpvm_edge_str += " __visc__edge(" + input_var_name + ", " + out_var_name + ", 1, 1, 1, 0); \n"
......@@ -241,7 +241,6 @@ class HPVMTranslator:
hpvm_edge_str += " __visc__bindIn(" + out_var_name + ", " + str(index2) + ", 3, 0); \n"
elif input_var_name2 in self.hpvm_node_names:
#input_index = self.output_map[input_var_name2]
hpvm_edge_str += " __visc__edge(" + input_var_name2 + ", " + out_var_name + ", 1, 0, 2, 0); \n"
hpvm_edge_str += " __visc__edge(" + input_var_name2 + ", " + out_var_name + ", 1, 1, 3, 0); \n"
......@@ -259,7 +258,7 @@ class HPVMTranslator:
footer_str = self.genNodeFooter(2)
inst_str += footer_str
#self.genHpvmNodeVar(out_var_name)
input_var_name = self.getSingleInputName(cur_node)
weight_name = cur_node.layer_name + "_w"
......@@ -497,11 +496,9 @@ class HPVMTranslator:
self.genDenseNode(cur_node)
if nodeHasBias(cur_node):
#if self.hasBiasAdd(cur_node):
self.genBiasNode(cur_node)
if nodeHasActivation(cur_node) and layer_type != "Activation":
#if self.hasActivation(cur_node):
self.genSubActivationNode(cur_node)
if layer_type == "Activation":
......@@ -637,7 +634,6 @@ class HPVMTranslator:
main_func_str += "hpvm_request_tensor(result, 0); \n\n"
main_func_str += "__visc__cleanup(); \n "
#main_func_str += "computeAccuracy2(labels, " + str(len(test_data)) + ", result); \n"
main_func_str += "computeAccuracy3(labels, result); \n"
main_func_str += "return 0; \n\n"
main_func_str += "} \n"
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment