Skip to content
Snippets Groups Projects
Commit f1096cae authored by Hashim Sharif's avatar Hashim Sharif
Browse files

DepthwiseConv2D working with HPVM frontend translator

parent 0a3b3254
No related branches found
No related tags found
No related merge requests found
...@@ -336,24 +336,23 @@ class TensorRtTranslator: ...@@ -336,24 +336,23 @@ class TensorRtTranslator:
return False return False
def genActivationCall(self, input_var, output_var, activation_type): #def genActivationCall(self, input_var, output_var, activation_type):
func_name = "" # func_name = ""
if activation_type == "tanh": # if activation_type == "tanh":
func_name = "Tanh" # func_name = "Tanh"
if activation_type == "relu":
func_name = "Relu"
if activation_type == "softmax": # if activation_type == "relu":
func_name = "Softmax" # func_name = "Relu"
inst_str = "void* " + output_var + " = " # if activation_type == "softmax":
inst_str += "tensor" + func_name + "(" + input_var + "); \n" # func_name = "Softmax"
print ("***** inst_str = ", inst_str, "\n") # inst_str = "void* " + output_var + " = "
# inst_str += "tensor" + func_name + "(" + input_var + "); \n"
# print ("***** inst_str = ", inst_str, "\n")
return inst_str # return inst_str
...@@ -425,8 +424,8 @@ class TensorRtTranslator: ...@@ -425,8 +424,8 @@ class TensorRtTranslator:
if layer_type == "Activation": if layer_type == "Activation":
input_var_name = self.getSingleInputName(cur_node) input_var_name = self.getSingleInputName(cur_node)
inst_str = self.genActivationCall(input_var_name, out_var_name1, cur_node.activation_type) inst_str = genActivationCallStr(input_var_name, out_var_name1, cur_node.activation_type)
self.program_str += inst_str self.program_str += inst_str
...@@ -434,7 +433,7 @@ class TensorRtTranslator: ...@@ -434,7 +433,7 @@ class TensorRtTranslator:
activation_type = cur_node.activation_type activation_type = cur_node.activation_type
out_var_name3 = self.getVariableName(cur_node) out_var_name3 = self.getVariableName(cur_node)
inst_str = self.genActivationCall(out_var_name1, out_var_name3, activation_type) inst_str = genActivationCallStr(out_var_name1, out_var_name3, activation_type)
self.program_str += inst_str self.program_str += inst_str
......
import sys import sys
from frontend.utils import *
class HPVMTranslator: class HPVMTranslator:
...@@ -285,6 +286,50 @@ class HPVMTranslator: ...@@ -285,6 +286,50 @@ class HPVMTranslator:
self.root_str += self.genHpvmNodeEdges(out_var_name, input_var_name, weight_name) self.root_str += self.genHpvmNodeEdges(out_var_name, input_var_name, weight_name)
def genDepthwiseConvNode(self, cur_node):
    """Emit the HPVM leaf node and DFG edges for a DepthwiseConv2D layer.

    Builds a node body that calls __visc__tensor_group_convolution with the
    group count set to the input channel count (one group per channel, which
    is what makes the convolution depthwise), appends it to self.node_str,
    and wires the layer input plus the "<layer_name>_w" weight tensor into
    the root graph via self.root_str.

    Args:
        cur_node: frontend DNN-graph node for a DepthwiseConv2D layer;
            must expose .weights, .strides, .padding, and .layer_name.
    """
    out_var_name = self.getVariableName(cur_node)
    # 2 == number of tensor operands bound to this node (input, weights).
    inst_str = self.genNodeHeader(out_var_name, 2)

    weights = cur_node.weights
    strides = cur_node.strides

    if cur_node.padding.strip() == "valid":
        padding = 0
    else:
        # "same" padding: symmetric pad derived from the kernel size.
        # NOTE(review): assumes a square, odd-sized kernel — confirm.
        padding = int((weights.shape[0] - 1) / 2)

    prev_padding = self.getPrevLayerPadding(cur_node)
    if prev_padding is not None:
        # FIXME: currently only supporting symmetric padding
        padding = prev_padding[0][0]

    inst_str += " void *r = __visc__tensor_group_convolution(t1, t2, "
    inst_str += str(padding) + ", "
    inst_str += str(padding) + ", "
    inst_str += str(strides[0]) + ", "
    inst_str += str(strides[1]) + ", "
    inst_str += "1, "  # conv mode / precision flag passed through unchanged

    # Depthwise == grouped convolution with one group per input channel;
    # channel count C is the third axis of the HWCN-ordered weight tensor.
    C = weights.shape[2]
    inst_str += str(C) + "); \n"

    inst_str += self.genNodeFooter(2)
    self.node_str += inst_str

    # Root-graph edges: layer input and the layer's weight tensor.
    input_var_name = self.getSingleInputName(cur_node)
    weight_name = cur_node.layer_name + "_w"
    self.root_str += self.genHpvmNodeEdges(out_var_name, input_var_name, weight_name)
def genBiasNode(self, cur_node): def genBiasNode(self, cur_node):
input_var_name = self.output_map[cur_node.layer_name] input_var_name = self.output_map[cur_node.layer_name]
...@@ -384,13 +429,18 @@ class HPVMTranslator: ...@@ -384,13 +429,18 @@ class HPVMTranslator:
if layer_type == "Conv2D": if layer_type == "Conv2D":
self.genConvNode(cur_node) self.genConvNode(cur_node)
if layer_type == "DepthwiseConv2D":
self.genDepthwiseConvNode(cur_node)
if layer_type == "Dense": if layer_type == "Dense":
self.genDenseNode(cur_node) self.genDenseNode(cur_node)
if self.hasBiasAdd(cur_node): if nodeHasBias(cur_node):
#if self.hasBiasAdd(cur_node):
self.genBiasNode(cur_node) self.genBiasNode(cur_node)
if self.hasActivation(cur_node): if nodeHasActivation(cur_node) and layer_type != "Activation":
#if self.hasActivation(cur_node):
self.genSubActivationNode(cur_node) self.genSubActivationNode(cur_node)
if layer_type == "Activation": if layer_type == "Activation":
...@@ -536,11 +586,7 @@ class HPVMTranslator: ...@@ -536,11 +586,7 @@ class HPVMTranslator:
def generateSourceProgram(self, dir_prefix): def generateSourceProgram(self, dir_prefix):
#print (self.node_str)
#print (self.root_str)
#print (self.root_struct_str)
#print (self.main_func_str)
program_str = self.file_header_str + self.node_str + self.root_str program_str = self.file_header_str + self.node_str + self.root_str
program_str += self.root_struct_str + self.main_func_str program_str += self.root_struct_str + self.main_func_str
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment