diff --git a/llvm/projects/keras/frontend/approxhpvm_translator.py b/llvm/projects/keras/frontend/approxhpvm_translator.py
index 13f61876206a63333689758dd1cc41af2dc0767f..d58a61921eccd3dee68eca07d42a62dc7b2ebfcf 100644
--- a/llvm/projects/keras/frontend/approxhpvm_translator.py
+++ b/llvm/projects/keras/frontend/approxhpvm_translator.py
@@ -336,24 +336,23 @@ class TensorRtTranslator:
     return False
 
 
-  def genActivationCall(self, input_var, output_var, activation_type):
-
-    func_name = ""
-    if activation_type == "tanh":
-      func_name = "Tanh"
-
-    if activation_type == "relu":
-      func_name = "Relu"
+  #def genActivationCall(self, input_var, output_var, activation_type):
+
+  #  func_name = ""
+  #  if activation_type == "tanh":
+  #    func_name = "Tanh"
 
-    if activation_type == "softmax":
-      func_name = "Softmax"
+  #  if activation_type == "relu":
+  #    func_name = "Relu"
 
-    inst_str = "void* " + output_var + " = "
-    inst_str += "tensor" + func_name + "(" + input_var + "); \n"
+  #  if activation_type == "softmax":
+  #    func_name = "Softmax"
 
-    print ("***** inst_str = ", inst_str, "\n")
+  #  inst_str = "void* " + output_var + " = "
+  #  inst_str += "tensor" + func_name + "(" + input_var + "); \n"
+  #  print ("***** inst_str = ", inst_str, "\n")
     
-    return inst_str
+  #  return inst_str
 
   
       
@@ -425,8 +424,8 @@ class TensorRtTranslator:
 
     if layer_type == "Activation":
       input_var_name = self.getSingleInputName(cur_node)
-
-      inst_str = self.genActivationCall(input_var_name, out_var_name1, cur_node.activation_type)
+      
+      inst_str = genActivationCallStr(input_var_name, out_var_name1, cur_node.activation_type)
       self.program_str += inst_str
       
     
@@ -434,7 +433,7 @@ class TensorRtTranslator:
       activation_type = cur_node.activation_type
       out_var_name3 = self.getVariableName(cur_node)    
 
-      inst_str = self.genActivationCall(out_var_name1, out_var_name3, activation_type)
+      inst_str = genActivationCallStr(out_var_name1, out_var_name3, activation_type)
       self.program_str += inst_str  
 
         
diff --git a/llvm/projects/keras/frontend/hpvm_dfg_translator.py b/llvm/projects/keras/frontend/hpvm_dfg_translator.py
index 8f33a4c5e0b96c873827d3d53baa842b97477170..3ddb675c7b01e94c102d4e37d116729953cf0287 100644
--- a/llvm/projects/keras/frontend/hpvm_dfg_translator.py
+++ b/llvm/projects/keras/frontend/hpvm_dfg_translator.py
@@ -1,5 +1,6 @@
 
 import sys
+from frontend.utils import *
         
 
 class HPVMTranslator:
@@ -285,6 +286,50 @@ class HPVMTranslator:
     self.root_str +=  self.genHpvmNodeEdges(out_var_name, input_var_name, weight_name)
 
 
+  def genDepthwiseConvNode(self, cur_node):
+    #input_var_name = self.getSingleInputName(cur_node)
+    out_var_name = self.getVariableName(cur_node)
+    
+    header_str = self.genNodeHeader(out_var_name, 2)
+    inst_str = header_str 
+
+    weights = cur_node.weights
+    strides = cur_node.strides
+
+    padding = 0
+    if cur_node.padding.strip() == "valid":
+      padding = 0
+    else:
+      # "same" padding: symmetric, half the kernel extent
+      padding = int((weights.shape[0] - 1) / 2)
+
+    prev_padding = self.getPrevLayerPadding(cur_node)
+    if prev_padding is not None:
+      # FIXME: currently only supporting symmetric padding
+      padding = prev_padding[0][0]        
+      
+    inst_str += "  void *r = __visc__tensor_group_convolution(t1, t2, "
+    inst_str += str(padding) + ", "
+    inst_str += str(padding) + ", "
+    inst_str += str(strides[0]) + ", "
+    inst_str += str(strides[1]) + ", " 
+    inst_str += "1, " 
+
+    C = weights.shape[2]
+    inst_str += str(C) + "); \n"
+
+    footer_str = self.genNodeFooter(2)
+    inst_str += footer_str
+        
+    self.node_str += inst_str
+
+    input_var_name = self.getSingleInputName(cur_node)
+    weight_name = cur_node.layer_name + "_w"
+    
+    self.root_str +=  self.genHpvmNodeEdges(out_var_name, input_var_name, weight_name)
+
+    
+
 
   def genBiasNode(self, cur_node):
     input_var_name = self.output_map[cur_node.layer_name]
@@ -384,13 +429,18 @@ class HPVMTranslator:
     if layer_type == "Conv2D":
       self.genConvNode(cur_node)      
 
+    if layer_type == "DepthwiseConv2D":
+      self.genDepthwiseConvNode(cur_node)
+      
     if layer_type == "Dense":
       self.genDenseNode(cur_node)      
-      
-    if self.hasBiasAdd(cur_node):
+
+    if nodeHasBias(cur_node):  
+    #if self.hasBiasAdd(cur_node):
       self.genBiasNode(cur_node)
-            
-    if self.hasActivation(cur_node):
+
+    if nodeHasActivation(cur_node) and layer_type != "Activation":  
+    #if self.hasActivation(cur_node):
       self.genSubActivationNode(cur_node)     
       
     if layer_type == "Activation":
@@ -536,11 +586,7 @@ class HPVMTranslator:
     
 
   def generateSourceProgram(self, dir_prefix):
-    #print (self.node_str)
-    #print (self.root_str)
-    #print (self.root_struct_str)
-    #print (self.main_func_str)
-
+    
     program_str = self.file_header_str + self.node_str + self.root_str
     program_str += self.root_struct_str + self.main_func_str