diff --git a/hpvm/projects/keras/frontend/approxhpvm_translator.py b/hpvm/projects/keras/frontend/approxhpvm_translator.py
index ff75415a0f47eefa88f43ccbd01f42f02ae28dd9..7515bf67da0d4cc604bfcd92450fe78579fc56ac 100644
--- a/hpvm/projects/keras/frontend/approxhpvm_translator.py
+++ b/hpvm/projects/keras/frontend/approxhpvm_translator.py
@@ -521,17 +521,16 @@ class TensorRtTranslator:
       activation_type = cur_node.activation_type
       out_var_name3 = self.getVariableName(cur_node)    
 
-      inst_str = genActivationCallStr(out_var_name1, out_var_name3, activation_type)
-      self.program_str += inst_str  
-
       if activation_type == "softmax":
         print ("Softmax canNOT be part of Dense/Conv Op. Insert: Activation('softmax');")
         sys.exit(0)
+        
+      inst_str = genActivationCallStr(out_var_name1, out_var_name3, activation_type)
+      self.program_str += inst_str  
 
-      #self.json_str += activation_type + "_" + str(self.op_count) + " : 0, \n"
-      #self.op_count += 1
       self.addBaselineKnob(activation_type)
-      
+
+        
 
     if layer_type == "BatchNormalization":
       input_var_name = self.getSingleInputName(cur_node)
@@ -1074,6 +1073,15 @@ def getUniquePath(weights_dir):
   
 
 
+def createRecursiveDir(target_dir):
+  """Create target_dir and any missing parent directories (like `mkdir -p`).
+
+  Unlike a manual split("/") walk, os.makedirs also handles absolute
+  paths (where the first "/"-token is the empty string and os.mkdir("")
+  would raise FileNotFoundError), and exist_ok=True makes the call
+  idempotent for parents that already exist, avoiding a check-then-create race.
+  """
+  os.makedirs(target_dir, exist_ok=True)
 
 #***** Top level External Function ******* 
 def translate_to_approxhpvm(model,
@@ -1088,10 +1096,11 @@ def translate_to_approxhpvm(model,
 
   if not reload_weights:
     weights_dir = getUniquePath(weights_dir)
-    os.mkdir(weights_dir)   
+    createRecursiveDir(weights_dir)
+    
 
   src_dir = getUniquePath(src_dir)
-  os.mkdir(src_dir)   
+  createRecursiveDir(src_dir)
     
   dfg = DFG()    
   for i in range(len(model.layers)):
@@ -1122,7 +1131,6 @@ def translate_to_approxhpvm(model,
 
   
   
-  
   if reload_weights:
     print ("NOTE: Using existing pretrained weights \n")
   else: