diff --git a/llvm/projects/keras/frontend/promise_translator.py b/llvm/projects/keras/frontend/promise_translator.py
index ab8008293d39ccc1278e0a45bc51a1ed974a1439..f5a269f1afe33a950ec33ad72d7136d72d005586 100644
--- a/llvm/projects/keras/frontend/promise_translator.py
+++ b/llvm/projects/keras/frontend/promise_translator.py
@@ -3,7 +3,7 @@
 import numpy as np
 import sys
 from keras import backend as K
-
+from frontend.utils import *
 
 
 class State:
@@ -599,13 +599,29 @@ class PromiseRtTranslator:
 
     # FIX: ADD code for TensorAdd and ACTIVATION
     # TODO: ADD code for TensorAdd and ACTIVATION
-    
-    print (promise_layer_str)
-    
+
+    input_var = output_var
+    if nodeHasBias(conv_op):
+      output_var2 = self.getVariableName(conv_op)
+      promise_layer_str += "void* " + output_var2 + " = "
+      promise_layer_str += "tensorAdd(" + input_var + ", "
+      promise_layer_str += conv_op.layer_name + "_b"
+      promise_layer_str += "); \n"
+
+      # Update variable that holds input for next operation
+      input_var = output_var2
+
+
+    if nodeHasActivation(conv_op):
+      activation_type = conv_op.activation_type
+      output_var = self.getVariableName(conv_op)
+      promise_layer_str += genActivationCallStr(input_var, output_var, activation_type)
+
+
+    print (promise_layer_str)
     self.program_str += promise_layer_str
 
-   
-    
+
     self.appendLayerString("DepthwiseConv", state)
 
     state.clear()
diff --git a/llvm/projects/keras/frontend/utils.py b/llvm/projects/keras/frontend/utils.py
index 6343dd9eecef7113e871ebe45d07d5d359a1181a..c4bbe1cde84732f36fcac46e7f5094223bcbcef0 100644
--- a/llvm/projects/keras/frontend/utils.py
+++ b/llvm/projects/keras/frontend/utils.py
@@ -17,3 +17,25 @@ def nodeHasActivation(cur_node):
     return True
   else:
     return False
+
+
+def genActivationCallStr(input_var, output_var, activation_type):
+
+  func_name = ""
+  if activation_type == "tanh":
+    func_name = "Tanh"
+  elif activation_type == "relu":
+    func_name = "Relu"
+  elif activation_type == "softmax":
+    func_name = "Softmax"
+  else:
+    raise ValueError("Unsupported activation type: " + activation_type)
+
+  inst_str = "void* " + output_var + " = "
+  inst_str += "tensor" + func_name + "(" + input_var + "); \n"
+
+  print ("***** inst_str = ", inst_str, "\n")
+
+  return inst_str
+
+
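
For reference, here is a minimal usage sketch of the new helper. It is only an
illustration: the variable names (var_1, var_2, var_3) and the layer name
(depthwise_conv2d_1) are hypothetical, and it assumes the patched frontend/utils.py
is importable, mirroring the new wildcard import added to promise_translator.py.

    from frontend.utils import genActivationCallStr

    # Suppose the DepthwiseConv handler has already emitted the bias add:
    #   void* var_2 = tensorAdd(var_1, depthwise_conv2d_1_b);
    # The activation call that follows is produced by the new helper:
    call_str = genActivationCallStr("var_2", "var_3", "relu")
    # call_str == "void* var_3 = tensorRelu(var_2); \n"

The handler threads input_var through both steps, so the activation consumes the
bias-add result when a bias is present, and the raw convolution output otherwise.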