From 0a3b325463781ce0891bd41646d1d142d00cd40b Mon Sep 17 00:00:00 2001 From: Hashim Sharif <hsharif3@tyler.cs.illinois.edu> Date: Thu, 11 Jul 2019 16:39:02 -0500 Subject: [PATCH] Handling DepthwiseConv2D for Promise API translation --- .../keras/frontend/promise_translator.py | 28 +++++++++++++++---- llvm/projects/keras/frontend/utils.py | 22 +++++++++++++++ 2 files changed, 44 insertions(+), 6 deletions(-) diff --git a/llvm/projects/keras/frontend/promise_translator.py b/llvm/projects/keras/frontend/promise_translator.py index ab8008293d..f5a269f1af 100644 --- a/llvm/projects/keras/frontend/promise_translator.py +++ b/llvm/projects/keras/frontend/promise_translator.py @@ -3,7 +3,7 @@ import numpy as np import sys from keras import backend as K - +from frontend.utils import * class State: @@ -599,13 +599,29 @@ class PromiseRtTranslator: # FIX: ADD code for TensorAdd and ACTIVATION # TODO: ADD code for TensorAdd and ACTIVATION - - print (promise_layer_str) - + + input_var = output_var + if nodeHasBias(conv_op): + output_var2 = self.getVariableName(conv_op) + promise_layer_str += "void* " + output_var2 + " = " + promise_layer_str += "tensorAdd(" + input_var + ", " + promise_layer_str += conv_op.layer_name + "_b" + promise_layer_str += "); \n" + + # Update variable that holds input for next operation + input_var = output_var2 + + + if nodeHasActivation(conv_op): + activation_type = conv_op.activation_type + output_var = self.getVariableName(conv_op) + promise_layer_str += genActivationCallStr(input_var, output_var, activation_type) + + + print (promise_layer_str) self.program_str += promise_layer_str - - + self.appendLayerString("DepthwiseConv", state) state.clear() diff --git a/llvm/projects/keras/frontend/utils.py b/llvm/projects/keras/frontend/utils.py index 6343dd9eec..c4bbe1cde8 100644 --- a/llvm/projects/keras/frontend/utils.py +++ b/llvm/projects/keras/frontend/utils.py @@ -17,3 +17,25 @@ def nodeHasActivation(cur_node): return True else: return False + + +def 
def genActivationCallStr(input_var, output_var, activation_type):
    """Generate a PROMISE-runtime C statement applying an activation function.

    Parameters
    ----------
    input_var : str
        Name of the C variable holding the activation's input tensor.
    output_var : str
        Name of the new C variable that receives the activation's result.
    activation_type : str
        Keras activation name; one of "tanh", "relu", or "softmax".

    Returns
    -------
    str
        A statement of the form ``void* <out> = tensor<Func>(<in>); \\n``.

    Raises
    ------
    ValueError
        If *activation_type* is not a supported activation. (The previous
        version silently emitted an invalid ``tensor(`` call in that case.)
    """
    # Map Keras activation names to the tensor-runtime call suffixes.
    _SUFFIXES = {"tanh": "Tanh", "relu": "Relu", "softmax": "Softmax"}

    if activation_type not in _SUFFIXES:
        raise ValueError("Unsupported activation type: " + str(activation_type))

    # NOTE: trailing "; \n" (space before newline) matches the code-emission
    # style used by the other promise_translator emitters — keep byte-identical.
    inst_str = "void* " + output_var + " = "
    inst_str += "tensor" + _SUFFIXES[activation_type] + "(" + input_var + "); \n"

    return inst_str