Commit 727b720a authored by Hashim Sharif

Removing HPVM in/out/inout function argument attributes + Fixing Twine bug

parent 3bc5f65d
@@ -110,7 +110,8 @@ bool CGT_CUDNN::isValidOperandForInPlaceOperation(Value *Op,
DEBUG(errs() << *Arg << "\t: argument, not suitable for in place\n");
return false;
}
} else {
}
else {
// If it is not an argument, then it needs to be the result of
// another intrinsic. These are new objects that are allocated,
// and consumed by next intrinsic.
@@ -125,8 +126,8 @@ bool CGT_CUDNN::isValidOperandForInPlaceOperation(Value *Op,
}
}
void CGT_CUDNN::init() {
// FIXME: what to do here? If anything?
}
// Initialize the VISC runtime API. This makes it easier to insert these calls
@@ -172,12 +173,12 @@ void CGT_CUDNN::initRuntimeAPI() {
}
void CGT_CUDNN::codeGen(DFInternalNode* N) {
errs () << "Inside node: " << N->getFuncPointer()->getName() << "\n";
errs () << "Skipping internal node\n";
}
void CGT_CUDNN::codeGen(DFLeafNode* N) {
// Skip code generation if it is a dummy node
@@ -200,6 +201,18 @@ void CGT_CUDNN::codeGen(DFLeafNode* N) {
// Get the function associated with the dataflow node
Function *F = N->getFuncPointer();
errs()<<"function name = "<< F->getName()<<"\n";
/* Removing HPVM in/out/inout function attributes */
for(Function::arg_iterator ai = F->arg_begin(), ae = F->arg_end(); ai != ae; ai++){
Argument *Arg = &*ai;
if(Arg->hasAttribute(Attribute::In))
Arg->removeAttr(Attribute::In);
if(Arg->hasAttribute(Attribute::Out))
Arg->removeAttr(Attribute::Out);
if(Arg->hasAttribute(Attribute::InOut))
Arg->removeAttr(Attribute::InOut);
}
// Look up if we have visited this function before. If we have, then just
// get the cloned function pointer from DFNode. Otherwise, create the cloned
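
Note: the loop added above strips HPVM's in/out/inout parameter attributes from the leaf-node function, presumably because its body is about to be rewritten into plain cudnn runtime calls where those dataflow annotations no longer apply. For reference, a minimal sketch of the same step written as a range-based loop, assuming the HPVM-extended attribute kinds and the same Argument API used in the diff:

  // Strip the HPVM dataflow attributes (In/Out/InOut) from every argument of F.
  for (Argument &Arg : F->args()) {
    if (Arg.hasAttribute(Attribute::In))
      Arg.removeAttr(Attribute::In);
    if (Arg.hasAttribute(Attribute::Out))
      Arg.removeAttr(Attribute::Out);
    if (Arg.hasAttribute(Attribute::InOut))
      Arg.removeAttr(Attribute::InOut);
  }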
@@ -208,13 +221,14 @@ void CGT_CUDNN::codeGen(DFLeafNode* N) {
assert((F_cudnn == NULL) &&
"Error: Visiting a node for which code already generated");
// Clone the function
ValueToValueMapTy VMap;
Twine FName = F->getName();
std::string FName(F->getName().data());
F_cudnn = CloneFunction(F, VMap);
F_cudnn->setName(FName+"_cudnn");
F_cudnn->removeFromParent();
F_cudnn->setName(FName + "_cudnn");
errs()<<"Cloned function name = "<<F_cudnn->getName()<<"\n";
F_cudnn->removeFromParent();
M.getFunctionList().push_back(F_cudnn);
N->addGenFunc(F_cudnn, visc::CUDNN_TARGET, true);
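
Note: the Twine change above is the "Twine bug" from the commit title. LLVM's Twine is a non-owning helper meant to live only inside a single expression; when built from the temporary StringRef returned by getName(), it keeps a pointer into that temporary, so storing it in a local and concatenating with it later reads dangling data. Copying the name into a std::string first is the safe pattern. A minimal, self-contained sketch of the corrected clone-and-rename step (the surrounding pass state is omitted and the helper name is made up for illustration):

  #include "llvm/IR/Function.h"
  #include "llvm/IR/Module.h"
  #include "llvm/Transforms/Utils/Cloning.h"
  #include "llvm/Transforms/Utils/ValueMapper.h"
  #include <string>

  using namespace llvm;

  // Clone F, give the clone a "_cudnn" suffix, and move it into module M.
  static Function *cloneForCudnn(Function *F, Module &M) {
    ValueToValueMapTy VMap;
    std::string FName(F->getName().data()); // owning copy; a stored Twine would dangle
    Function *F_cudnn = CloneFunction(F, VMap);
    F_cudnn->setName(FName + "_cudnn");     // Twine built and consumed in one expression
    F_cudnn->removeFromParent();            // detach from wherever CloneFunction inserted it
    M.getFunctionList().push_back(F_cudnn); // re-insert into the module this pass works on
    return F_cudnn;
  }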
@@ -269,6 +283,7 @@ void CGT_CUDNN::codeGen(DFLeafNode* N) {
// Create cudnn runtime function call
Constant* tensorGemmGPU;
DECLARE(tensorGemmGPU);
CallInst* CI = CallInst::Create(tensorGemmGPU,
Args, "", II);
// We can replace the call to hpvm.tensor.mul with the runtime call
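
Note: for hpvm.tensor.mul the generated cudnn call produces a fresh output tensor, so the intrinsic's uses are forwarded to the new call itself. A minimal sketch of that lowering pattern, assuming DECLARE boils down to a Module::getOrInsertFunction lookup; RuntimeTy and IItoRemove are placeholder names for this sketch, not names taken from the pass:

  #include "llvm/ADT/ArrayRef.h"
  #include "llvm/IR/Instructions.h"
  #include "llvm/IR/IntrinsicInst.h"
  #include "llvm/IR/Module.h"
  #include <vector>

  using namespace llvm;

  // Lower one hpvm.tensor.mul intrinsic (II) into a call to the tensorGemmGPU
  // runtime entry point. RuntimeTy stands in for the real runtime signature.
  static void lowerTensorMul(Module &M, IntrinsicInst *II, ArrayRef<Value *> Args,
                             FunctionType *RuntimeTy,
                             std::vector<Instruction *> &IItoRemove) {
    // getOrInsertFunction returns Constant* on the LLVM version this pass targets.
    Constant *tensorGemmGPU = M.getOrInsertFunction("tensorGemmGPU", RuntimeTy);
    CallInst *CI = CallInst::Create(tensorGemmGPU, Args, "", II); // insert before II
    II->replaceAllUsesWith(CI); // the runtime call now supplies the result tensor
    IItoRemove.push_back(II);   // the intrinsic is erased after the traversal
  }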
@@ -299,7 +314,7 @@ void CGT_CUDNN::codeGen(DFLeafNode* N) {
// Create cudnn runtime function call
Constant* tensorAdd;
DECLARE(tensorAdd);
CallInst* CI = CallInst::Create(tensorAdd, Args, "", II);
CallInst::Create(tensorAdd, Args, "", II);
// We can replace the call to hpvm.tensor.add with the 1st argument
// that, due to in place operation, now contains the result
II->replaceAllUsesWith(II->getOperand(0));
@@ -328,7 +343,7 @@ void CGT_CUDNN::codeGen(DFLeafNode* N) {
// Create cudnn runtime function call
Constant* tensorRelu;
DECLARE(tensorRelu);
CallInst* CI = CallInst::Create(tensorRelu, Args, "", II);
CallInst::Create(tensorRelu, Args, "", II);
// We can replace the call to hpvm.tensor.relu with the 1st argument
// that, due to in place operation, now contains the result
II->replaceAllUsesWith(II->getOperand(0));
@@ -357,7 +372,7 @@ void CGT_CUDNN::codeGen(DFLeafNode* N) {
// Create cudnn runtime function call
Constant* tensorSoftmax;
DECLARE(tensorSoftmax);
CallInst* CI = CallInst::Create(tensorSoftmax, Args, "", II);
CallInst::Create(tensorSoftmax, Args, "", II);
// We can replace the call to hpvm.tensor.softmax with the 1st argument
// that, due to in place operation, now contains the result
II->replaceAllUsesWith(II->getOperand(0));
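
Note: the add, relu, and softmax lowerings follow an in-place pattern instead, which is also why the unused CI locals were dropped in this commit: the runtime call mutates its first tensor argument, its return value is ignored, and the intrinsic's uses are forwarded to operand 0. A sketch of that variant, reusing the includes and placeholder names from the tensorGemmGPU sketch above:

  // Lower an in-place tensor intrinsic (add/relu/softmax); the runtime call is
  // emitted only for its side effect on the first tensor argument.
  static void lowerInPlaceOp(Module &M, IntrinsicInst *II, ArrayRef<Value *> Args,
                             StringRef RuntimeName, FunctionType *RuntimeTy,
                             std::vector<Instruction *> &IItoRemove) {
    Constant *RuntimeFn = M.getOrInsertFunction(RuntimeName, RuntimeTy);
    CallInst::Create(RuntimeFn, Args, "", II);    // result is written into operand 0
    II->replaceAllUsesWith(II->getOperand(0));    // consumers read the mutated input tensor
    IItoRemove.push_back(II);
  }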
@@ -382,9 +397,6 @@ void CGT_CUDNN::codeGen(DFLeafNode* N) {
(*ri)->eraseFromParent();
}
//errs() << "-----------------------------------\n";
//errs() << *F_cudnn << "\n";
return;
}
......