diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
index d22eb10ad399f82ff77f39123b1bac7a955ecffd..fe2873e0be34dceeadc405dab95703a20cf5feb0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
@@ -55,9 +55,9 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
     }.foreach(println)
 
   */
-  
-  val f = children.size match {
-    case 0 => 
+
+  private[this] val f = children.size match {
+    case 0 =>
       val func = function.asInstanceOf[() => Any]
       (input: Row) => {
         func()
@@ -956,7 +956,7 @@ case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expressi
   }
 
   // scalastyle:on
-  val converter = CatalystTypeConverters.createToCatalystConverter(dataType)
+  private[this] val converter = CatalystTypeConverters.createToCatalystConverter(dataType)
   override def eval(input: Row): Any = converter(f(input))
 
 }
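
Note on the change (not part of the patch): marking `f` and `converter` as `private[this]` makes them object-private, so the Scala compiler emits no accessor methods and `eval` reads plain JVM fields on every row, while the UDF wrapper's internals stay out of the public API. Below is a minimal, self-contained sketch of that pattern; `SimpleRow` and `SimpleUdf1` are hypothetical stand-ins for illustration, not Catalyst classes.

// Minimal sketch of the `private[this]` pattern applied by the patch.
// `SimpleRow` and `SimpleUdf1` are illustrative stand-ins, not Spark classes.
final case class SimpleRow(values: Seq[Any]) {
  def apply(i: Int): Any = values(i)
}

// A UDF wrapper shaped roughly like ScalaUdf, specialized to one argument.
final class SimpleUdf1(function: Any => Any, convert: Any => Any, argIndex: Int) {
  // Object-private vals: no getter is generated, and they are inaccessible
  // even to other instances of SimpleUdf1, so eval reads the fields directly.
  private[this] val f: SimpleRow => Any = (input: SimpleRow) => function(input(argIndex))
  private[this] val converter: Any => Any = convert

  def eval(input: SimpleRow): Any = converter(f(input))
}

object SimpleUdf1 {
  def main(args: Array[String]): Unit = {
    val doubleUdf = new SimpleUdf1(x => x.asInstanceOf[Int] * 2, identity, argIndex = 0)
    println(doubleUdf.eval(SimpleRow(Seq(21)))) // prints 42
  }
}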