diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index f10d36862770785b9f20c70b495b81da9e25cdba..f3b45799c568821fbf0dba97a27ea253db18631d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -243,7 +243,7 @@ class CodegenContext {
     mutable.Map(outerClassName -> mutable.Map.empty[String, String])
 
   // Verbatim extra code to be added to the OuterClass.
-  private val extraCode: mutable.ListBuffer[String] = mutable.ListBuffer[String]()
+  private val extraClasses: mutable.ListBuffer[String] = mutable.ListBuffer[String]()
 
   // Returns the size of the most recently added class.
   private def currClassSize(): Int = classSize(classes.head._1)
@@ -332,19 +332,18 @@ class CodegenContext {
   }
 
   /**
-   * Emits any source code added with addExtraCode
+   * Emits extra inner classes added with addInnerClass
    */
   def emitExtraCode(): String = {
-    extraCode.mkString("\n")
+    extraClasses.mkString("\n")
   }
 
   /**
    * Add extra source code to the outermost generated class.
-   * @param code verbatim source code to be added.
+   * @param code verbatim source code of the inner class to be added.
    */
-  def addExtraCode(code: String): Unit = {
-    extraCode.append(code)
-    classSize(outerClassName) += code.length
+  def addInnerClass(code: String): Unit = {
+    extraClasses.append(code)
   }
 
   final val JAVA_BOOLEAN = "boolean"
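
For context on how the renamed API is meant to be used, the following is a minimal standalone sketch, not Spark's actual CodegenContext or its generated output: the class SketchContext, the outerClassSource helper, and the GeneratedOuterClass/agg_GeneratedHelper names are all hypothetical. It only illustrates the idea that inner-class source registered through addInnerClass is collected verbatim and later emitted once, via emitExtraCode, into the body of the outer generated class.

import scala.collection.mutable

// Simplified stand-in for CodegenContext, reduced to the members touched above.
class SketchContext {
  // Verbatim source of extra inner classes to be added to the outer class.
  private val extraClasses = mutable.ListBuffer[String]()

  // Register the complete source of one inner class.
  def addInnerClass(code: String): Unit = extraClasses.append(code)

  // Concatenate every registered inner class for splicing into the outer class.
  def emitExtraCode(): String = extraClasses.mkString("\n")

  // Illustrative only: where the emitted inner classes would end up in the
  // source of the outer generated class.
  def outerClassSource(body: String): String =
    s"""public class GeneratedOuterClass {
       |$body
       |${emitExtraCode()}
       |}""".stripMargin
}

object SketchContextDemo extends App {
  val ctx = new SketchContext
  ctx.addInnerClass(
    """private class agg_GeneratedHelper {
      |  void doWork() { /* generated helper logic */ }
      |}""".stripMargin)
  println(ctx.outerClassSource("  /* generated processing loop */"))
}

The HashAggregateExec hunk below shows the one real call site touched by this rename, where the generated inner-class source is handed to the context in the same way.
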
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala
index abdf9530c6c7b52f966b6a8c6370bb8a8b27f4dd..f424096b330e3a29972900f24cb98fe2b7f80165 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala
@@ -621,7 +621,7 @@ case class HashAggregateExec(
         }
       } else ""
     }
-    ctx.addExtraCode(generateGenerateCode())
+    ctx.addInnerClass(generateGenerateCode())
 
     val doAgg = ctx.freshName("doAggregateWithKeys")
     val peakMemory = metricTerm(ctx, "peakMemory")