diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
index 2409b5d203f40ca204e38feb2bef6cda3cb8c4e0..282f26ce998fea14fdb936569067056920185b34 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.execution.command
 
-import java.util.NoSuchElementException
-
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.{Row, SparkSession}
 import org.apache.spark.sql.catalyst.expressions.Attribute
@@ -88,7 +86,7 @@ case class SetCommand(kv: Option[(String, Option[String])]) extends RunnableComm
       }
       val schema = StructType(
         StructField("key", StringType, nullable = false) ::
-          StructField("default", StringType, nullable = false) ::
+          StructField("value", StringType, nullable = false) ::
           StructField("meaning", StringType, nullable = false) :: Nil)
       (schema.toAttributes, runFunc)
 
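With the rename above, the output of `SET -v` exposes the same `key`/`value` column names as plain `SET`, so the same projection works against either command. A hedged illustration, assuming a `spark: SparkSession` in scope as in spark-shell (not part of the patch itself):

  // After this change, both commands expose a "value" column; previously
  // `SET -v` required select("key", "default") for its second column.
  val current  = spark.sql("SET").select("key", "value")
  val allConfs = spark.sql("SET -v").select("key", "value")
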
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 7933d12e284f17716806b4cf1ee043d33e513690..518430f16d712f3fd4052b5e03ce18032e8bdc94 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -752,7 +752,7 @@ private[sql] class SQLConf extends Serializable with CatalystConf with Logging {
    */
   def getAllDefinedConfs: Seq[(String, String, String)] = sqlConfEntries.synchronized {
     sqlConfEntries.values.asScala.filter(_.isPublic).map { entry =>
-      (entry.key, entry.defaultValueString, entry.doc)
+      (entry.key, getConfString(entry.key, entry.defaultValueString), entry.doc)
     }.toSeq
   }
 
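The switch from `entry.defaultValueString` to `getConfString(entry.key, entry.defaultValueString)` is what makes `SET -v` report current values: the lookup returns the user-set value when one exists and falls back to the entry's default otherwise. A minimal self-contained sketch of that lookup-with-fallback pattern, with `settings` standing in for SQLConf's internal conf map (names are illustrative, not SQLConf's actual fields):

  // Hedged sketch: return the explicitly set value if present, else the default.
  import java.util.concurrent.ConcurrentHashMap

  val settings = new ConcurrentHashMap[String, String]()

  def getConfString(key: String, defaultValue: String): String =
    Option(settings.get(key)).getOrElse(defaultValue)

  settings.put("spark.sql.groupByOrdinal", "false")
  assert(getConfString("spark.sql.groupByOrdinal", "true") == "false")  // overridden
  assert(getConfString("spark.sql.orderByOrdinal", "true") == "true")   // default
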
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index 7ead97bbf6937210818ba9d9c6dc1da087eb7daa..81bc973be74a3ed9edcf6913bd637e51f312a95c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.internal
 
-import org.apache.spark.sql.{QueryTest, SparkSession, SQLContext}
+import org.apache.spark.sql.{QueryTest, Row, SparkSession, SQLContext}
 import org.apache.spark.sql.test.{SharedSQLContext, TestSQLContext}
 
 class SQLConfSuite extends QueryTest with SharedSQLContext {
@@ -75,6 +75,27 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
     spark.wrapped.conf.clear()
   }
 
+  test("set command for display") {
+    spark.wrapped.conf.clear()
+    checkAnswer(
+      sql("SET").where("key = 'spark.sql.groupByOrdinal'").select("key", "value"),
+      Nil)
+
+    checkAnswer(
+      sql("SET -v").where("key = 'spark.sql.groupByOrdinal'").select("key", "value"),
+      Row("spark.sql.groupByOrdinal", "true"))
+
+    sql("SET spark.sql.groupByOrdinal=false")
+
+    checkAnswer(
+      sql("SET").where("key = 'spark.sql.groupByOrdinal'").select("key", "value"),
+      Row("spark.sql.groupByOrdinal", "false"))
+
+    checkAnswer(
+      sql("SET -v").where("key = 'spark.sql.groupByOrdinal'").select("key", "value"),
+      Row("spark.sql.groupByOrdinal", "false"))
+  }
+
   test("deprecated property") {
     spark.wrapped.conf.clear()
     val original = spark.conf.get(SQLConf.SHUFFLE_PARTITIONS)
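Taken together, the three changes make the two commands consistent: `SET` lists only confs that were explicitly set, while `SET -v` lists every public conf with its current value (rather than its default) plus its documentation. A hedged end-to-end sketch of the observable behavior the new test asserts, again assuming a spark-shell session (the printed value is illustrative):

  spark.sql("SET spark.sql.groupByOrdinal=false")

  // `SET -v` now reflects the override instead of the default "true".
  spark.sql("SET -v")
    .where("key = 'spark.sql.groupByOrdinal'")
    .select("key", "value")
    .show()  // prints ("spark.sql.groupByOrdinal", "false")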