diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 06ac37b7f83ed6f0ef6d25d71d7fa128aca58edc..848c59e3b8a531206187181e02b0fbfad2f6b6e9 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -120,6 +120,7 @@ statement
     | ADD identifier .*?                                               #addResource
     | SET ROLE .*?                                                     #failNativeCommand
     | SET .*?                                                          #setConfiguration
+    | RESET                                                            #resetConfiguration
     | unsupportedHiveNativeCommands .*?                                #failNativeCommand
     ;
 
@@ -633,7 +634,7 @@ nonReserved
     | GROUPING | CUBE | ROLLUP
     | EXPLAIN | FORMAT | LOGICAL | FORMATTED | CODEGEN
     | TABLESAMPLE | USE | TO | BUCKET | PERCENTLIT | OUT | OF
-    | SET
+    | SET | RESET
     | VIEW | REPLACE
     | IF
     | NO | DATA
@@ -748,6 +749,7 @@ MAP: 'MAP';
 STRUCT: 'STRUCT';
 COMMENT: 'COMMENT';
 SET: 'SET';
+RESET: 'RESET';
 DATA: 'DATA';
 START: 'START';
 TRANSACTION: 'TRANSACTION';
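
Since `RESET` is added both as a statement alternative and to `nonReserved`, it stays usable as a plain identifier. A minimal sketch of both behaviors, not part of the patch (assumes a `SparkSession` named `spark`):

```scala
// RESET is nonReserved, so it should still be accepted as an ordinary
// identifier, e.g. a column alias, without a parse error.
spark.sql("SELECT 1 AS reset").show()

// The bare statement form matches the new #resetConfiguration alternative.
spark.sql("RESET")
```
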
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 2966eefd07c772eb9d872945ff9bb22e622dc60b..2e3ac9706daf8cf003959452a6b22db93e282c79 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -75,6 +75,18 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
     }
   }
 
+  /**
+   * Create a [[ResetCommand]] logical plan.
+   * Example SQL:

+   * {{{
+   *   RESET;
+   * }}}
+   */
+  override def visitResetConfiguration(
+      ctx: ResetConfigurationContext): LogicalPlan = withOrigin(ctx) {
+    ResetCommand
+  }
+
   /**
    * Create an [[AnalyzeTableCommand]] command. This currently only implements the NOSCAN
    * option (other options are passed on to Hive) e.g.:
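
As a rough check of the new visitor (a sketch only; it assumes code running in Spark's own `org.apache.spark.sql` test scope, where `sessionState` is visible):

```scala
import org.apache.spark.sql.execution.command.ResetCommand

// parsePlan should map a bare RESET statement straight to the
// ResetCommand case object; withOrigin only records position info.
val plan = spark.sessionState.sqlParser.parsePlan("RESET")
assert(plan == ResetCommand)
```
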
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
index 282f26ce998fea14fdb936569067056920185b34..b0e2d03af070d8c6cde2b9d53b7b5796de926db1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
@@ -116,3 +116,19 @@ case class SetCommand(kv: Option[(String, Option[String])]) extends RunnableComm
   override def run(sparkSession: SparkSession): Seq[Row] = runFunc(sparkSession)
 
 }
+
+/**
+ * Resets all SQLConf values in the current session to their defaults. Example SQL:
+ * {{{
+ *   RESET;
+ * }}}
+ */
+case object ResetCommand extends RunnableCommand {
+
+  override def run(sparkSession: SparkSession): Seq[Row] = {
+    sparkSession.sessionState.conf.clear()
+    Seq.empty[Row]
+  }
+
+  override val output: Seq[Attribute] = Seq.empty
+}
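
End to end, the command simply drops session-level overrides so subsequent reads fall back to defaults. A hedged sketch of the expected behavior (assumes a `SparkSession` named `spark`; 200 is the stock default of `spark.sql.shuffle.partitions`):

```scala
// Override a conf for this session, then reset everything.
spark.sql("SET spark.sql.shuffle.partitions=10")
assert(spark.conf.get("spark.sql.shuffle.partitions") == "10")

spark.sql("RESET") // runs ResetCommand, i.e. sessionState.conf.clear()
// After the reset, the read falls back to the built-in default.
assert(spark.conf.get("spark.sql.shuffle.partitions") == "200")
```
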
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index 0296229100a24871a7fc4d46bab970da00987eb6..f8227e3bd6ee8b1ccea0daece0788a84efaed66b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -99,7 +99,7 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
   test("deprecated property") {
     spark.sqlContext.conf.clear()
     val original = spark.conf.get(SQLConf.SHUFFLE_PARTITIONS)
-    try{
+    try {
       sql(s"set ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS}=10")
       assert(spark.conf.get(SQLConf.SHUFFLE_PARTITIONS) === 10)
     } finally {
@@ -107,6 +107,53 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
     }
   }
 
+  test("reset - public conf") {
+    spark.sqlContext.conf.clear()
+    val original = spark.conf.get(SQLConf.GROUP_BY_ORDINAL)
+    try {
+      assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
+      sql(s"set ${SQLConf.GROUP_BY_ORDINAL.key}=false")
+      assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === false)
+      assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 1)
+      sql(s"reset")
+      assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
+      assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 0)
+    } finally {
+      sql(s"set ${SQLConf.GROUP_BY_ORDINAL}=$original")
+    }
+  }
+
+  test("reset - internal conf") {
+    spark.sqlContext.conf.clear()
+    val original = spark.conf.get(SQLConf.NATIVE_VIEW)
+    try {
+      assert(spark.conf.get(SQLConf.NATIVE_VIEW) === true)
+      sql(s"set ${SQLConf.NATIVE_VIEW.key}=false")
+      assert(spark.conf.get(SQLConf.NATIVE_VIEW) === false)
+      assert(sql(s"set").where(s"key = '${SQLConf.NATIVE_VIEW.key}'").count() == 1)
+      sql(s"reset")
+      assert(spark.conf.get(SQLConf.NATIVE_VIEW) === true)
+      assert(sql(s"set").where(s"key = '${SQLConf.NATIVE_VIEW.key}'").count() == 0)
+    } finally {
+      sql(s"set ${SQLConf.NATIVE_VIEW}=$original")
+    }
+  }
+
+  test("reset - user-defined conf") {
+    spark.sqlContext.conf.clear()
+    val userDefinedConf = "x.y.z.reset"
+    try {
+      assert(spark.conf.getOption(userDefinedConf).isEmpty)
+      sql(s"set $userDefinedConf=false")
+      assert(spark.conf.get(userDefinedConf) === "false")
+      assert(sql(s"set").where(s"key = '$userDefinedConf'").count() == 1)
+      sql(s"reset")
+      assert(spark.conf.getOption(userDefinedConf).isEmpty)
+    } finally {
+      spark.conf.unset(userDefinedConf)
+    }
+  }
+
   test("invalid conf value") {
     spark.sqlContext.conf.clear()
     val e = intercept[IllegalArgumentException] {
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index 1402e0a687290291cf1b27aa0adb054002a38eef..33ff8aee799692b46adc06ffd67d000c2c29ce07 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -32,8 +32,8 @@ import org.apache.hadoop.hive.common.{HiveInterruptCallback, HiveInterruptUtils}
 import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.ql.Driver
 import org.apache.hadoop.hive.ql.exec.Utilities
-import org.apache.hadoop.hive.ql.processors.{AddResourceProcessor, CommandProcessor,
-  CommandProcessorFactory, SetProcessor}
+import org.apache.hadoop.hive.ql.processors.{AddResourceProcessor, CommandProcessor,
+  CommandProcessorFactory, ResetProcessor, SetProcessor}
 import org.apache.hadoop.hive.ql.session.SessionState
 import org.apache.thrift.transport.TSocket
 
@@ -312,7 +312,7 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
       if (proc != null) {
         // scalastyle:off println
         if (proc.isInstanceOf[Driver] || proc.isInstanceOf[SetProcessor] ||
-          proc.isInstanceOf[AddResourceProcessor]) {
+          proc.isInstanceOf[AddResourceProcessor] || proc.isInstanceOf[ResetProcessor]) {
           val driver = new SparkSQLDriver
 
           driver.init()
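
With `ResetProcessor` added to the whitelist above, a `reset;` typed at the spark-sql CLI prompt is now executed through `SparkSQLDriver`, and therefore by Spark's own `ResetCommand`, rather than being delegated to Hive's processor.
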