diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlSerializer.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlSerializer.scala
index 1c3196ae2e7b6d9de8f1837ce259af78bb0218dd..915f551fb2f018c3c43483805b80e4ceae4ca551 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlSerializer.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlSerializer.scala
@@ -32,6 +32,7 @@ class SparkSqlSerializer(conf: SparkConf) extends KryoSerializer(conf) {
     kryo.setRegistrationRequired(false)
     kryo.register(classOf[MutablePair[_, _]])
     kryo.register(classOf[Array[Any]])
+    kryo.register(classOf[scala.collection.immutable.Map.Map1[_, _]], new MapSerializer)
     kryo.register(classOf[org.apache.spark.sql.catalyst.expressions.GenericRow])
     kryo.register(classOf[org.apache.spark.sql.catalyst.expressions.GenericMutableRow])
     kryo.register(classOf[scala.collection.mutable.ArrayBuffer[_]])
@@ -70,3 +71,22 @@ class BigDecimalSerializer extends Serializer[BigDecimal] {
     BigDecimal(input.readString())
   }
 }
+
+/**
+ * Maps do not have a no-arg constructor and so cannot be serialized by default. Instead, we
+ * serialize them as a flat `Array[Any]` of alternating keys and values.
+ */
+class MapSerializer extends Serializer[Map[_, _]] {
+  def write(kryo: Kryo, output: Output, map: Map[_, _]) {
+    // Flatten the map into an array of alternating keys and values: k1, v1, k2, v2, ...
+    kryo.writeObject(output, map.flatMap(e => Seq(e._1, e._2)).toArray)
+  }
+
+  def read(kryo: Kryo, input: Input, tpe: Class[Map[_, _]]): Map[_, _] = {
+    // Read the flat array back and regroup consecutive elements into (key, value) pairs.
+    kryo.readObject(input, classOf[Array[Any]])
+      .sliding(2, 2)
+      .map { case Array(k, v) => (k, v) }
+      .toMap
+  }
+}
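
A minimal sketch (not part of the patch) of how the registration and MapSerializer above are
expected to behave: a single-entry immutable Map is flattened to an Array[Any] on write and
rebuilt from it on read. The standalone Kryo setup, the object wrapper, and the sample data are
illustrative assumptions; only the registration call and MapSerializer come from the diff.

    import com.esotericsoftware.kryo.Kryo
    import com.esotericsoftware.kryo.io.{Input, Output}
    import org.apache.spark.sql.execution.MapSerializer

    object MapSerializerRoundTrip {
      def main(args: Array[String]): Unit = {
        val kryo = new Kryo()
        kryo.setRegistrationRequired(false)
        // Same registration as in SparkSqlSerializer.newKryo(): single-entry
        // immutable maps are handled by MapSerializer.
        kryo.register(classOf[scala.collection.immutable.Map.Map1[_, _]], new MapSerializer)

        val original = Map("key" -> 1)  // Map.apply with one entry yields a Map.Map1

        // write() flattens the map to Array("key", 1) and writes it with Kryo.
        val output = new Output(1024, -1)
        kryo.writeObject(output, original)

        // read() reads the array back, regroups it into pairs, and rebuilds the map.
        val input = new Input(output.toBytes)
        val restored = kryo.readObject(input, classOf[scala.collection.immutable.Map.Map1[_, _]])

        assert(restored == original)
      }
    }

One upshot of the flat layout is that only Array[Any] (already registered above) needs to be
serialized, rather than Tuple2 instances, which, like Map, lack a no-arg constructor.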