diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
index 0e89f71dc1cf3a3833a72443f042ee452a298150..0284ecc0d9f75b16f9c15ecf556cfed2bc748028 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
@@ -413,10 +413,10 @@ object StructType extends AbstractDataType {
     StructType(fields.asScala)
   }
 
-  protected[sql] def fromAttributes(attributes: Seq[Attribute]): StructType =
+  private[sql] def fromAttributes(attributes: Seq[Attribute]): StructType =
     StructType(attributes.map(a => StructField(a.name, a.dataType, a.nullable, a.metadata)))
 
-  def removeMetadata(key: String, dt: DataType): DataType =
+  private[sql] def removeMetadata(key: String, dt: DataType): DataType =
     dt match {
       case StructType(fields) =>
        val newFields = fields.map { f =>
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
index 18a312801072dde3256c4959da50beaae7c41351..84fdcfea3c8f35305db262ed84ba67444b6b83eb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
@@ -544,7 +544,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContex
     }
   }
 
-  test("Do not push down filters incorrectly when inner name and outer name are the same") {
+  test("SPARK-16371 Do not push down filters when inner name and outer name are the same") {
     withParquetDataFrame((1 to 4).map(i => Tuple1(Tuple1(i)))) { implicit df =>
      // Here the schema becomes as below:
      //
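
For context, a minimal standalone sketch (not part of the patch; object name, path, and predicate are illustrative) of the SPARK-16371 scenario the renamed test covers: the outer struct column and its inner field share the same name `_1`, so a filter on the nested field must not be pushed down to Parquet as if it referred to the top-level column.

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical sketch, assuming a local Spark build on the classpath.
object Spark16371Sketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("spark-16371-sketch")
      .getOrCreate()
    import spark.implicits._

    // Outer column "_1" is a struct whose inner field is also named "_1":
    // root
    //  |-- _1: struct
    //  |    |-- _1: integer
    val path = "/tmp/spark-16371-sketch"  // illustrative path
    (1 to 4).map(i => Tuple1(Tuple1(i))).toDF().write.mode("overwrite").parquet(path)

    // The predicate refers to the nested field _1._1. Pushing it down to
    // Parquet as a filter on the top-level column "_1" would be incorrect,
    // which is what the renamed test guards against.
    spark.read.parquet(path).where("_1._1 > 2").show()

    spark.stop()
  }
}
```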