Skip to content
Snippets Groups Projects
Commit ea8cea82 authored by Kan Zhang's avatar Kan Zhang Committed by Patrick Wendell
Browse files

[SPARK-1570] Fix classloading in JavaSQLContext.applySchema

I think I hit a class loading issue when running JavaSparkSQL example using spark-submit in local mode.

Author: Kan Zhang <kzhang@apache.org>

Closes #484 from kanzhang/SPARK-1570 and squashes the following commits:

feaaeba [Kan Zhang] [SPARK-1570] Fix classloading in JavaSQLContext.applySchema
parent 0ea0b1a2
No related branches found
No related tags found
No related merge requests found
@@ -28,6 +28,7 @@ import org.apache.spark.sql.catalyst.expressions.{AttributeReference, GenericRow
 import org.apache.spark.sql.catalyst.types._
 import org.apache.spark.sql.parquet.ParquetRelation
 import org.apache.spark.sql.execution.{ExistingRdd, SparkLogicalPlan}
+import org.apache.spark.util.Utils
 /**
  * The entry point for executing Spark SQL queries from a Java program.
@@ -84,10 +85,11 @@ class JavaSQLContext(sparkContext: JavaSparkContext) {
    */
   def applySchema(rdd: JavaRDD[_], beanClass: Class[_]): JavaSchemaRDD = {
     val schema = getSchema(beanClass)
-    val className = beanClass.getCanonicalName
+    val className = beanClass.getName
     val rowRdd = rdd.rdd.mapPartitions { iter =>
       // BeanInfo is not serializable so we must rediscover it remotely for each partition.
-      val localBeanInfo = Introspector.getBeanInfo(Class.forName(className))
+      val localBeanInfo = Introspector.getBeanInfo(
+        Class.forName(className, true, Utils.getContextOrSparkClassLoader))
       val extractors =
         localBeanInfo.getPropertyDescriptors.filterNot(_.getName == "class").map(_.getReadMethod)
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.