author     Kan Zhang <kzhang@apache.org>  2014-04-22 15:05:12 -0700
committer  Patrick Wendell <pwendell@gmail.com>  2014-04-22 15:05:12 -0700
commit     ea8cea82a02099bb66f1e77b757e4d96cc31d6e2 (patch)
tree       fdc1a603ae1d360869f0640ed1dab503ab828e63 /sql
parent     0ea0b1a2d66044af97ab84bf3014ff8dfe91fee4 (diff)
[SPARK-1570] Fix classloading in JavaSQLContext.applySchema
I think I hit a class loading issue when running the JavaSparkSQL example using spark-submit in local mode.

Author: Kan Zhang <kzhang@apache.org>

Closes #484 from kanzhang/SPARK-1570 and squashes the following commits:

feaaeba [Kan Zhang] [SPARK-1570] Fix classloading in JavaSQLContext.applySchema
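The likely root cause: when an application is launched with spark-submit, the user's jar (and therefore the bean class) is typically exposed through the thread context classloader rather than the loader that loaded Spark's own classes, so a bare `Class.forName(className)` inside the partition closure cannot resolve it. A minimal sketch of the lookup pattern the fix relies on (a simplified stand-in, not Spark's actual `Utils.getContextOrSparkClassLoader`):

```scala
// Simplified sketch: prefer the thread context classloader (where spark-submit
// makes the application jar visible) and fall back to the loader that loaded
// the current class.
def loadUserClass(className: String): Class[_] = {
  val loader = Option(Thread.currentThread().getContextClassLoader)
    .getOrElse(getClass.getClassLoader)
  Class.forName(className, true, loader)
}
```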
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala  6
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
index 26922f7f33..a734708879 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
@@ -28,6 +28,7 @@ import org.apache.spark.sql.catalyst.expressions.{AttributeReference, GenericRow
import org.apache.spark.sql.catalyst.types._
import org.apache.spark.sql.parquet.ParquetRelation
import org.apache.spark.sql.execution.{ExistingRdd, SparkLogicalPlan}
+import org.apache.spark.util.Utils
/**
* The entry point for executing Spark SQL queries from a Java program.
@@ -84,10 +85,11 @@ class JavaSQLContext(sparkContext: JavaSparkContext) {
*/
def applySchema(rdd: JavaRDD[_], beanClass: Class[_]): JavaSchemaRDD = {
val schema = getSchema(beanClass)
- val className = beanClass.getCanonicalName
+ val className = beanClass.getName
val rowRdd = rdd.rdd.mapPartitions { iter =>
// BeanInfo is not serializable so we must rediscover it remotely for each partition.
- val localBeanInfo = Introspector.getBeanInfo(Class.forName(className))
+ val localBeanInfo = Introspector.getBeanInfo(
+ Class.forName(className, true, Utils.getContextOrSparkClassLoader))
val extractors =
localBeanInfo.getPropertyDescriptors.filterNot(_.getName == "class").map(_.getReadMethod)
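The other half of the change, switching from `getCanonicalName` to `getName`, matters for nested bean classes: `getCanonicalName` returns the source-style dotted name, which `Class.forName` cannot resolve, while `getName` returns the binary name with `$` separators. A small illustration using hypothetical `Outer`/`Inner` classes (not part of this patch):

```scala
// Hypothetical nested class, used only to show the two naming schemes.
class Outer {
  class Inner
}

object NameDemo {
  def main(args: Array[String]): Unit = {
    val cls = classOf[Outer#Inner]
    println(cls.getCanonicalName)  // source-style name, e.g. "Outer.Inner"
    println(cls.getName)           // binary name, e.g. "Outer$Inner"
    // Only the binary name is resolvable by Class.forName:
    println(Class.forName(cls.getName) eq cls)  // true
    // Class.forName(cls.getCanonicalName) would throw ClassNotFoundException
  }
}
```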