author     Yin Huai <huai@cse.ohio-state.edu>    2014-08-01 11:14:53 -0700
committer  Michael Armbrust <michael@databricks.com>    2014-08-01 11:14:53 -0700
commit     c41fdf04f4beebe36379396b0c4fff3ab7ad3cf4 (patch)
tree       040e0bcbd00f11badffa5d884359eb68f5449c19
parent     8d338f64c4eda45d22ae33f61ef7928011cc2846 (diff)
[SPARK-2179][SQL] A minor refactoring of the Java data type APIs (2179 follow-up).

This is a follow-up PR to SPARK-2179 (https://issues.apache.org/jira/browse/SPARK-2179). It makes the package names of the data type APIs consistent across languages (Scala: `org.apache.spark.sql`, Java: `org.apache.spark.sql.api.java`, Python: `pyspark.sql`).

Author: Yin Huai <huai@cse.ohio-state.edu>

Closes #1712 from yhuai/javaDataType and squashes the following commits:

62eb705 [Yin Huai] Move package-info.
add4bcb [Yin Huai] Make the package names of data type classes consistent across languages by moving all Java data type classes to package sql.api.java.
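For orientation, here is a minimal sketch of what user code looks like against the relocated Java API. The factory methods are the ones documented in the Javadoc changes below; the static singletons such as DataType.StringType, and the class being printable, are assumptions about the surrounding 1.1-era API that are not shown in this diff.

import java.util.ArrayList;
import java.util.List;

// All data type classes now live in org.apache.spark.sql.api.java
// (previously org.apache.spark.sql.api.java.types).
import org.apache.spark.sql.api.java.DataType;
import org.apache.spark.sql.api.java.StructField;
import org.apache.spark.sql.api.java.StructType;

public class NewPackageSketch {
  public static void main(String[] args) {
    // Build a two-field schema through the DataType factory methods.
    List<StructField> fields = new ArrayList<StructField>();
    // Assumption: DataType.StringType / DataType.IntegerType are static singletons.
    fields.add(DataType.createStructField("name", DataType.StringType, true));
    fields.add(DataType.createStructField("age", DataType.IntegerType, false));
    StructType schema = DataType.createStructType(fields);
    System.out.println(schema);
  }
}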
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/ArrayType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/ArrayType.java) |  6
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/BinaryType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/BinaryType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/BooleanType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/BooleanType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/ByteType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/ByteType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/DataType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/DataType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/DecimalType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/DecimalType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/DoubleType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/DoubleType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/FloatType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/FloatType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/IntegerType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/IntegerType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/LongType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/LongType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/MapType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/MapType.java) |  6
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/ShortType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/ShortType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/StringType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/StringType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/StructField.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/StructField.java) |  4
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/StructType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/StructType.java) |  7
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/TimestampType.java (renamed from sql/core/src/main/java/org/apache/spark/sql/api/java/types/TimestampType.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/package-info.java (renamed from sql/core/src/main/scala/org/apache/spark/sql/package-info.java) |  2
-rw-r--r--  sql/core/src/main/java/org/apache/spark/sql/api/java/types/package-info.java | 22
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala | 60
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSchemaRDD.scala |  1
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/types/util/DataTypeConversions.scala | 30
-rw-r--r--  sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java |  3
-rw-r--r--  sql/core/src/test/java/org/apache/spark/sql/api/java/JavaSideDataTypeConversionSuite.java |  2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/api/java/ScalaSideDataTypeConversionSuite.scala | 59
24 files changed, 108 insertions(+), 118 deletions(-)
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/ArrayType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/ArrayType.java
index 17334ca31b..b73a371e93 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/ArrayType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/ArrayType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing Lists.
@@ -25,8 +25,8 @@ package org.apache.spark.sql.api.java.types;
* {@code null} values.
*
* To create an {@link ArrayType},
- * {@link org.apache.spark.sql.api.java.types.DataType#createArrayType(DataType)} or
- * {@link org.apache.spark.sql.api.java.types.DataType#createArrayType(DataType, boolean)}
+ * {@link DataType#createArrayType(DataType)} or
+ * {@link DataType#createArrayType(DataType, boolean)}
* should be used.
*/
public class ArrayType extends DataType {
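As the Javadoc above says, an ArrayType is obtained through the DataType factory methods rather than a constructor. A short sketch, reusing the imports and assumed DataType.StringType singleton from the example near the top of this page:

// Element type String, default element nullability.
ArrayType defaultNulls = DataType.createArrayType(DataType.StringType);
// Element type String; elements may explicitly be null.
ArrayType withNulls = DataType.createArrayType(DataType.StringType, true);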
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/BinaryType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/BinaryType.java
index 6170317985..7daad60f62 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/BinaryType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/BinaryType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing byte[] values.
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/BooleanType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/BooleanType.java
index 8fa24d85d1..5a1f527256 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/BooleanType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/BooleanType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing boolean and Boolean values.
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/ByteType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/ByteType.java
index 2de32978e2..e5cdf06b21 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/ByteType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/ByteType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing byte and Byte values.
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/DataType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/DataType.java
index f84e5a490a..3eccddef88 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/DataType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/DataType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
import java.util.HashSet;
import java.util.List;
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/DecimalType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/DecimalType.java
index 9250491a2d..bc54c078d7 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/DecimalType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/DecimalType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing java.math.BigDecimal values.
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/DoubleType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/DoubleType.java
index 3e86917fdd..f0060d0bcf 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/DoubleType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/DoubleType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing double and Double values.
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/FloatType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/FloatType.java
index fa860d4017..4a6a37f691 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/FloatType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/FloatType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing float and Float values.
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/IntegerType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/IntegerType.java
index bd973eca2c..bfd70490bb 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/IntegerType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/IntegerType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing int and Integer values.
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/LongType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/LongType.java
index e00233304c..af13a46eb1 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/LongType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/LongType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing long and Long values.
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/MapType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/MapType.java
index 94936e2e4e..063e6b34ab 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/MapType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/MapType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing Maps. A MapType object comprises two fields,
@@ -27,8 +27,8 @@ package org.apache.spark.sql.api.java.types;
* For values of a MapType column, keys are not allowed to have {@code null} values.
*
* To create a {@link MapType},
- * {@link org.apache.spark.sql.api.java.types.DataType#createMapType(DataType, DataType)} or
- * {@link org.apache.spark.sql.api.java.types.DataType#createMapType(DataType, DataType, boolean)}
+ * {@link DataType#createMapType(DataType, DataType)} or
+ * {@link DataType#createMapType(DataType, DataType, boolean)}
* should be used.
*/
public class MapType extends DataType {
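MapType follows the same factory pattern (same assumed imports and singletons as the sketch near the top):

// String keys to Long values, with the default value-nullability.
MapType byDefault = DataType.createMapType(DataType.StringType, DataType.LongType);
// Same key/value types; values may explicitly be null (keys never may, per the Javadoc above).
MapType nullableValues = DataType.createMapType(DataType.StringType, DataType.LongType, true);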
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/ShortType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/ShortType.java
index 98f9507acf..7d7604b4e3 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/ShortType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/ShortType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing short and Short values.
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/StringType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/StringType.java
index b8e7dbe646..f4ba0c07c9 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/StringType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/StringType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing String values.
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/StructField.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/StructField.java
index 54e9c11ea4..b48e2a2c5f 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/StructField.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/StructField.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* A StructField object represents a field in a StructType object.
@@ -26,7 +26,7 @@ package org.apache.spark.sql.api.java.types;
* values.
*
* To create a {@link StructField},
- * {@link org.apache.spark.sql.api.java.types.DataType#createStructField(String, DataType, boolean)}
+ * {@link DataType#createStructField(String, DataType, boolean)}
* should be used.
*/
public class StructField {
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/StructType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/StructType.java
index 33a42f4b16..a4b501efd9 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/StructType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/StructType.java
@@ -15,18 +15,17 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
import java.util.Arrays;
-import java.util.List;
/**
* The data type representing Rows.
* A StructType object comprises an array of StructFields.
*
* To create an {@link StructType},
- * {@link org.apache.spark.sql.api.java.types.DataType#createStructType(java.util.List)} or
- * {@link org.apache.spark.sql.api.java.types.DataType#createStructType(StructField[])}
+ * {@link DataType#createStructType(java.util.List)} or
+ * {@link DataType#createStructType(StructField[])}
* should be used.
*/
public class StructType extends DataType {
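Both StructType factory overloads survive the move; the java.util.List form appears in the sketch near the top of this page, and the array form looks like this (same assumed imports and singletons):

StructField[] fs = new StructField[] {
    DataType.createStructField("id", DataType.IntegerType, false),
    DataType.createStructField("label", DataType.StringType, true)
};
StructType fromArray = DataType.createStructType(fs);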
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/TimestampType.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/TimestampType.java
index 65295779f7..06d44c731c 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/TimestampType.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/TimestampType.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.java.types;
+package org.apache.spark.sql.api.java;
/**
* The data type representing java.sql.Timestamp values.
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/package-info.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/package-info.java
index 5360361451..67007a9f0d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/package-info.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/api/java/package-info.java
@@ -18,4 +18,4 @@
/**
* Allows the execution of relational queries, including those expressed in SQL using Spark.
*/
-package org.apache.spark.sql;
\ No newline at end of file
+package org.apache.spark.sql.api.java;
diff --git a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/package-info.java b/sql/core/src/main/java/org/apache/spark/sql/api/java/types/package-info.java
deleted file mode 100644
index f169ac65e2..0000000000
--- a/sql/core/src/main/java/org/apache/spark/sql/api/java/types/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-/**
- * Allows users to get and create Spark SQL data types.
- */
-package org.apache.spark.sql.api.java.types;
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
index c1c18a0cd0..809dd038f9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala
@@ -23,9 +23,8 @@ import org.apache.hadoop.conf.Configuration
import org.apache.spark.annotation.{DeveloperApi, Experimental}
import org.apache.spark.api.java.{JavaRDD, JavaSparkContext}
-import org.apache.spark.sql.api.java.types.{StructType => JStructType}
import org.apache.spark.sql.json.JsonRDD
-import org.apache.spark.sql._
+import org.apache.spark.sql.{SQLContext, StructType => SStructType}
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, GenericRow, Row => ScalaRow}
import org.apache.spark.sql.parquet.ParquetRelation
import org.apache.spark.sql.execution.{ExistingRdd, SparkLogicalPlan}
@@ -104,9 +103,9 @@ class JavaSQLContext(val sqlContext: SQLContext) {
* provided schema. Otherwise, there will be runtime exception.
*/
@DeveloperApi
- def applySchema(rowRDD: JavaRDD[Row], schema: JStructType): JavaSchemaRDD = {
+ def applySchema(rowRDD: JavaRDD[Row], schema: StructType): JavaSchemaRDD = {
val scalaRowRDD = rowRDD.rdd.map(r => r.row)
- val scalaSchema = asScalaDataType(schema).asInstanceOf[StructType]
+ val scalaSchema = asScalaDataType(schema).asInstanceOf[SStructType]
val logicalPlan =
SparkLogicalPlan(ExistingRdd(scalaSchema.toAttributes, scalaRowRDD))(sqlContext)
new JavaSchemaRDD(sqlContext, logicalPlan)
@@ -133,7 +132,7 @@ class JavaSQLContext(val sqlContext: SQLContext) {
* returning the result as a JavaSchemaRDD.
*/
@Experimental
- def jsonFile(path: String, schema: JStructType): JavaSchemaRDD =
+ def jsonFile(path: String, schema: StructType): JavaSchemaRDD =
jsonRDD(sqlContext.sparkContext.textFile(path), schema)
/**
@@ -155,10 +154,10 @@ class JavaSQLContext(val sqlContext: SQLContext) {
* returning the result as a JavaSchemaRDD.
*/
@Experimental
- def jsonRDD(json: JavaRDD[String], schema: JStructType): JavaSchemaRDD = {
+ def jsonRDD(json: JavaRDD[String], schema: StructType): JavaSchemaRDD = {
val appliedScalaSchema =
Option(asScalaDataType(schema)).getOrElse(
- JsonRDD.nullTypeToStringType(JsonRDD.inferSchema(json.rdd, 1.0))).asInstanceOf[StructType]
+ JsonRDD.nullTypeToStringType(JsonRDD.inferSchema(json.rdd, 1.0))).asInstanceOf[SStructType]
val scalaRowRDD = JsonRDD.jsonStringToRow(json.rdd, appliedScalaSchema)
val logicalPlan =
SparkLogicalPlan(ExistingRdd(appliedScalaSchema.toAttributes, scalaRowRDD))(sqlContext)
@@ -181,22 +180,37 @@ class JavaSQLContext(val sqlContext: SQLContext) {
val fields = beanInfo.getPropertyDescriptors.filterNot(_.getName == "class")
fields.map { property =>
val (dataType, nullable) = property.getPropertyType match {
- case c: Class[_] if c == classOf[java.lang.String] => (StringType, true)
- case c: Class[_] if c == java.lang.Short.TYPE => (ShortType, false)
- case c: Class[_] if c == java.lang.Integer.TYPE => (IntegerType, false)
- case c: Class[_] if c == java.lang.Long.TYPE => (LongType, false)
- case c: Class[_] if c == java.lang.Double.TYPE => (DoubleType, false)
- case c: Class[_] if c == java.lang.Byte.TYPE => (ByteType, false)
- case c: Class[_] if c == java.lang.Float.TYPE => (FloatType, false)
- case c: Class[_] if c == java.lang.Boolean.TYPE => (BooleanType, false)
-
- case c: Class[_] if c == classOf[java.lang.Short] => (ShortType, true)
- case c: Class[_] if c == classOf[java.lang.Integer] => (IntegerType, true)
- case c: Class[_] if c == classOf[java.lang.Long] => (LongType, true)
- case c: Class[_] if c == classOf[java.lang.Double] => (DoubleType, true)
- case c: Class[_] if c == classOf[java.lang.Byte] => (ByteType, true)
- case c: Class[_] if c == classOf[java.lang.Float] => (FloatType, true)
- case c: Class[_] if c == classOf[java.lang.Boolean] => (BooleanType, true)
+ case c: Class[_] if c == classOf[java.lang.String] =>
+ (org.apache.spark.sql.StringType, true)
+ case c: Class[_] if c == java.lang.Short.TYPE =>
+ (org.apache.spark.sql.ShortType, false)
+ case c: Class[_] if c == java.lang.Integer.TYPE =>
+ (org.apache.spark.sql.IntegerType, false)
+ case c: Class[_] if c == java.lang.Long.TYPE =>
+ (org.apache.spark.sql.LongType, false)
+ case c: Class[_] if c == java.lang.Double.TYPE =>
+ (org.apache.spark.sql.DoubleType, false)
+ case c: Class[_] if c == java.lang.Byte.TYPE =>
+ (org.apache.spark.sql.ByteType, false)
+ case c: Class[_] if c == java.lang.Float.TYPE =>
+ (org.apache.spark.sql.FloatType, false)
+ case c: Class[_] if c == java.lang.Boolean.TYPE =>
+ (org.apache.spark.sql.BooleanType, false)
+
+ case c: Class[_] if c == classOf[java.lang.Short] =>
+ (org.apache.spark.sql.ShortType, true)
+ case c: Class[_] if c == classOf[java.lang.Integer] =>
+ (org.apache.spark.sql.IntegerType, true)
+ case c: Class[_] if c == classOf[java.lang.Long] =>
+ (org.apache.spark.sql.LongType, true)
+ case c: Class[_] if c == classOf[java.lang.Double] =>
+ (org.apache.spark.sql.DoubleType, true)
+ case c: Class[_] if c == classOf[java.lang.Byte] =>
+ (org.apache.spark.sql.ByteType, true)
+ case c: Class[_] if c == classOf[java.lang.Float] =>
+ (org.apache.spark.sql.FloatType, true)
+ case c: Class[_] if c == classOf[java.lang.Boolean] =>
+ (org.apache.spark.sql.BooleanType, true)
}
AttributeReference(property.getName, dataType, nullable)()
}
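With the JStructType alias gone, applySchema now accepts the StructType from this very package. A hedged usage sketch follows; Row.create(Object...) and the JavaSparkContext-based constructor are assumptions about the surrounding 1.1-era API, not shown in this diff:

import java.util.Arrays;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.api.java.DataType;
import org.apache.spark.sql.api.java.JavaSQLContext;
import org.apache.spark.sql.api.java.JavaSchemaRDD;
import org.apache.spark.sql.api.java.Row;
import org.apache.spark.sql.api.java.StructType;

public class ApplySchemaSketch {
  public static void main(String[] args) {
    JavaSparkContext sc = new JavaSparkContext("local", "applySchema-sketch");
    JavaSQLContext sqlCtx = new JavaSQLContext(sc);  // assumed convenience constructor

    // One-field schema built with the factory methods from this diff.
    StructType schema = DataType.createStructType(Arrays.asList(
        DataType.createStructField("name", DataType.StringType, true)));

    // Assumption: Row.create(Object...) builds a Java-side Row.
    JavaRDD<Row> rows = sc.parallelize(Arrays.asList(Row.create("alice")));
    JavaSchemaRDD result = sqlCtx.applySchema(rows, schema);
    System.out.println(result.count());
  }
}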
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSchemaRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSchemaRDD.scala
index 8245741498..4d799b4038 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSchemaRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSchemaRDD.scala
@@ -22,7 +22,6 @@ import java.util.{List => JList}
import org.apache.spark.Partitioner
import org.apache.spark.api.java.{JavaRDDLike, JavaRDD}
import org.apache.spark.api.java.function.{Function => JFunction}
-import org.apache.spark.sql.api.java.types.StructType
import org.apache.spark.sql.types.util.DataTypeConversions
import org.apache.spark.sql.{SQLContext, SchemaRDD, SchemaRDDLike}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/types/util/DataTypeConversions.scala b/sql/core/src/main/scala/org/apache/spark/sql/types/util/DataTypeConversions.scala
index d1aa3c8d53..77353f4eb0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/types/util/DataTypeConversions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/types/util/DataTypeConversions.scala
@@ -18,7 +18,7 @@
package org.apache.spark.sql.types.util
import org.apache.spark.sql._
-import org.apache.spark.sql.api.java.types.{DataType => JDataType, StructField => JStructField}
+import org.apache.spark.sql.api.java.{DataType => JDataType, StructField => JStructField}
import scala.collection.JavaConverters._
@@ -74,37 +74,37 @@ protected[sql] object DataTypeConversions {
* Returns the equivalent DataType in Scala for the given DataType in Java.
*/
def asScalaDataType(javaDataType: JDataType): DataType = javaDataType match {
- case stringType: org.apache.spark.sql.api.java.types.StringType =>
+ case stringType: org.apache.spark.sql.api.java.StringType =>
StringType
- case binaryType: org.apache.spark.sql.api.java.types.BinaryType =>
+ case binaryType: org.apache.spark.sql.api.java.BinaryType =>
BinaryType
- case booleanType: org.apache.spark.sql.api.java.types.BooleanType =>
+ case booleanType: org.apache.spark.sql.api.java.BooleanType =>
BooleanType
- case timestampType: org.apache.spark.sql.api.java.types.TimestampType =>
+ case timestampType: org.apache.spark.sql.api.java.TimestampType =>
TimestampType
- case decimalType: org.apache.spark.sql.api.java.types.DecimalType =>
+ case decimalType: org.apache.spark.sql.api.java.DecimalType =>
DecimalType
- case doubleType: org.apache.spark.sql.api.java.types.DoubleType =>
+ case doubleType: org.apache.spark.sql.api.java.DoubleType =>
DoubleType
- case floatType: org.apache.spark.sql.api.java.types.FloatType =>
+ case floatType: org.apache.spark.sql.api.java.FloatType =>
FloatType
- case byteType: org.apache.spark.sql.api.java.types.ByteType =>
+ case byteType: org.apache.spark.sql.api.java.ByteType =>
ByteType
- case integerType: org.apache.spark.sql.api.java.types.IntegerType =>
+ case integerType: org.apache.spark.sql.api.java.IntegerType =>
IntegerType
- case longType: org.apache.spark.sql.api.java.types.LongType =>
+ case longType: org.apache.spark.sql.api.java.LongType =>
LongType
- case shortType: org.apache.spark.sql.api.java.types.ShortType =>
+ case shortType: org.apache.spark.sql.api.java.ShortType =>
ShortType
- case arrayType: org.apache.spark.sql.api.java.types.ArrayType =>
+ case arrayType: org.apache.spark.sql.api.java.ArrayType =>
ArrayType(asScalaDataType(arrayType.getElementType), arrayType.isContainsNull)
- case mapType: org.apache.spark.sql.api.java.types.MapType =>
+ case mapType: org.apache.spark.sql.api.java.MapType =>
MapType(
asScalaDataType(mapType.getKeyType),
asScalaDataType(mapType.getValueType),
mapType.isValueContainsNull)
- case structType: org.apache.spark.sql.api.java.types.StructType =>
+ case structType: org.apache.spark.sql.api.java.StructType =>
StructType(structType.getFields.map(asScalaStructField))
}
}
diff --git a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
index 8ee4591105..3c92906d82 100644
--- a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
+++ b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
@@ -28,9 +28,6 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import org.apache.spark.sql.api.java.types.DataType;
-import org.apache.spark.sql.api.java.types.StructField;
-import org.apache.spark.sql.api.java.types.StructType;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
diff --git a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaSideDataTypeConversionSuite.java b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaSideDataTypeConversionSuite.java
index 96a503962f..d099a48a1f 100644
--- a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaSideDataTypeConversionSuite.java
+++ b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaSideDataTypeConversionSuite.java
@@ -24,8 +24,6 @@ import org.junit.Assert;
import org.junit.Test;
import org.apache.spark.sql.types.util.DataTypeConversions;
-import org.apache.spark.sql.api.java.types.DataType;
-import org.apache.spark.sql.api.java.types.StructField;
public class JavaSideDataTypeConversionSuite {
public void checkDataType(DataType javaDataType) {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/api/java/ScalaSideDataTypeConversionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/api/java/ScalaSideDataTypeConversionSuite.scala
index 46de6fe239..ff1debff0f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/api/java/ScalaSideDataTypeConversionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/api/java/ScalaSideDataTypeConversionSuite.scala
@@ -20,12 +20,13 @@ package org.apache.spark.sql.api.java
import org.apache.spark.sql.types.util.DataTypeConversions
import org.scalatest.FunSuite
-import org.apache.spark.sql._
+import org.apache.spark.sql.{DataType => SDataType, StructField => SStructField}
+import org.apache.spark.sql.{StructType => SStructType}
import DataTypeConversions._
class ScalaSideDataTypeConversionSuite extends FunSuite {
- def checkDataType(scalaDataType: DataType) {
+ def checkDataType(scalaDataType: SDataType) {
val javaDataType = asJavaDataType(scalaDataType)
val actual = asScalaDataType(javaDataType)
assert(scalaDataType === actual, s"Converted data type ${actual} " +
@@ -34,48 +35,52 @@ class ScalaSideDataTypeConversionSuite extends FunSuite {
test("convert data types") {
// Simple DataTypes.
- checkDataType(StringType)
- checkDataType(BinaryType)
- checkDataType(BooleanType)
- checkDataType(TimestampType)
- checkDataType(DecimalType)
- checkDataType(DoubleType)
- checkDataType(FloatType)
- checkDataType(ByteType)
- checkDataType(IntegerType)
- checkDataType(LongType)
- checkDataType(ShortType)
+ checkDataType(org.apache.spark.sql.StringType)
+ checkDataType(org.apache.spark.sql.BinaryType)
+ checkDataType(org.apache.spark.sql.BooleanType)
+ checkDataType(org.apache.spark.sql.TimestampType)
+ checkDataType(org.apache.spark.sql.DecimalType)
+ checkDataType(org.apache.spark.sql.DoubleType)
+ checkDataType(org.apache.spark.sql.FloatType)
+ checkDataType(org.apache.spark.sql.ByteType)
+ checkDataType(org.apache.spark.sql.IntegerType)
+ checkDataType(org.apache.spark.sql.LongType)
+ checkDataType(org.apache.spark.sql.ShortType)
// Simple ArrayType.
- val simpleScalaArrayType = ArrayType(StringType, true)
+ val simpleScalaArrayType =
+ org.apache.spark.sql.ArrayType(org.apache.spark.sql.StringType, true)
checkDataType(simpleScalaArrayType)
// Simple MapType.
- val simpleScalaMapType = MapType(StringType, LongType)
+ val simpleScalaMapType =
+ org.apache.spark.sql.MapType(org.apache.spark.sql.StringType, org.apache.spark.sql.LongType)
checkDataType(simpleScalaMapType)
// Simple StructType.
- val simpleScalaStructType = StructType(
- StructField("a", DecimalType, false) ::
- StructField("b", BooleanType, true) ::
- StructField("c", LongType, true) ::
- StructField("d", BinaryType, false) :: Nil)
+ val simpleScalaStructType = SStructType(
+ SStructField("a", org.apache.spark.sql.DecimalType, false) ::
+ SStructField("b", org.apache.spark.sql.BooleanType, true) ::
+ SStructField("c", org.apache.spark.sql.LongType, true) ::
+ SStructField("d", org.apache.spark.sql.BinaryType, false) :: Nil)
checkDataType(simpleScalaStructType)
// Complex StructType.
- val complexScalaStructType = StructType(
- StructField("simpleArray", simpleScalaArrayType, true) ::
- StructField("simpleMap", simpleScalaMapType, true) ::
- StructField("simpleStruct", simpleScalaStructType, true) ::
- StructField("boolean", BooleanType, false) :: Nil)
+ val complexScalaStructType = SStructType(
+ SStructField("simpleArray", simpleScalaArrayType, true) ::
+ SStructField("simpleMap", simpleScalaMapType, true) ::
+ SStructField("simpleStruct", simpleScalaStructType, true) ::
+ SStructField("boolean", org.apache.spark.sql.BooleanType, false) :: Nil)
checkDataType(complexScalaStructType)
// Complex ArrayType.
- val complexScalaArrayType = ArrayType(complexScalaStructType, true)
+ val complexScalaArrayType =
+ org.apache.spark.sql.ArrayType(complexScalaStructType, true)
checkDataType(complexScalaArrayType)
// Complex MapType.
- val complexScalaMapType = MapType(complexScalaStructType, complexScalaArrayType, false)
+ val complexScalaMapType =
+ org.apache.spark.sql.MapType(complexScalaStructType, complexScalaArrayType, false)
checkDataType(complexScalaMapType)
}
}