author    Reynold Xin <rxin@databricks.com>  2015-01-29 15:13:09 -0800
committer Reynold Xin <rxin@databricks.com>  2015-01-29 15:13:09 -0800
commit    715632232d0e6c97e304686608385d3b54a4bcf6 (patch)
tree      747dac8cd85ec89d40537b980346d4d53bd69d93 /sql
parent    f9e569452e2f0ae69037644170d8aa79ac6b4ccf (diff)
[SPARK-5445][SQL] Consolidate Java and Scala DSL static methods.
Turns out Scala does generate static methods for ones defined in a companion object. Finally no need to separate api.java.dsl and api.scala.dsl.

Author: Reynold Xin <rxin@databricks.com>

Closes #4276 from rxin/dsl and squashes the following commits:

30aa611 [Reynold Xin] Add all files.
1a9d215 [Reynold Xin] [SPARK-5445][SQL] Consolidate Java and Scala DSL static methods.
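For context on the claim above: methods defined on a Scala object are emitted by scalac as static forwarders on the generated class file, so Java code can call them as plain statics. A minimal sketch of this behavior (hypothetical Greeter example, not part of this patch):

    class Greeter

    object Greeter {
      // scalac emits a static forwarder on Greeter.class:
      //   public static java.lang.String greet(java.lang.String);
      // (check with `javap Greeter`), so Java code can call
      // Greeter.greet("spark") with no hand-written wrapper class.
      def greet(name: String): String = s"Hello, $name"
    }

This is why the separate api.java.dsl wrapper deleted below is no longer needed: Java callers can use the statics on the consolidated Dsl object directly.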
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/Column.scala | 5
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala | 3
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/Dsl.scala (renamed from sql/core/src/main/scala/org/apache/spark/sql/api/scala/dsl/package.scala) | 39
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/api/java/dsl.java | 92
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala | 2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala | 2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala | 2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala | 2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala | 2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/TestData.scala | 2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala | 4
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala | 2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala | 2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala | 2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala | 2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala | 2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala | 2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala | 2
18 files changed, 35 insertions(+), 134 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
index 9be2a03afa..ca50fd6f05 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
import scala.language.implicitConversions
-import org.apache.spark.sql.api.scala.dsl.lit
+import org.apache.spark.sql.Dsl.lit
import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, Star}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical.{Project, LogicalPlan}
@@ -28,8 +28,7 @@ import org.apache.spark.sql.types._
object Column {
/**
- * Creates a [[Column]] based on the given column name.
- * Same as [[api.scala.dsl.col]] and [[api.java.dsl.col]].
+ * Creates a [[Column]] based on the given column name. Same as [[Dsl.col]].
*/
def apply(colName: String): Column = new Column(colName)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
index 050366aea8..94c13a5c26 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
@@ -51,8 +51,7 @@ import org.apache.spark.util.Utils
* }}}
*
* Once created, it can be manipulated using the various domain-specific-language (DSL) functions
- * defined in: [[DataFrame]] (this class), [[Column]], [[api.scala.dsl]] for Scala DSL, and
- * [[api.java.dsl]] for Java DSL.
+ * defined in: [[DataFrame]] (this class), [[Column]], [[Dsl]] for the DSL.
*
* To select a column from the data frame, use the apply method:
* {{{
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/scala/dsl/package.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dsl.scala
index dc851fc504..f47ff995e9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/scala/dsl/package.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dsl.scala
@@ -15,43 +15,38 @@
* limitations under the License.
*/
-package org.apache.spark.sql.api.scala
+package org.apache.spark.sql
import scala.language.implicitConversions
import scala.reflect.runtime.universe.{TypeTag, typeTag}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.ScalaReflection
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types._
/**
- * Scala version of the domain specific functions available for [[DataFrame]].
- *
- * The Java-version is at [[api.java.dsl]].
+ * Domain specific functions available for [[DataFrame]].
*/
-package object dsl {
- // NOTE: Update also the Java version when we update this version.
+object Dsl {
/** An implicit conversion that turns a Scala `Symbol` into a [[Column]]. */
implicit def symbolToColumn(s: Symbol): ColumnName = new ColumnName(s.name)
-// /**
-// * An implicit conversion that turns a RDD of product into a [[DataFrame]].
-// *
-// * This method requires an implicit SQLContext in scope. For example:
-// * {{{
-// * implicit val sqlContext: SQLContext = ...
-// * val rdd: RDD[(Int, String)] = ...
-// * rdd.toDataFrame // triggers the implicit here
-// * }}}
-// */
-// implicit def rddToDataFrame[A <: Product: TypeTag](rdd: RDD[A])(implicit context: SQLContext)
-// : DataFrame = {
-// context.createDataFrame(rdd)
-// }
+ // /**
+ // * An implicit conversion that turns a RDD of product into a [[DataFrame]].
+ // *
+ // * This method requires an implicit SQLContext in scope. For example:
+ // * {{{
+ // * implicit val sqlContext: SQLContext = ...
+ // * val rdd: RDD[(Int, String)] = ...
+ // * rdd.toDataFrame // triggers the implicit here
+ // * }}}
+ // */
+ // implicit def rddToDataFrame[A <: Product: TypeTag](rdd: RDD[A])(implicit context: SQLContext)
+ // : DataFrame = {
+ // context.createDataFrame(rdd)
+ // }
/** Converts $"col name" into an [[Column]]. */
implicit class StringToColumn(val sc: StringContext) extends AnyVal {
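For reference, a short usage sketch of the two implicits shown in this hunk (my own illustration, not from the patch; assumes a DataFrame df with columns "name" and "age"):

    def demo(df: org.apache.spark.sql.DataFrame) = {
      import org.apache.spark.sql.Dsl._
      // symbolToColumn: a Scala Symbol such as 'name becomes a Column
      val bySymbol = df.select('name)
      // StringToColumn: the $"..." interpolator also yields a Column,
      // convenient for names that are not valid Scala identifiers
      val byInterp = df.select($"age")
      (bySymbol, byInterp)
    }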
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/java/dsl.java b/sql/core/src/main/scala/org/apache/spark/sql/api/java/dsl.java
deleted file mode 100644
index 16702afdb3..0000000000
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/java/dsl.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.api.java;
-
-import org.apache.spark.sql.Column;
-import org.apache.spark.sql.DataFrame;
-import org.apache.spark.sql.api.scala.dsl.package$;
-
-
-/**
- * Java version of the domain-specific functions available for {@link DataFrame}.
- *
- * The Scala version is at {@link org.apache.spark.sql.api.scala.dsl}.
- */
-public class dsl {
- // NOTE: Update also the Scala version when we update this version.
-
- private static package$ scalaDsl = package$.MODULE$;
-
- /**
- * Returns a {@link Column} based on the given column name.
- */
- public static Column col(String colName) {
- return new Column(colName);
- }
-
- /**
- * Creates a column of literal value.
- */
- public static Column lit(Object literalValue) {
- return scalaDsl.lit(literalValue);
- }
-
- public static Column sum(Column e) {
- return scalaDsl.sum(e);
- }
-
- public static Column sumDistinct(Column e) {
- return scalaDsl.sumDistinct(e);
- }
-
- public static Column avg(Column e) {
- return scalaDsl.avg(e);
- }
-
- public static Column first(Column e) {
- return scalaDsl.first(e);
- }
-
- public static Column last(Column e) {
- return scalaDsl.last(e);
- }
-
- public static Column min(Column e) {
- return scalaDsl.min(e);
- }
-
- public static Column max(Column e) {
- return scalaDsl.max(e);
- }
-
- public static Column upper(Column e) {
- return scalaDsl.upper(e);
- }
-
- public static Column lower(Column e) {
- return scalaDsl.lower(e);
- }
-
- public static Column sqrt(Column e) {
- return scalaDsl.sqrt(e);
- }
-
- public static Column abs(Column e) {
- return scalaDsl.abs(e);
- }
-}
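The deleted file above was a hand-maintained delegation layer: every static simply forwarded to the Scala package object through package$.MODULE$. After this patch the equivalent calls go straight to the consolidated Dsl object; a hedged sketch (assumes a Column e):

    import org.apache.spark.sql.{Column, Dsl}

    // The same aggregates the Java wrapper used to forward, now invoked
    // on the single Dsl object (whose methods should also be visible to
    // Java as statics on Dsl.class, per the commit message).
    def demo(e: Column): Column = Dsl.sum(Dsl.abs(e))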
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index cccc5473bd..c9221f8f93 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
import org.apache.spark.sql.TestData._
import org.apache.spark.sql.columnar._
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.test.TestSQLContext._
import org.apache.spark.storage.{StorageLevel, RDDBlockId}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
index 82029319de..6428554ec7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.test.TestSQLContext
import org.apache.spark.sql.types.{BooleanType, IntegerType, StructField, StructType}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index b1fb1bd289..db83a906d9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.types._
/* Implicits */
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
index bb95248c38..f0c939dbb1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
import org.scalatest.BeforeAndAfterEach
import org.apache.spark.sql.TestData._
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
import org.apache.spark.sql.execution.joins._
import org.apache.spark.sql.test.TestSQLContext._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 9bb64030f4..e03444d496 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -21,7 +21,7 @@ import java.util.TimeZone
import org.scalatest.BeforeAndAfterAll
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.catalyst.errors.TreeNodeException
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.types._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
index eae6acf5c9..dd781169ca 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
import java.sql.Timestamp
import org.apache.spark.sql.catalyst.plans.logical
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.test._
/* Implicits */
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
index b122d7d5bb..95923f9aad 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql
-import org.apache.spark.sql.api.scala.dsl.StringToColumn
+import org.apache.spark.sql.Dsl.StringToColumn
import org.apache.spark.sql.test._
/* Implicits */
@@ -45,7 +45,7 @@ class UDFSuite extends QueryTest {
test("struct UDF") {
udf.register("returnStruct", (f1: String, f2: String) => FunctionResult(f1, f2))
- val result=
+ val result =
sql("SELECT returnStruct('test', 'test2') as ret")
.select($"ret.f1").head().getString(0)
assert(result === "test")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
index 59e6f00cfe..0696a2335e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
import scala.beans.{BeanInfo, BeanProperty}
import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.test.TestSQLContext._
import org.apache.spark.sql.types._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
index 2698a599b2..3d33484ab0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/InMemoryColumnarQuerySuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.columnar
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.TestData._
import org.apache.spark.sql.catalyst.expressions.Row
import org.apache.spark.sql.test.TestSQLContext._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
index 1f701e2e73..df108a9d26 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.execution
import org.scalatest.FunSuite
import org.apache.spark.sql.{SQLConf, execution}
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.TestData._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
index 634792c98f..cb615388da 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
@@ -21,7 +21,7 @@ import java.sql.{Date, Timestamp}
import org.apache.spark.sql.TestData._
import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.json.JsonRDD.{compatibleType, enforceCorrectType}
import org.apache.spark.sql.test.TestSQLContext
import org.apache.spark.sql.test.TestSQLContext._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
index 0e91834e55..d9ab16baf9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
@@ -33,7 +33,7 @@ import parquet.schema.{MessageType, MessageTypeParser}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.{DataFrame, QueryTest, SQLConf}
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.catalyst.ScalaReflection
import org.apache.spark.sql.catalyst.expressions.Row
import org.apache.spark.sql.test.TestSQLContext
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index a485158a47..42819e3584 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.apache.spark.{SparkFiles, SparkException}
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.catalyst.plans.logical.Project
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.hive._
import org.apache.spark.sql.hive.test.TestHive
import org.apache.spark.sql.hive.test.TestHive._
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
index efea3d8cdb..8fb5e050a2 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
@@ -18,7 +18,7 @@
package org.apache.spark.sql.hive.execution
import org.apache.spark.sql.Row
-import org.apache.spark.sql.api.scala.dsl._
+import org.apache.spark.sql.Dsl._
import org.apache.spark.sql.hive.test.TestHive
import org.apache.spark.sql.hive.test.TestHive._