diff options
author | Ankur Dave <ankurdave@gmail.com> | 2015-12-11 19:07:48 -0800 |
---|---|---|
committer | Yin Huai <yhuai@databricks.com> | 2015-12-11 19:07:48 -0800 |
commit | 1e799d617a28cd0eaa8f22d103ea8248c4655ae5 (patch) | |
tree | cc1137d4598c85cda1110b7cf16758ee1627048e | |
parent | a0ff6d16ef4bcc1b6ff7282e82a9b345d8449454 (diff) | |
download | spark-1e799d617a28cd0eaa8f22d103ea8248c4655ae5.tar.gz spark-1e799d617a28cd0eaa8f22d103ea8248c4655ae5.tar.bz2 spark-1e799d617a28cd0eaa8f22d103ea8248c4655ae5.zip |
[SPARK-12298][SQL] Fix infinite loop in DataFrame.sortWithinPartitions
Modifies the String overload of sortWithinPartitions to call the Column overload (instead of recursively calling itself, which caused the infinite loop) and ensures the String overload is exercised by a test.
Author: Ankur Dave <ankurdave@gmail.com>
Closes #10271 from ankurdave/SPARK-12298.
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala | 2 | ||||
-rw-r--r-- | sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala | 4 |
2 files changed, 3 insertions, 3 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala index da180a2ba0..497bd48266 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala @@ -609,7 +609,7 @@ class DataFrame private[sql]( */ @scala.annotation.varargs def sortWithinPartitions(sortCol: String, sortCols: String*): DataFrame = { - sortWithinPartitions(sortCol, sortCols : _*) + sortWithinPartitions((sortCol +: sortCols).map(Column(_)) : _*) } /** diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala index 5353fefaf4..c0bbf73ab1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala @@ -1090,8 +1090,8 @@ class DataFrameSuite extends QueryTest with SharedSQLContext { } // Distribute into one partition and order by. This partition should contain all the values. - val df6 = data.repartition(1, $"a").sortWithinPartitions($"b".asc) - // Walk each partition and verify that it is sorted descending and not globally sorted. + val df6 = data.repartition(1, $"a").sortWithinPartitions("b") + // Walk each partition and verify that it is sorted ascending and not globally sorted. df6.rdd.foreachPartition { p => var previousValue: Int = -1 var allSequential: Boolean = true |