about summary refs log tree commit diff
path: root/sql
diff options
context:
space:
mode:
Diffstat (limited to 'sql')
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala | 28
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala | 19
2 files changed, 41 insertions, 6 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
index c421006c8f..cf344710ff 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql
import java.io.CharArrayWriter
import java.sql.DriverManager
+
import scala.collection.JavaConversions._
import scala.language.implicitConversions
import scala.reflect.ClassTag
@@ -28,6 +29,7 @@ import scala.util.control.NonFatal
import com.fasterxml.jackson.core.JsonFactory
+import org.apache.commons.lang3.StringUtils
import org.apache.spark.annotation.{DeveloperApi, Experimental}
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.api.python.SerDeUtil
@@ -175,6 +177,7 @@ class DataFrame private[sql](
* @param numRows Number of rows to show
*/
private[sql] def showString(numRows: Int): String = {
+ val sb = new StringBuilder
val data = take(numRows)
val numCols = schema.fieldNames.length
@@ -194,12 +197,25 @@ class DataFrame private[sql](
}
}
- // Pad the cells
- rows.map { row =>
- row.zipWithIndex.map { case (cell, i) =>
- String.format(s"%-${colWidths(i)}s", cell)
- }.mkString(" ")
- }.mkString("\n")
+ // Create SeparateLine
+ val sep: String = colWidths.map("-" * _).addString(sb, "+", "+", "+\n").toString()
+
+ // column names
+ rows.head.zipWithIndex.map { case (cell, i) =>
+ StringUtils.leftPad(cell.toString, colWidths(i))
+ }.addString(sb, "|", "|", "|\n")
+
+ sb.append(sep)
+
+ // data
+ rows.tail.map {
+ _.zipWithIndex.map { case (cell, i) =>
+ StringUtils.leftPad(cell.toString, colWidths(i))
+ }.addString(sb, "|", "|", "|\n")
+ }
+
+ sb.append(sep)
+ sb.toString()
}
override def toString: String = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index e286fef23c..ff31e15e2d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -598,6 +598,25 @@ class DataFrameSuite extends QueryTest {
testData.select($"*").show(1000)
}
+ test("SPARK-7319 showString") {
+ val expectedAnswer = """+---+-----+
+ ||key|value|
+ |+---+-----+
+ || 1| 1|
+ |+---+-----+
+ |""".stripMargin
+ assert(testData.select($"*").showString(1) === expectedAnswer)
+ }
+
+ test("SPARK-7327 show with empty dataFrame") {
+ val expectedAnswer = """+---+-----+
+ ||key|value|
+ |+---+-----+
+ |+---+-----+
+ |""".stripMargin
+ assert(testData.select($"*").filter($"key" < 0).showString(1) === expectedAnswer)
+ }
+
test("createDataFrame(RDD[Row], StructType) should convert UDTs (SPARK-6672)") {
val rowRDD = TestSQLContext.sparkContext.parallelize(Seq(Row(new ExamplePoint(1.0, 2.0))))
val schema = StructType(Array(StructField("point", new ExamplePointUDT(), false)))