diff options
author    : Cheng Lian <lian@databricks.com>    2015-06-24 09:49:20 -0700
committer : Yin Huai <yhuai@databricks.com>     2015-06-24 09:49:20 -0700
commit    : 9d36ec24312f0a9865b4392f89e9611a5b80916d (patch)
tree      : d64569ea6b6e1addf8a202a327365d134bc33a79 /sql
parent    : cc465fd92482737c21971d82e30d4cf247acf932 (diff)
download  : spark-9d36ec24312f0a9865b4392f89e9611a5b80916d.tar.gz
            spark-9d36ec24312f0a9865b4392f89e9611a5b80916d.tar.bz2
            spark-9d36ec24312f0a9865b4392f89e9611a5b80916d.zip
[SPARK-8567] [SQL] Debugging flaky HiveSparkSubmitSuite
Using similar approach used in `HiveThriftServer2Suite` to print stdout/stderr of the spawned process instead of logging them to see what happens on Jenkins. (This test suite only fails on Jenkins and doesn't spill out any log...)
cc yhuai
Author: Cheng Lian <lian@databricks.com>
Closes #6978 from liancheng/debug-hive-spark-submit-suite and squashes the following commits:
b031647 [Cheng Lian] Prints process stdout/stderr instead of logging them
Diffstat (limited to 'sql')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala | 14 ++++++++++---
1 file changed, 11 insertions(+), 3 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index ab443032be..d85516ab08 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -19,6 +19,8 @@ package org.apache.spark.sql.hive

 import java.io.File

+import scala.sys.process.{ProcessLogger, Process}
+
 import org.apache.spark._
 import org.apache.spark.sql.hive.test.{TestHive, TestHiveContext}
 import org.apache.spark.util.{ResetSystemProperties, Utils}
@@ -82,12 +84,18 @@ class HiveSparkSubmitSuite
   // This is copied from org.apache.spark.deploy.SparkSubmitSuite
   private def runSparkSubmit(args: Seq[String]): Unit = {
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
-    val process = Utils.executeCommand(
+    val process = Process(
       Seq("./bin/spark-submit") ++ args,
       new File(sparkHome),
-      Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
+      "SPARK_TESTING" -> "1",
+      "SPARK_HOME" -> sparkHome
+    ).run(ProcessLogger(
+      (line: String) => { println(s"out> $line") },
+      (line: String) => { println(s"err> $line") }
+    ))
+
     try {
-      val exitCode = failAfter(120 seconds) { process.waitFor() }
+      val exitCode = failAfter(120 seconds) { process.exitValue() }
       if (exitCode != 0) {
         fail(s"Process returned with exit code $exitCode. See the log4j logs for more detail.")
       }