From ff9ae61a3b7bbbfc2aac93a99c05a9e1ea9c08bc Mon Sep 17 00:00:00 2001
From: Mark Grover
Date: Thu, 14 Apr 2016 18:51:43 -0700
Subject: [SPARK-14601][DOC] Minor doc/usage changes related to removal of Spark assembly

## What changes were proposed in this pull request?

Removing references to the assembly jar in documentation.
Adding an additional (previously undocumented) usage of spark-submit to run examples.

## How was this patch tested?

Ran spark-submit usage to ensure formatting was fine. Ran examples using SparkSubmit.

Author: Mark Grover

Closes #12365 from markgrover/spark-14601.
---
 .../spark/sql/execution/datasources/parquet/ParquetRelation.scala   | 2 +-
 sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

(limited to 'sql')

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala
index b91e892f8f..bfe7aefe41 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala
@@ -784,7 +784,7 @@ private[sql] object ParquetRelation extends Logging {
       // scalastyle:on classforname
       redirect(JLogger.getLogger("parquet"))
     } catch { case _: Throwable =>
-      // SPARK-9974: com.twitter:parquet-hadoop-bundle:1.6.0 is not packaged into the assembly jar
+      // SPARK-9974: com.twitter:parquet-hadoop-bundle:1.6.0 is not packaged into the assembly
       // when Spark is built with SBT. So `parquet.Log` may not be found. This try/catch block
       // should be removed after this issue is fixed.
     }
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 505e5c0bb6..ff93bfc4a3 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -429,7 +429,7 @@ private[hive] object HiveContext extends Logging {
       | Location of the jars that should be used to instantiate the HiveMetastoreClient.
       | This property can be one of three options: "
       | 1. "builtin"
-      |   Use Hive ${hiveExecutionVersion}, which is bundled with the Spark assembly jar when
+      |   Use Hive ${hiveExecutionVersion}, which is bundled with the Spark assembly when
       |   -Phive is enabled. When this option is chosen,
       |   spark.sql.hive.metastore.version must be either
       |   ${hiveExecutionVersion} or not defined.
--
cgit v1.2.3
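
Note on the spark-submit usage mentioned in the description (an illustrative sketch, not part of this patch): the previously undocumented way to run a bundled example directly through spark-submit looks roughly like the following, where SparkPi and its argument are placeholders for any example class and its arguments.

    # Run a bundled example through spark-submit directly (the usage this change documents);
    # the example class name and argument are illustrative.
    ./bin/spark-submit run-example SparkPi 100

    # The run-example wrapper script delegates to the same spark-submit code path.
    ./bin/run-example SparkPi 100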