author     Cheng Lian <lian.cs.zju@gmail.com>  2014-11-02 15:18:29 -0800
committer  Michael Armbrust <michael@databricks.com>  2014-11-02 15:18:29 -0800
commit     c9f840046f8c45b1137f0289eeb0c980de72ea5e (patch)
tree       c867149b1f31ab6845ccbb412b73980b1c65b2a2 /sql/core
parent     495a132031ae002c787371f2fd0ba4be2437e7c8 (diff)
[SPARK-3791][SQL] Provides Spark version and Hive version in HiveThriftServer2
This PR overrides the `GetInfo` Hive Thrift API to provide correct version information. Another property, `spark.sql.hive.version`, is added to reveal the underlying Hive version. These are generally useful for Spark SQL ODBC driver providers. The Spark version information is extracted from the jar manifest. Also took the chance to remove the `SET -v` hack, which was a workaround for Simba ODBC driver connectivity.

TODO

- [x] Find a general way to figure out the Hive (or even any dependency) version.

This [blog post](http://blog.soebes.de/blog/2014/01/02/version-information-into-your-appas-with-maven/) suggests several methods to inspect an application's version. In the case of Spark, this can be tricky because the chosen method:

1. must apply to both Maven and SBT builds

   For Maven builds, we can retrieve the version information from the META-INF/maven directory within the assembly jar. But this doesn't work for SBT builds.

2. must not rely on the original jars of dependencies to extract a specific dependency's version, because Spark uses an assembly jar

   This implies we can't read the Hive version from Hive jar files, since the standard Spark distribution doesn't include them.

3. should play well with `SPARK_PREPEND_CLASSES` to ease local testing during development

   `SPARK_PREPEND_CLASSES` prevents classes from being loaded from the assembly jar, so we can't locate the jar file and read its manifest.

Given these constraints, maybe the only reliable method is to generate a source file containing the version information at build time. pwendell, do you have any suggestions from the perspective of the build process?

**Update** The Hive version is now retrieved from the newly introduced `HiveShim` object.

Author: Cheng Lian <lian.cs.zju@gmail.com>
Author: Cheng Lian <lian@databricks.com>

Closes #2843 from liancheng/get-info and squashes the following commits:

a873d0f [Cheng Lian] Updates test case
53f43cd [Cheng Lian] Retrieves underlying Hive version via HiveShim
1d282b8 [Cheng Lian] Removes the Simba ODBC "SET -v" hack
f857fce [Cheng Lian] Overrides Hive GetInfo Thrift API and adds Hive version property
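To make the manifest-based option from the TODO above concrete, here is a minimal sketch (not code from this PR) of probing a class's jar manifest for its Implementation-Version via the standard `java.lang.Package` API. The `ManifestVersion` name is hypothetical, and the lookup comes back empty under `SPARK_PREPEND_CLASSES` for exactly the reason described in point 3.

```scala
// Hypothetical sketch, not code from this PR: read a class's
// Implementation-Version from the manifest of the jar it was loaded from.
// Both getPackage and getImplementationVersion may return null (e.g. when
// classes are loaded from a directory under SPARK_PREPEND_CLASSES), hence
// the Option wrapping.
object ManifestVersion {
  def of(clazz: Class[_]): Option[String] =
    for {
      pkg     <- Option(clazz.getPackage)
      version <- Option(pkg.getImplementationVersion)
    } yield version
}

// Example: ManifestVersion.of(classOf[org.apache.spark.SparkContext])
```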
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala  69
1 file changed, 27 insertions(+), 42 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
index e658e6fc4d..f23b9c48cf 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
@@ -84,50 +84,35 @@ case class SetCommand(kv: Option[(String, Option[String])], output: Seq[Attribut
   extends LeafNode with Command with Logging {
 
   override protected lazy val sideEffectResult: Seq[Row] = kv match {
-    // Set value for the key.
-    case Some((key, Some(value))) =>
-      if (key == SQLConf.Deprecated.MAPRED_REDUCE_TASKS) {
-        logWarning(s"Property ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS} is deprecated, " +
+    // Configures the deprecated "mapred.reduce.tasks" property.
+    case Some((SQLConf.Deprecated.MAPRED_REDUCE_TASKS, Some(value))) =>
+      logWarning(
+        s"Property ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS} is deprecated, " +
           s"automatically converted to ${SQLConf.SHUFFLE_PARTITIONS} instead.")
-        context.setConf(SQLConf.SHUFFLE_PARTITIONS, value)
-        Seq(Row(s"${SQLConf.SHUFFLE_PARTITIONS}=$value"))
-      } else {
-        context.setConf(key, value)
-        Seq(Row(s"$key=$value"))
-      }
-
-    // Query the value bound to the key.
+      context.setConf(SQLConf.SHUFFLE_PARTITIONS, value)
+      Seq(Row(s"${SQLConf.SHUFFLE_PARTITIONS}=$value"))
+
+    // Configures a single property.
+    case Some((key, Some(value))) =>
+      context.setConf(key, value)
+      Seq(Row(s"$key=$value"))
+
+    // Queries all key-value pairs that are set in the SQLConf of the context. Notice that different
+    // from Hive, here "SET -v" is an alias of "SET". (In Hive, "SET" returns all changed properties
+    // while "SET -v" returns all properties.)
+    case Some(("-v", None)) | None =>
+      context.getAllConfs.map { case (k, v) => Row(s"$k=$v") }.toSeq
+
+    // Queries the deprecated "mapred.reduce.tasks" property.
+    case Some((SQLConf.Deprecated.MAPRED_REDUCE_TASKS, None)) =>
+      logWarning(
+        s"Property ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS} is deprecated, " +
+          s"showing ${SQLConf.SHUFFLE_PARTITIONS} instead.")
+      Seq(Row(s"${SQLConf.SHUFFLE_PARTITIONS}=${context.numShufflePartitions}"))
+
+    // Queries a single property.
     case Some((key, None)) =>
-      // TODO (lian) This is just a workaround to make the Simba ODBC driver work.
-      // Should remove this once we get the ODBC driver updated.
-      if (key == "-v") {
-        val hiveJars = Seq(
-          "hive-exec-0.12.0.jar",
-          "hive-service-0.12.0.jar",
-          "hive-common-0.12.0.jar",
-          "hive-hwi-0.12.0.jar",
-          "hive-0.12.0.jar").mkString(":")
-
-        context.getAllConfs.map { case (k, v) =>
-          Row(s"$k=$v")
-        }.toSeq ++ Seq(
-          Row("system:java.class.path=" + hiveJars),
-          Row("system:sun.java.command=shark.SharkServer2"))
-      } else {
-        if (key == SQLConf.Deprecated.MAPRED_REDUCE_TASKS) {
-          logWarning(s"Property ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS} is deprecated, " +
-            s"showing ${SQLConf.SHUFFLE_PARTITIONS} instead.")
-          Seq(Row(s"${SQLConf.SHUFFLE_PARTITIONS}=${context.numShufflePartitions}"))
-        } else {
-          Seq(Row(s"$key=${context.getConf(key, "<undefined>")}"))
-        }
-      }
-
-    // Query all key-value pairs that are set in the SQLConf of the context.
-    case _ =>
-      context.getAllConfs.map { case (k, v) =>
-        Row(s"$k=$v")
-      }.toSeq
+      Seq(Row(s"$key=${context.getConf(key, "<undefined>")}"))
   }
 
   override def otherCopyArgs = context :: Nil
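For reference, a hedged usage sketch of the `SET` forms handled by the rewritten pattern match above; `sqlContext` stands in for an `org.apache.spark.sql.SQLContext` of this era and is an assumption, not part of the patch.

```scala
// Assumed setup: sqlContext is an org.apache.spark.sql.SQLContext (circa Spark 1.2).
sqlContext.sql("SET spark.sql.shuffle.partitions=10") // set a single property
sqlContext.sql("SET spark.sql.shuffle.partitions")    // query a single property
sqlContext.sql("SET mapred.reduce.tasks=10")          // deprecated key, converted with a warning
sqlContext.sql("SET")                                 // all pairs set in SQLConf
sqlContext.sql("SET -v")                              // alias of plain SET here, unlike Hive
```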