author    Michael Armbrust <michael@databricks.com>  2014-06-19 14:14:03 -0700
committer Reynold Xin <rxin@apache.org>              2014-06-19 14:14:21 -0700
commit    58d06846f06726a9a6ce10995fe71155eae880bf (patch)
tree      4f459d8721dbf057e540958ab810eed025387e02 /sql/hive/src/main
parent    a4c3a806906dffa0ee7a2c54a3b3f5cfb3225639 (diff)
[SPARK-2191][SQL] Make sure InsertIntoHiveTable doesn't execute more than once.
Author: Michael Armbrust <michael@databricks.com>

Closes #1129 from marmbrus/doubleCreateAs and squashes the following commits:

9c6d9e4 [Michael Armbrust] Fix typo.
5128fe2 [Michael Armbrust] Make sure InsertIntoHiveTable doesn't execute each time you ask for its result.

(cherry picked from commit 777c5958c4088182f9e2daba435ccb413a2f69d7)
Signed-off-by: Reynold Xin <rxin@apache.org>
Diffstat (limited to 'sql/hive/src/main')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/hiveOperators.scala | 6
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/hiveOperators.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/hiveOperators.scala
index a839231449..240aa0df49 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/hiveOperators.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/hiveOperators.scala
@@ -344,12 +344,16 @@ case class InsertIntoHiveTable(
writer.commitJob()
}
+ override def execute() = result
+
/**
* Inserts all the rows in the table into Hive. Row objects are properly serialized with the
* `org.apache.hadoop.hive.serde2.SerDe` and the
* `org.apache.hadoop.mapred.OutputFormat` provided by the table definition.
+ *
+ * Note: this is run once and then kept to avoid double insertions.
*/
- def execute() = {
+ private lazy val result: RDD[Row] = {
val childRdd = child.execute()
assert(childRdd != null)
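
The patch replaces a plain `execute()` method with one that returns a memoized `lazy val`, so asking the operator for its result repeatedly no longer repeats the side-effecting insert. Below is a minimal, self-contained sketch of that lazy-val memoization pattern, outside of Spark; the names `FakeOperator`, `runInsert`, and `insertCount` are hypothetical and exist only to illustrate the idea.

	// Sketch of the pattern used in the patch: cache a side-effecting
	// computation in a lazy val so repeated execute() calls reuse it.
	object LazyExecuteExample {
	  class FakeOperator {
	    private var insertions = 0

	    // Stand-in for the expensive, side-effecting insert into Hive.
	    private def runInsert(): Seq[Int] = {
	      insertions += 1
	      Seq(1, 2, 3)
	    }

	    // Evaluated at most once; later calls return the cached value.
	    private lazy val result: Seq[Int] = runInsert()

	    def execute(): Seq[Int] = result

	    def insertCount: Int = insertions
	  }

	  def main(args: Array[String]): Unit = {
	    val op = new FakeOperator
	    op.execute()
	    op.execute()
	    // Prints 1: the insert ran only once despite two execute() calls.
	    println(op.insertCount)
	  }
	}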