path: root/sql/hive
author     Cheolsoo Park <cheolsoop@netflix.com>    2015-05-27 00:18:42 -0700
committer  Reynold Xin <rxin@databricks.com>        2015-05-27 00:18:42 -0700
commit     6dd645870d34d97ac992032bfd6cf39f20a0c50f (patch)
tree       4d50c4c56aa63376f24ae8912e98285ab97c4996 /sql/hive
parent     a9f1c0c57b9be586dbada09dab91dcfce31141d9 (diff)
[SPARK-7850][BUILD] Hive 0.12.0 profile in POM should be removed
I grep'ed hive-0.12.0 in the source code and removed all the profiles and doc references.

Author: Cheolsoo Park <cheolsoop@netflix.com>

Closes #6393 from piaozhexiu/SPARK-7850 and squashes the following commits:

fb429ce [Cheolsoo Park] Remove hive-0.13.1 profile
82bf09a [Cheolsoo Park] Remove hive 0.12.0 shim code
f3722da [Cheolsoo Park] Remove hive-0.12.0 profile and references from POM and build docs
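Build note (not part of the commit message): with the hive-0.12.0 profile removed from the POM, the build's default Hive version (0.13.1 at the time) is the only one the -Phive profile pulls in, so -Phive-0.12.0 no longer selects anything. Assuming the standard flags documented in building-spark.md for this release, a Hive-enabled build would be invoked roughly as:

    build/mvn -Phive -Phive-thriftserver -DskipTests clean package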
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/pom.xml                                                          10
-rw-r--r--  sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala   265
2 files changed, 0 insertions, 275 deletions
diff --git a/sql/hive/pom.xml b/sql/hive/pom.xml
index e322340094..615b07e74d 100644
--- a/sql/hive/pom.xml
+++ b/sql/hive/pom.xml
@@ -136,16 +136,6 @@
</plugins>
</build>
</profile>
- <profile>
- <id>hive-0.12.0</id>
- <dependencies>
- <dependency>
- <groupId>com.twitter</groupId>
- <artifactId>parquet-hive-bundle</artifactId>
- <version>1.5.0</version>
- </dependency>
- </dependencies>
- </profile>
</profiles>
<build>
diff --git a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
deleted file mode 100644
index 33e96eaabf..0000000000
--- a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
+++ /dev/null
@@ -1,265 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.hive
-
-import java.net.URI
-import java.util.{ArrayList => JArrayList, Properties}
-
-import scala.collection.JavaConversions._
-import scala.language.implicitConversions
-
-import org.apache.hadoop.{io => hadoopIo}
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.Path
-import org.apache.hadoop.hive.common.`type`.HiveDecimal
-import org.apache.hadoop.hive.conf.HiveConf
-import org.apache.hadoop.hive.ql.Context
-import org.apache.hadoop.hive.ql.metadata.{Hive, Partition, Table}
-import org.apache.hadoop.hive.ql.plan.{CreateTableDesc, FileSinkDesc, TableDesc}
-import org.apache.hadoop.hive.ql.processors._
-import org.apache.hadoop.hive.ql.stats.StatsSetupConst
-import org.apache.hadoop.hive.serde2.{ColumnProjectionUtils, Deserializer, io => hiveIo}
-import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspectorConverters, ObjectInspector, PrimitiveObjectInspector}
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.{HiveDecimalObjectInspector, PrimitiveObjectInspectorFactory}
-import org.apache.hadoop.hive.serde2.typeinfo.{TypeInfo, TypeInfoFactory}
-import org.apache.hadoop.io.{NullWritable, Writable}
-import org.apache.hadoop.mapred.InputFormat
-
-import org.apache.spark.sql.types.{UTF8String, Decimal, DecimalType}
-
-private[hive] case class HiveFunctionWrapper(functionClassName: String)
- extends java.io.Serializable {
-
- // for Serialization
- def this() = this(null)
-
- import org.apache.spark.util.Utils._
- def createFunction[UDFType <: AnyRef](): UDFType = {
- getContextOrSparkClassLoader
- .loadClass(functionClassName).newInstance.asInstanceOf[UDFType]
- }
-}
-
-/**
- * A compatibility layer for interacting with Hive version 0.12.0.
- */
-private[hive] object HiveShim {
- val version = "0.12.0"
-
- def getTableDesc(
- serdeClass: Class[_ <: Deserializer],
- inputFormatClass: Class[_ <: InputFormat[_, _]],
- outputFormatClass: Class[_],
- properties: Properties) = {
- new TableDesc(serdeClass, inputFormatClass, outputFormatClass, properties)
- }
-
- def getStringWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.STRING,
- getStringWritable(value))
-
- def getIntWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.INT,
- getIntWritable(value))
-
- def getDoubleWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.DOUBLE,
- getDoubleWritable(value))
-
- def getBooleanWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.BOOLEAN,
- getBooleanWritable(value))
-
- def getLongWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.LONG,
- getLongWritable(value))
-
- def getFloatWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.FLOAT,
- getFloatWritable(value))
-
- def getShortWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.SHORT,
- getShortWritable(value))
-
- def getByteWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.BYTE,
- getByteWritable(value))
-
- def getBinaryWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.BINARY,
- getBinaryWritable(value))
-
- def getDateWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.DATE,
- getDateWritable(value))
-
- def getTimestampWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.TIMESTAMP,
- getTimestampWritable(value))
-
- def getDecimalWritableConstantObjectInspector(value: Any): ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.DECIMAL,
- getDecimalWritable(value))
-
- def getPrimitiveNullWritableConstantObjectInspector: ObjectInspector =
- PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
- PrimitiveCategory.VOID, null)
-
- def getStringWritable(value: Any): hadoopIo.Text =
- if (value == null) null else new hadoopIo.Text(value.asInstanceOf[UTF8String].toString)
-
- def getIntWritable(value: Any): hadoopIo.IntWritable =
- if (value == null) null else new hadoopIo.IntWritable(value.asInstanceOf[Int])
-
- def getDoubleWritable(value: Any): hiveIo.DoubleWritable =
- if (value == null) null else new hiveIo.DoubleWritable(value.asInstanceOf[Double])
-
- def getBooleanWritable(value: Any): hadoopIo.BooleanWritable =
- if (value == null) null else new hadoopIo.BooleanWritable(value.asInstanceOf[Boolean])
-
- def getLongWritable(value: Any): hadoopIo.LongWritable =
- if (value == null) null else new hadoopIo.LongWritable(value.asInstanceOf[Long])
-
- def getFloatWritable(value: Any): hadoopIo.FloatWritable =
- if (value == null) null else new hadoopIo.FloatWritable(value.asInstanceOf[Float])
-
- def getShortWritable(value: Any): hiveIo.ShortWritable =
- if (value == null) null else new hiveIo.ShortWritable(value.asInstanceOf[Short])
-
- def getByteWritable(value: Any): hiveIo.ByteWritable =
- if (value == null) null else new hiveIo.ByteWritable(value.asInstanceOf[Byte])
-
- def getBinaryWritable(value: Any): hadoopIo.BytesWritable =
- if (value == null) null else new hadoopIo.BytesWritable(value.asInstanceOf[Array[Byte]])
-
- def getDateWritable(value: Any): hiveIo.DateWritable =
- if (value == null) null else new hiveIo.DateWritable(value.asInstanceOf[Int])
-
- def getTimestampWritable(value: Any): hiveIo.TimestampWritable =
- if (value == null) {
- null
- } else {
- new hiveIo.TimestampWritable(value.asInstanceOf[java.sql.Timestamp])
- }
-
- def getDecimalWritable(value: Any): hiveIo.HiveDecimalWritable =
- if (value == null) {
- null
- } else {
- new hiveIo.HiveDecimalWritable(
- HiveShim.createDecimal(value.asInstanceOf[Decimal].toJavaBigDecimal))
- }
-
- def getPrimitiveNullWritable: NullWritable = NullWritable.get()
-
- def createDriverResultsArray = new JArrayList[String]
-
- def processResults(results: JArrayList[String]) = results
-
- def getStatsSetupConstTotalSize = StatsSetupConst.TOTAL_SIZE
-
- def getStatsSetupConstRawDataSize = StatsSetupConst.RAW_DATA_SIZE
-
- def createDefaultDBIfNeeded(context: HiveContext) = { }
-
- def getCommandProcessor(cmd: Array[String], conf: HiveConf) = {
- CommandProcessorFactory.get(cmd(0), conf)
- }
-
- def createDecimal(bd: java.math.BigDecimal): HiveDecimal = {
- new HiveDecimal(bd)
- }
-
- def appendReadColumns(conf: Configuration, ids: Seq[Integer], names: Seq[String]) {
- ColumnProjectionUtils.appendReadColumnIDs(conf, ids)
- ColumnProjectionUtils.appendReadColumnNames(conf, names)
- }
-
- def getExternalTmpPath(context: Context, uri: URI) = {
- context.getExternalTmpFileURI(uri)
- }
-
- def getDataLocationPath(p: Partition) = p.getPartitionPath
-
- def getAllPartitionsOf(client: Hive, tbl: Table) = client.getAllPartitionsForPruner(tbl)
-
- def compatibilityBlackList = Seq(
- "decimal_.*",
- "udf7",
- "drop_partitions_filter2",
- "show_.*",
- "serde_regex",
- "udf_to_date",
- "udaf_collect_set",
- "udf_concat"
- )
-
- def setLocation(tbl: Table, crtTbl: CreateTableDesc): Unit = {
- tbl.setDataLocation(new Path(crtTbl.getLocation()).toUri())
- }
-
- def decimalMetastoreString(decimalType: DecimalType): String = "decimal"
-
- def decimalTypeInfo(decimalType: DecimalType): TypeInfo =
- TypeInfoFactory.decimalTypeInfo
-
- def decimalTypeInfoToCatalyst(inspector: PrimitiveObjectInspector): DecimalType = {
- DecimalType.Unlimited
- }
-
- def toCatalystDecimal(hdoi: HiveDecimalObjectInspector, data: Any): Decimal = {
- if (hdoi.preferWritable()) {
- Decimal(hdoi.getPrimitiveWritableObject(data).getHiveDecimal().bigDecimalValue)
- } else {
- Decimal(hdoi.getPrimitiveJavaObject(data).bigDecimalValue())
- }
- }
-
- def getConvertedOI(
- inputOI: ObjectInspector,
- outputOI: ObjectInspector): ObjectInspector = {
- ObjectInspectorConverters.getConvertedOI(inputOI, outputOI, true)
- }
-
- def prepareWritable(w: Writable): Writable = {
- w
- }
-
- def setTblNullFormat(crtTbl: CreateTableDesc, tbl: Table) = {}
-}
-
-private[hive] class ShimFileSinkDesc(
- var dir: String,
- var tableInfo: TableDesc,
- var compressed: Boolean)
- extends FileSinkDesc(dir, tableInfo, compressed) {
-}