author     Daoyuan Wang <daoyuan.wang@intel.com>    2015-01-14 09:36:59 -0800
committer  Reynold Xin <rxin@databricks.com>        2015-01-14 09:36:59 -0800
commit     a3f7421b42f45e39f3e53679188e4eae2ed1f208 (patch)
tree       f33d6593e0ead692e7845fb0a4f24088a78fbcac /sql/hive
parent     d5eeb35167e1ab72fab7778757163ff0aacaef2c (diff)
[SPARK-5248] [SQL] move sql.types.decimal.Decimal to sql.types.Decimal
rxin follow up of #3732

Author: Daoyuan Wang <daoyuan.wang@intel.com>

Closes #4041 from adrian-wang/decimal and squashes the following commits:

aa3d738 [Daoyuan Wang] fix auto refactor
7777a58 [Daoyuan Wang] move sql.types.decimal.Decimal to sql.types.Decimal
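As a rough illustration (not part of the patch), this is what the relocation means for downstream code that imports Decimal: only the package changes, the class name and behavior stay the same. The sketch below assumes the Decimal(BigDecimal) and DecimalType(precision, scale) factories available in this Spark branch; DecimalMoveExample is a hypothetical caller.

object DecimalMoveExample {
  // before: import org.apache.spark.sql.types.DecimalType
  //         import org.apache.spark.sql.types.decimal.Decimal
  // after the move, a single import covers both:
  import org.apache.spark.sql.types.{Decimal, DecimalType}

  def main(args: Array[String]): Unit = {
    val d: Decimal = Decimal(BigDecimal("123.45"))  // value semantics unchanged by the move
    val t: DecimalType = DecimalType(10, 2)         // precision 10, scale 2
    println(s"$d has Catalyst type $t")
  }
}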
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala      3
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala              1
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala  1
-rw-r--r--  sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala      3
-rw-r--r--  sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala      3
5 files changed, 3 insertions, 8 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
index 245b847cf4..5140d2064c 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
@@ -26,7 +26,6 @@ import org.apache.hadoop.{io => hadoopIo}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
/* Implicit conversions */
import scala.collection.JavaConversions._
@@ -43,7 +42,7 @@ import scala.collection.JavaConversions._
* long / scala.Long
* short / scala.Short
* byte / scala.Byte
- * org.apache.spark.sql.types.decimal.Decimal
+ * org.apache.spark.sql.types.Decimal
* Array[Byte]
* java.sql.Date
* java.sql.Timestamp
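The scaladoc hunk above documents the mapping between Hive primitives and Catalyst types, with Decimal now listed under its new package. As a hedged sketch (not from the patch), wrapping a Hive-side java.math.BigDecimal into Catalyst's Decimal and back might look like the following; it assumes the Decimal(java.math.BigDecimal) factory and toJavaBigDecimal accessor present in this branch, and DecimalWrapExample is a hypothetical name.

object DecimalWrapExample {
  import org.apache.spark.sql.types.Decimal

  def main(args: Array[String]): Unit = {
    // Wrap a Java BigDecimal (as a Hive object inspector would hand it over)
    // into Catalyst's Decimal, then unwrap it again.
    val javaValue = new java.math.BigDecimal("3.1415")
    val catalystValue: Decimal = Decimal(javaValue)
    val roundTripped: java.math.BigDecimal = catalystValue.toJavaBigDecimal
    println(s"$javaValue -> $catalystValue -> $roundTripped")
  }
}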
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index b13ef7276b..5e29e57d93 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -35,7 +35,6 @@ import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.execution.ExplainCommand
import org.apache.spark.sql.hive.execution.{HiveNativeCommand, DropTable, AnalyzeTable}
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
/* Implicit conversions */
import scala.collection.JavaConversions._
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
index dc23d9a101..4864607252 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
@@ -31,7 +31,6 @@ import org.scalatest.FunSuite
import org.apache.spark.sql.catalyst.expressions.{Literal, Row}
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
class HiveInspectorSuite extends FunSuite with HiveInspectors {
test("Test wrap SettableStructObjectInspector") {
diff --git a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
index a5587460fd..58417a15bb 100644
--- a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
+++ b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
@@ -41,8 +41,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.{TypeInfo, TypeInfoFactory}
import org.apache.hadoop.io.NullWritable
import org.apache.hadoop.mapred.InputFormat
-import org.apache.spark.sql.types.DecimalType
-import org.apache.spark.sql.types.decimal.Decimal
+import org.apache.spark.sql.types.{Decimal, DecimalType}
case class HiveFunctionWrapper(functionClassName: String) extends java.io.Serializable {
// for Serialization
diff --git a/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala b/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
index a7121360dd..1f768ca971 100644
--- a/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
+++ b/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
@@ -42,8 +42,7 @@ import org.apache.hadoop.hive.serde2.{io => hiveIo}
import org.apache.hadoop.{io => hadoopIo}
import org.apache.spark.Logging
-import org.apache.spark.sql.types.DecimalType
-import org.apache.spark.sql.types.decimal.Decimal
+import org.apache.spark.sql.types.{Decimal, DecimalType}
/**