author    Daoyuan Wang <daoyuan.wang@intel.com>  2015-01-14 09:36:59 -0800
committer Reynold Xin <rxin@databricks.com>  2015-01-14 09:36:59 -0800
commit a3f7421b42f45e39f3e53679188e4eae2ed1f208 (patch)
tree   f33d6593e0ead692e7845fb0a4f24088a78fbcac /sql
parent d5eeb35167e1ab72fab7778757163ff0aacaef2c (diff)
[SPARK-5248] [SQL] move sql.types.decimal.Decimal to sql.types.Decimal
rxin follow up of #3732

Author: Daoyuan Wang <daoyuan.wang@intel.com>

Closes #4041 from adrian-wang/decimal and squashes the following commits:

aa3d738 [Daoyuan Wang] fix auto refactor
7777a58 [Daoyuan Wang] move sql.types.decimal.Decimal to sql.types.Decimal
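For code that used Decimal, the visible effect of this patch is a one-line import change; the class itself is unchanged. A minimal sketch of the before/after usage (the wrapper object here is illustrative, not part of the patch):

    // Before this patch:
    // import org.apache.spark.sql.types.decimal.Decimal
    // After this patch, Decimal lives directly in the types package:
    import org.apache.spark.sql.types.Decimal

    object DecimalMoveExample {
      def main(args: Array[String]): Unit = {
        val d = Decimal(1.23)     // same companion apply as before the move
        println(d.toBigDecimal)   // 1.23
      }
    }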
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala  1
-rwxr-xr-x  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala  1
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala  1
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala  7
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala  3
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala  1
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala  1
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala (renamed from sql/catalyst/src/main/scala/org/apache/spark/sql/types/decimal/Decimal.scala)  4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/dataTypes.scala  1
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala  1
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala  1
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlSerializer.scala  3
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala  1
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetConverter.scala  1
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableSupport.scala  1
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala  1
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala  3
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala  1
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala  1
-rw-r--r--  sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala  3
-rw-r--r--  sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala  3
22 files changed, 13 insertions, 29 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index d169da691d..697bacfedc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -23,7 +23,6 @@ import org.apache.spark.util.Utils
import org.apache.spark.sql.catalyst.expressions.{GenericRow, Attribute, AttributeReference, Row}
import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
/**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
index bdac7504ed..8bc36a238d 100755
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
@@ -27,7 +27,6 @@ import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.{Inner, JoinType}
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
/**
* A collection of implicit conversions that create a DSL for constructing catalyst data structures.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 00961f0991..1a2133bbbc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -23,7 +23,6 @@ import java.text.{DateFormat, SimpleDateFormat}
import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.errors.TreeNodeException
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
/** Cast the child expression to the target data type. */
case class Cast(child: Expression, dataType: DataType) extends UnaryExpression with Logging {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index a5d6423391..4cae5c4718 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -18,7 +18,6 @@
package org.apache.spark.sql.catalyst.expressions.codegen
import com.google.common.cache.{CacheLoader, CacheBuilder}
-import org.apache.spark.sql.types.decimal.Decimal
import scala.language.existentials
@@ -541,11 +540,11 @@ abstract class CodeGenerator[InType <: AnyRef, OutType <: AnyRef] extends Loggin
childEval.code ++
q"""
var $nullTerm = ${childEval.nullTerm}
- var $primitiveTerm: org.apache.spark.sql.types.decimal.Decimal =
+ var $primitiveTerm: org.apache.spark.sql.types.Decimal =
${defaultPrimitive(DecimalType())}
if (!$nullTerm) {
- $primitiveTerm = new org.apache.spark.sql.types.decimal.Decimal()
+ $primitiveTerm = new org.apache.spark.sql.types.Decimal()
$primitiveTerm = $primitiveTerm.setOrNull(${childEval.primitiveTerm}, $precision, $scale)
$nullTerm = $primitiveTerm == null
}
@@ -627,7 +626,7 @@ abstract class CodeGenerator[InType <: AnyRef, OutType <: AnyRef] extends Loggin
case LongType => ru.Literal(Constant(1L))
case ByteType => ru.Literal(Constant(-1.toByte))
case DoubleType => ru.Literal(Constant(-1.toDouble))
- case DecimalType() => q"org.apache.spark.sql.types.decimal.Decimal(-1)"
+ case DecimalType() => q"org.apache.spark.sql.types.Decimal(-1)"
case IntegerType => ru.Literal(Constant(-1))
case _ => ru.Literal(Constant(null))
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
index e54cfa144a..83d8c1d42b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
@@ -17,8 +17,7 @@
package org.apache.spark.sql.catalyst.expressions
-import org.apache.spark.sql.types.decimal.Decimal
-import org.apache.spark.sql.types.{DecimalType, LongType, DoubleType, DataType}
+import org.apache.spark.sql.types._
/** Return the unscaled Long value of a Decimal, assuming it fits in a Long */
case class UnscaledValue(child: Expression) extends UnaryExpression {
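To make the doc comment above concrete: a Decimal with scale s stores its digits as an unscaled Long when they fit, and UnscaledValue exposes that long. A hedged sketch using the Decimal companion directly (the Catalyst expression itself is evaluated inside query plans, so this only illustrates the underlying value):

    import org.apache.spark.sql.types.Decimal

    // 1.23 at precision 5, scale 2 is backed by the unscaled long 123.
    val d = Decimal(123L, 5, 2)
    println(d.toString)        // 1.23
    println(d.toUnscaledLong)  // 123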
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 8ee4bbd8ca..c94a947fb2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -20,7 +20,6 @@ package org.apache.spark.sql.catalyst.expressions
import java.sql.{Date, Timestamp}
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
object Literal {
def apply(v: Any): Literal = v match {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index 17b4f9c23a..d4a4c35691 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -27,7 +27,6 @@ import org.apache.spark.sql.catalyst.plans.LeftSemi
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules._
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
abstract class Optimizer extends RuleExecutor[LogicalPlan]
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
index 2a8914cde2..08bb933a2b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.types
import java.text.SimpleDateFormat
import org.apache.spark.sql.catalyst.ScalaReflection
-import org.apache.spark.sql.types.decimal.Decimal
+import org.apache.spark.sql.types.Decimal
protected[sql] object DataTypeConversions {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/decimal/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index c7864d1ae9..3744d77c07 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/decimal/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.spark.sql.types.decimal
+package org.apache.spark.sql.types
import org.apache.spark.annotation.DeveloperApi
@@ -28,7 +28,7 @@ import org.apache.spark.annotation.DeveloperApi
* - Otherwise, the decimal value is longVal / (10 ** _scale)
*/
final class Decimal extends Ordered[Decimal] with Serializable {
- import Decimal.{MAX_LONG_DIGITS, POW_10, ROUNDING_MODE, BIG_DEC_ZERO}
+ import org.apache.spark.sql.types.Decimal.{BIG_DEC_ZERO, MAX_LONG_DIGITS, POW_10, ROUNDING_MODE}
private var decimalVal: BigDecimal = null
private var longVal: Long = 0L
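The two fields above carry the dual representation the class comment describes: values whose digits fit in a Long are held in longVal with decimalVal left null, and the decimal value is then longVal / 10 ** _scale; anything larger falls back to a BigDecimal in decimalVal. A small hedged sketch of the observable behavior (the constant and field names come from this file; the example itself is illustrative):

    import org.apache.spark.sql.types.Decimal

    // Fits in a Long: 12.34 at scale 2 is held internally as longVal = 1234.
    val small = Decimal(1234L, 4, 2)
    assert(small.toString == "12.34")

    // More than MAX_LONG_DIGITS (18) digits: held as a BigDecimal instead.
    val big = Decimal(BigDecimal("12345678901234567890.12"))
    assert(big.toBigDecimal == BigDecimal("12345678901234567890.12"))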
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/dataTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/dataTypes.scala
index e38ad63f2e..e1cbe6650a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/dataTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/dataTypes.scala
@@ -32,7 +32,6 @@ import org.json4s.jackson.JsonMethods._
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.sql.catalyst.ScalaReflectionLock
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Expression}
-import org.apache.spark.sql.types.decimal._
import org.apache.spark.util.Utils
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
index 8552448b8d..37e64adeea 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -27,7 +27,6 @@ import org.scalatest.Matchers._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
class ExpressionEvaluationSuite extends FunSuite {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
index 813377df00..de6a2cd448 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/decimal/DecimalSuite.scala
@@ -17,6 +17,7 @@
package org.apache.spark.sql.types.decimal
+import org.apache.spark.sql.types.Decimal
import org.scalatest.{PrivateMethodTester, FunSuite}
import scala.language.postfixOps
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlSerializer.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlSerializer.scala
index 131146012e..7a0249137a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlSerializer.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlSerializer.scala
@@ -19,6 +19,8 @@ package org.apache.spark.sql.execution
import java.nio.ByteBuffer
+import org.apache.spark.sql.types.Decimal
+
import scala.reflect.ClassTag
import com.clearspring.analytics.stream.cardinality.HyperLogLog
@@ -29,7 +31,6 @@ import com.twitter.chill.{AllScalaRegistrar, ResourcePool}
import org.apache.spark.{SparkEnv, SparkConf}
import org.apache.spark.serializer.{SerializerInstance, KryoSerializer}
import org.apache.spark.sql.catalyst.expressions.GenericRow
-import org.apache.spark.sql.types.decimal.Decimal
import org.apache.spark.util.collection.OpenHashSet
import org.apache.spark.util.MutablePair
import org.apache.spark.util.Utils
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
index c92ec543e2..453b560ff8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/json/JsonRDD.scala
@@ -33,7 +33,6 @@ import org.apache.spark.sql.catalyst.analysis.HiveTypeCoercion
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.ScalaReflection
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
import org.apache.spark.Logging
private[sql] object JsonRDD extends Logging {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetConverter.scala b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetConverter.scala
index 7f437c4077..b4aed04199 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetConverter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetConverter.scala
@@ -25,7 +25,6 @@ import parquet.schema.MessageType
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.parquet.CatalystConverter.FieldType
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
/**
* Collection of converters of Parquet types (group and primitive types) that
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableSupport.scala b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableSupport.scala
index af7248fdf4..fd63ad8144 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableSupport.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableSupport.scala
@@ -30,7 +30,6 @@ import parquet.schema.MessageType
import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.expressions.{Attribute, Row}
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
/**
* A `parquet.io.api.RecordMaterializer` for Rows.
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
index 01c1ce2a61..1dd85a3bb4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
@@ -25,7 +25,6 @@ import org.apache.spark.sql.json.JsonRDD.{compatibleType, enforceCorrectType}
import org.apache.spark.sql.test.TestSQLContext
import org.apache.spark.sql.test.TestSQLContext._
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
import org.apache.spark.sql.{QueryTest, Row, SQLConf}
class JsonSuite extends QueryTest {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
index 245b847cf4..5140d2064c 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
@@ -26,7 +26,6 @@ import org.apache.hadoop.{io => hadoopIo}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
/* Implicit conversions */
import scala.collection.JavaConversions._
@@ -43,7 +42,7 @@ import scala.collection.JavaConversions._
* long / scala.Long
* short / scala.Short
* byte / scala.Byte
- * org.apache.spark.sql.types.decimal.Decimal
+ * org.apache.spark.sql.types.Decimal
* Array[Byte]
* java.sql.Date
* java.sql.Timestamp
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index b13ef7276b..5e29e57d93 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -35,7 +35,6 @@ import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.execution.ExplainCommand
import org.apache.spark.sql.hive.execution.{HiveNativeCommand, DropTable, AnalyzeTable}
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
/* Implicit conversions */
import scala.collection.JavaConversions._
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
index dc23d9a101..4864607252 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
@@ -31,7 +31,6 @@ import org.scalatest.FunSuite
import org.apache.spark.sql.catalyst.expressions.{Literal, Row}
import org.apache.spark.sql.types._
-import org.apache.spark.sql.types.decimal.Decimal
class HiveInspectorSuite extends FunSuite with HiveInspectors {
test("Test wrap SettableStructObjectInspector") {
diff --git a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
index a5587460fd..58417a15bb 100644
--- a/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
+++ b/sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala
@@ -41,8 +41,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.{TypeInfo, TypeInfoFactory}
import org.apache.hadoop.io.NullWritable
import org.apache.hadoop.mapred.InputFormat
-import org.apache.spark.sql.types.DecimalType
-import org.apache.spark.sql.types.decimal.Decimal
+import org.apache.spark.sql.types.{Decimal, DecimalType}
case class HiveFunctionWrapper(functionClassName: String) extends java.io.Serializable {
// for Serialization
diff --git a/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala b/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
index a7121360dd..1f768ca971 100644
--- a/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
+++ b/sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala
@@ -42,8 +42,7 @@ import org.apache.hadoop.hive.serde2.{io => hiveIo}
import org.apache.hadoop.{io => hadoopIo}
import org.apache.spark.Logging
-import org.apache.spark.sql.types.DecimalType
-import org.apache.spark.sql.types.decimal.Decimal
+import org.apache.spark.sql.types.{Decimal, DecimalType}
/**