author    Reynold Xin <rxin@databricks.com>  2016-01-04 18:02:38 -0800
committer Michael Armbrust <michael@databricks.com>  2016-01-04 18:02:38 -0800
commit    77ab49b8575d2ebd678065fa70b0343d532ab9c2 (patch)
tree      f9c4a990499d1856494f787f8bfc095d68a69735 /sql/catalyst
parent    fdfac22d08fc4fdc640843dd93a29e2ce4aee2ef (diff)
[SPARK-12600][SQL] Remove deprecated methods in Spark SQL
Author: Reynold Xin <rxin@databricks.com>

Closes #10559 from rxin/remove-deprecated-sql.
Diffstat (limited to 'sql/catalyst')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/LegacyTypeStringParser.scala  92
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala  79
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala  36
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala  11
4 files changed, 99 insertions(+), 119 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/LegacyTypeStringParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/LegacyTypeStringParser.scala
new file mode 100644
index 0000000000..e27cf9c198
--- /dev/null
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/LegacyTypeStringParser.scala
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.util
+
+import scala.util.parsing.combinator.RegexParsers
+
+import org.apache.spark.sql.types._
+
+/**
+ * Parser that turns case class strings into datatypes. This is only here to maintain compatibility
+ * with Parquet files written by Spark 1.1 and below.
+ */
+object LegacyTypeStringParser extends RegexParsers {
+
+ protected lazy val primitiveType: Parser[DataType] =
+ ( "StringType" ^^^ StringType
+ | "FloatType" ^^^ FloatType
+ | "IntegerType" ^^^ IntegerType
+ | "ByteType" ^^^ ByteType
+ | "ShortType" ^^^ ShortType
+ | "DoubleType" ^^^ DoubleType
+ | "LongType" ^^^ LongType
+ | "BinaryType" ^^^ BinaryType
+ | "BooleanType" ^^^ BooleanType
+ | "DateType" ^^^ DateType
+ | "DecimalType()" ^^^ DecimalType.USER_DEFAULT
+ | fixedDecimalType
+ | "TimestampType" ^^^ TimestampType
+ )
+
+ protected lazy val fixedDecimalType: Parser[DataType] =
+ ("DecimalType(" ~> "[0-9]+".r) ~ ("," ~> "[0-9]+".r <~ ")") ^^ {
+ case precision ~ scale => DecimalType(precision.toInt, scale.toInt)
+ }
+
+ protected lazy val arrayType: Parser[DataType] =
+ "ArrayType" ~> "(" ~> dataType ~ "," ~ boolVal <~ ")" ^^ {
+ case tpe ~ _ ~ containsNull => ArrayType(tpe, containsNull)
+ }
+
+ protected lazy val mapType: Parser[DataType] =
+ "MapType" ~> "(" ~> dataType ~ "," ~ dataType ~ "," ~ boolVal <~ ")" ^^ {
+ case t1 ~ _ ~ t2 ~ _ ~ valueContainsNull => MapType(t1, t2, valueContainsNull)
+ }
+
+ protected lazy val structField: Parser[StructField] =
+ ("StructField(" ~> "[a-zA-Z0-9_]*".r) ~ ("," ~> dataType) ~ ("," ~> boolVal <~ ")") ^^ {
+ case name ~ tpe ~ nullable =>
+ StructField(name, tpe, nullable = nullable)
+ }
+
+ protected lazy val boolVal: Parser[Boolean] =
+ ( "true" ^^^ true
+ | "false" ^^^ false
+ )
+
+ protected lazy val structType: Parser[DataType] =
+ "StructType\\([A-zA-z]*\\(".r ~> repsep(structField, ",") <~ "))" ^^ {
+ case fields => StructType(fields)
+ }
+
+ protected lazy val dataType: Parser[DataType] =
+ ( arrayType
+ | mapType
+ | structType
+ | primitiveType
+ )
+
+ /**
+ * Parses a string representation of a DataType.
+ */
+ def parse(asString: String): DataType = parseAll(dataType, asString) match {
+ case Success(result, _) => result
+ case failure: NoSuccess =>
+ throw new IllegalArgumentException(s"Unsupported dataType: $asString, $failure")
+ }
+}
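
For illustration, a minimal sketch of what the new parser accepts, assuming the object is used as-is from org.apache.spark.sql.catalyst.util; the input strings are hypothetical examples of the Spark 1.1-era case-class format, not taken from this commit:

  import org.apache.spark.sql.catalyst.util.LegacyTypeStringParser
  import org.apache.spark.sql.types._

  // Primitive and parameterized types in the legacy string format.
  val arr = LegacyTypeStringParser.parse("ArrayType(IntegerType,true)")  // ArrayType(IntegerType, true)
  val dec = LegacyTypeStringParser.parse("DecimalType(10,2)")            // DecimalType(10, 2)

  // A legacy struct string, as written into Parquet metadata by Spark 1.1 and below.
  val struct = LegacyTypeStringParser.parse(
    "StructType(List(StructField(name,StringType,true), StructField(age,IntegerType,false)))")
  // StructType(Seq(StructField("name", StringType, nullable = true),
  //                StructField("age", IntegerType, nullable = false)))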
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
index f8d71c5f02..301b3a70f6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
@@ -17,9 +17,6 @@
package org.apache.spark.sql.types
-import scala.util.Try
-import scala.util.parsing.combinator.RegexParsers
-
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s._
@@ -94,18 +91,9 @@ abstract class DataType extends AbstractDataType {
object DataType {
- private[sql] def fromString(raw: String): DataType = {
- Try(DataType.fromJson(raw)).getOrElse(DataType.fromCaseClassString(raw))
- }
def fromJson(json: String): DataType = parseDataType(parse(json))
- /**
- * @deprecated As of 1.2.0, replaced by `DataType.fromJson()`
- */
- @deprecated("Use DataType.fromJson instead", "1.2.0")
- def fromCaseClassString(string: String): DataType = CaseClassStringParser(string)
-
private val nonDecimalNameToType = {
Seq(NullType, DateType, TimestampType, BinaryType, IntegerType, BooleanType, LongType,
DoubleType, FloatType, ShortType, ByteType, StringType, CalendarIntervalType)
@@ -184,73 +172,6 @@ object DataType {
StructField(name, parseDataType(dataType), nullable)
}
- private object CaseClassStringParser extends RegexParsers {
- protected lazy val primitiveType: Parser[DataType] =
- ( "StringType" ^^^ StringType
- | "FloatType" ^^^ FloatType
- | "IntegerType" ^^^ IntegerType
- | "ByteType" ^^^ ByteType
- | "ShortType" ^^^ ShortType
- | "DoubleType" ^^^ DoubleType
- | "LongType" ^^^ LongType
- | "BinaryType" ^^^ BinaryType
- | "BooleanType" ^^^ BooleanType
- | "DateType" ^^^ DateType
- | "DecimalType()" ^^^ DecimalType.USER_DEFAULT
- | fixedDecimalType
- | "TimestampType" ^^^ TimestampType
- )
-
- protected lazy val fixedDecimalType: Parser[DataType] =
- ("DecimalType(" ~> "[0-9]+".r) ~ ("," ~> "[0-9]+".r <~ ")") ^^ {
- case precision ~ scale => DecimalType(precision.toInt, scale.toInt)
- }
-
- protected lazy val arrayType: Parser[DataType] =
- "ArrayType" ~> "(" ~> dataType ~ "," ~ boolVal <~ ")" ^^ {
- case tpe ~ _ ~ containsNull => ArrayType(tpe, containsNull)
- }
-
- protected lazy val mapType: Parser[DataType] =
- "MapType" ~> "(" ~> dataType ~ "," ~ dataType ~ "," ~ boolVal <~ ")" ^^ {
- case t1 ~ _ ~ t2 ~ _ ~ valueContainsNull => MapType(t1, t2, valueContainsNull)
- }
-
- protected lazy val structField: Parser[StructField] =
- ("StructField(" ~> "[a-zA-Z0-9_]*".r) ~ ("," ~> dataType) ~ ("," ~> boolVal <~ ")") ^^ {
- case name ~ tpe ~ nullable =>
- StructField(name, tpe, nullable = nullable)
- }
-
- protected lazy val boolVal: Parser[Boolean] =
- ( "true" ^^^ true
- | "false" ^^^ false
- )
-
- protected lazy val structType: Parser[DataType] =
- "StructType\\([A-zA-z]*\\(".r ~> repsep(structField, ",") <~ "))" ^^ {
- case fields => StructType(fields)
- }
-
- protected lazy val dataType: Parser[DataType] =
- ( arrayType
- | mapType
- | structType
- | primitiveType
- )
-
- /**
- * Parses a string representation of a DataType.
- *
- * TODO: Generate parser as pickler...
- */
- def apply(asString: String): DataType = parseAll(dataType, asString) match {
- case Success(result, _) => result
- case failure: NoSuccess =>
- throw new IllegalArgumentException(s"Unsupported dataType: $asString, $failure")
- }
- }
-
protected[types] def buildFormattedString(
dataType: DataType,
prefix: String,
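
Callers that relied on the removed DataType.fromCaseClassString are expected to use DataType.fromJson instead. A minimal sketch, assuming the standard JSON schema representation used by Spark (verify the exact shape against the target version):

  import org.apache.spark.sql.types._

  // Simple types serialize as a bare JSON string.
  val i = DataType.fromJson("\"integer\"")   // IntegerType

  // Struct types serialize as a JSON object with a "fields" array.
  val s = DataType.fromJson(
    """{"type":"struct","fields":[
      |  {"name":"a","type":"integer","nullable":true,"metadata":{}}]}""".stripMargin)
  // StructType(Seq(StructField("a", IntegerType, nullable = true)))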
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
index ce45245b9f..fdae2e82a0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
@@ -20,25 +20,10 @@ package org.apache.spark.sql.types
import scala.reflect.runtime.universe.typeTag
import org.apache.spark.annotation.DeveloperApi
-import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.ScalaReflectionLock
import org.apache.spark.sql.catalyst.expressions.Expression
-/** Precision parameters for a Decimal */
-@deprecated("Use DecimalType(precision, scale) directly", "1.5")
-case class PrecisionInfo(precision: Int, scale: Int) {
- if (scale > precision) {
- throw new AnalysisException(
- s"Decimal scale ($scale) cannot be greater than precision ($precision).")
- }
- if (precision > DecimalType.MAX_PRECISION) {
- throw new AnalysisException(
- s"DecimalType can only support precision up to 38"
- )
- }
-}
-
/**
* :: DeveloperApi ::
* The data type representing `java.math.BigDecimal` values.
@@ -58,15 +43,6 @@ case class DecimalType(precision: Int, scale: Int) extends FractionalType {
def this(precision: Int) = this(precision, 0)
def this() = this(10)
- @deprecated("Use DecimalType(precision, scale) instead", "1.5")
- def this(precisionInfo: Option[PrecisionInfo]) {
- this(precisionInfo.getOrElse(PrecisionInfo(10, 0)).precision,
- precisionInfo.getOrElse(PrecisionInfo(10, 0)).scale)
- }
-
- @deprecated("Use DecimalType.precision and DecimalType.scale instead", "1.5")
- val precisionInfo = Some(PrecisionInfo(precision, scale))
-
private[sql] type InternalType = Decimal
@transient private[sql] lazy val tag = ScalaReflectionLock.synchronized { typeTag[InternalType] }
private[sql] val numeric = Decimal.DecimalIsFractional
@@ -122,9 +98,6 @@ object DecimalType extends AbstractDataType {
val SYSTEM_DEFAULT: DecimalType = DecimalType(MAX_PRECISION, 18)
val USER_DEFAULT: DecimalType = DecimalType(10, 0)
- @deprecated("Does not support unlimited precision, please specify the precision and scale", "1.5")
- val Unlimited: DecimalType = SYSTEM_DEFAULT
-
// The decimal types compatible with other numeric types
private[sql] val ByteDecimal = DecimalType(3, 0)
private[sql] val ShortDecimal = DecimalType(5, 0)
@@ -142,15 +115,6 @@ object DecimalType extends AbstractDataType {
case DoubleType => DoubleDecimal
}
- @deprecated("please specify precision and scale", "1.5")
- def apply(): DecimalType = USER_DEFAULT
-
- @deprecated("Use DecimalType(precision, scale) instead", "1.5")
- def apply(precisionInfo: Option[PrecisionInfo]) {
- this(precisionInfo.getOrElse(PrecisionInfo(10, 0)).precision,
- precisionInfo.getOrElse(PrecisionInfo(10, 0)).scale)
- }
-
private[sql] def bounded(precision: Int, scale: Int): DecimalType = {
DecimalType(min(precision, MAX_PRECISION), min(scale, MAX_SCALE))
}
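
For code that still used the deprecated DecimalType constructors, the replacements follow directly from the removed definitions above; a short sketch:

  import org.apache.spark.sql.types.DecimalType

  // Removed in this commit                  ->  Replacement
  // DecimalType(Some(PrecisionInfo(10, 2)))     DecimalType(10, 2)
  // DecimalType()                               DecimalType.USER_DEFAULT    // DecimalType(10, 0)
  // DecimalType.Unlimited                       DecimalType.SYSTEM_DEFAULT  // DecimalType(38, 18)
  val price = DecimalType(10, 2)
  val widest = DecimalType.SYSTEM_DEFAULT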
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
index d568022765..34382bf124 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
@@ -18,13 +18,14 @@
package org.apache.spark.sql.types
import scala.collection.mutable.ArrayBuffer
+import scala.util.Try
import org.json4s.JsonDSL._
import org.apache.spark.SparkException
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, InterpretedOrdering}
-import org.apache.spark.sql.catalyst.util.DataTypeParser
+import org.apache.spark.sql.catalyst.util.{LegacyTypeStringParser, DataTypeParser}
/**
@@ -337,9 +338,11 @@ object StructType extends AbstractDataType {
override private[sql] def simpleString: String = "struct"
- private[sql] def fromString(raw: String): StructType = DataType.fromString(raw) match {
- case t: StructType => t
- case _ => throw new RuntimeException(s"Failed parsing StructType: $raw")
+ private[sql] def fromString(raw: String): StructType = {
+ Try(DataType.fromJson(raw)).getOrElse(LegacyTypeStringParser.parse(raw)) match {
+ case t: StructType => t
+ case _ => throw new RuntimeException(s"Failed parsing StructType: $raw")
+ }
}
def apply(fields: Seq[StructField]): StructType = StructType(fields.toArray)
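
The same JSON-first, legacy-fallback pattern can be written as a standalone helper; parseSchema below is a hypothetical name used only for illustration and mirrors the private fromString above:

  import scala.util.Try
  import org.apache.spark.sql.catalyst.util.LegacyTypeStringParser
  import org.apache.spark.sql.types.{DataType, StructType}

  // Try the JSON schema representation first, then fall back to the legacy
  // case-class string written by Spark 1.1 and below.
  def parseSchema(raw: String): StructType =
    Try(DataType.fromJson(raw)).getOrElse(LegacyTypeStringParser.parse(raw)) match {
      case t: StructType => t
      case _ => throw new RuntimeException(s"Failed parsing StructType: $raw")
    }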