aboutsummaryrefslogtreecommitdiff
path: root/sql
diff options
context:
space:
mode:
authorSean Owen <sowen@cloudera.com>2016-06-24 10:35:54 +0100
committerSean Owen <sowen@cloudera.com>2016-06-24 10:35:54 +0100
commit158af162eac7348464c6751c8acd48fc6c117688 (patch)
tree34dc5a02a2cda001a84698e52a37092b8ea087ee /sql
parentf4fd7432fb9cf7b197ccada1378c4f2a6d427522 (diff)
downloadspark-158af162eac7348464c6751c8acd48fc6c117688.tar.gz
spark-158af162eac7348464c6751c8acd48fc6c117688.tar.bz2
spark-158af162eac7348464c6751c8acd48fc6c117688.zip
[SPARK-16129][CORE][SQL] Eliminate direct use of commons-lang classes in favor of commons-lang3
## What changes were proposed in this pull request? Replace use of `commons-lang` in favor of `commons-lang3` and forbid the former via scalastyle; remove `NotImplementedException` from `commons-lang` in favor of JDK `UnsupportedOperationException` ## How was this patch tested? Jenkins tests Author: Sean Owen <sowen@cloudera.com> Closes #13843 from srowen/SPARK-16129.
Diffstat (limited to 'sql')
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TimeWindow.scala2
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala2
-rw-r--r--sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java25
-rw-r--r--sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java17
-rw-r--r--sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVectorUtils.java6
-rw-r--r--sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnarBatch.java12
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala2
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryRelation.scala2
-rw-r--r--sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java2
9 files changed, 32 insertions, 38 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TimeWindow.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TimeWindow.scala
index 83fa447cf8..66c4bf29ea 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TimeWindow.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TimeWindow.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.catalyst.expressions
-import org.apache.commons.lang.StringUtils
+import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index 3cc7a1a3ca..072445af4f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -23,7 +23,7 @@ import scala.collection.Map
import scala.collection.mutable.Stack
import scala.reflect.ClassTag
-import org.apache.commons.lang.ClassUtils
+import org.apache.commons.lang3.ClassUtils
import org.json4s.JsonAST._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
index 662a03d3b5..a18b881c78 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.datasources.parquet;
import java.io.IOException;
-import org.apache.commons.lang.NotImplementedException;
import org.apache.parquet.bytes.BytesUtils;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.column.Dictionary;
@@ -228,7 +227,7 @@ public class VectorizedColumnReader {
column.putShort(i, (short) dictionary.decodeToInt(dictionaryIds.getInt(i)));
}
} else {
- throw new NotImplementedException("Unimplemented type: " + column.dataType());
+ throw new UnsupportedOperationException("Unimplemented type: " + column.dataType());
}
break;
@@ -239,7 +238,7 @@ public class VectorizedColumnReader {
column.putLong(i, dictionary.decodeToLong(dictionaryIds.getInt(i)));
}
} else {
- throw new NotImplementedException("Unimplemented type: " + column.dataType());
+ throw new UnsupportedOperationException("Unimplemented type: " + column.dataType());
}
break;
@@ -262,7 +261,7 @@ public class VectorizedColumnReader {
column.putLong(i, ParquetRowConverter.binaryToSQLTimestamp(v));
}
} else {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
break;
case BINARY:
@@ -293,12 +292,12 @@ public class VectorizedColumnReader {
column.putByteArray(i, v.getBytes());
}
} else {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
break;
default:
- throw new NotImplementedException("Unsupported type: " + descriptor.getType());
+ throw new UnsupportedOperationException("Unsupported type: " + descriptor.getType());
}
}
@@ -327,7 +326,7 @@ public class VectorizedColumnReader {
defColumn.readShorts(
num, column, rowId, maxDefLevel, (VectorizedValuesReader) dataColumn);
} else {
- throw new NotImplementedException("Unimplemented type: " + column.dataType());
+ throw new UnsupportedOperationException("Unimplemented type: " + column.dataType());
}
}
@@ -360,7 +359,7 @@ public class VectorizedColumnReader {
defColumn.readDoubles(
num, column, rowId, maxDefLevel, (VectorizedValuesReader) dataColumn);
} else {
- throw new NotImplementedException("Unimplemented type: " + column.dataType());
+ throw new UnsupportedOperationException("Unimplemented type: " + column.dataType());
}
}
@@ -381,7 +380,7 @@ public class VectorizedColumnReader {
}
}
} else {
- throw new NotImplementedException("Unimplemented type: " + column.dataType());
+ throw new UnsupportedOperationException("Unimplemented type: " + column.dataType());
}
}
@@ -417,7 +416,7 @@ public class VectorizedColumnReader {
}
}
} else {
- throw new NotImplementedException("Unimplemented type: " + column.dataType());
+ throw new UnsupportedOperationException("Unimplemented type: " + column.dataType());
}
}
@@ -459,13 +458,13 @@ public class VectorizedColumnReader {
@SuppressWarnings("deprecation")
Encoding plainDict = Encoding.PLAIN_DICTIONARY; // var to allow warning suppression
if (dataEncoding != plainDict && dataEncoding != Encoding.RLE_DICTIONARY) {
- throw new NotImplementedException("Unsupported encoding: " + dataEncoding);
+ throw new UnsupportedOperationException("Unsupported encoding: " + dataEncoding);
}
this.dataColumn = new VectorizedRleValuesReader();
this.useDictionary = true;
} else {
if (dataEncoding != Encoding.PLAIN) {
- throw new NotImplementedException("Unsupported encoding: " + dataEncoding);
+ throw new UnsupportedOperationException("Unsupported encoding: " + dataEncoding);
}
this.dataColumn = new VectorizedPlainValuesReader();
this.useDictionary = false;
@@ -485,7 +484,7 @@ public class VectorizedColumnReader {
// Initialize the decoders.
if (page.getDlEncoding() != Encoding.RLE && descriptor.getMaxDefinitionLevel() != 0) {
- throw new NotImplementedException("Unsupported encoding: " + page.getDlEncoding());
+ throw new UnsupportedOperationException("Unsupported encoding: " + page.getDlEncoding());
}
int bitWidth = BytesUtils.getWidthFromMaxInt(descriptor.getMaxDefinitionLevel());
this.defColumn = new VectorizedRleValuesReader(bitWidth);
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java
index 80c84b1336..bbbb796aca 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVector.java
@@ -20,7 +20,6 @@ import java.math.BigDecimal;
import java.math.BigInteger;
import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.NotImplementedException;
import org.apache.parquet.column.Dictionary;
import org.apache.parquet.io.api.Binary;
@@ -100,7 +99,7 @@ public abstract class ColumnVector implements AutoCloseable {
@Override
public ArrayData copy() {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
// TODO: this is extremely expensive.
@@ -171,7 +170,7 @@ public abstract class ColumnVector implements AutoCloseable {
}
}
} else {
- throw new NotImplementedException("Type " + dt);
+ throw new UnsupportedOperationException("Type " + dt);
}
return list;
}
@@ -181,7 +180,7 @@ public abstract class ColumnVector implements AutoCloseable {
@Override
public boolean getBoolean(int ordinal) {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
@Override
@@ -189,7 +188,7 @@ public abstract class ColumnVector implements AutoCloseable {
@Override
public short getShort(int ordinal) {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
@Override
@@ -200,7 +199,7 @@ public abstract class ColumnVector implements AutoCloseable {
@Override
public float getFloat(int ordinal) {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
@Override
@@ -240,12 +239,12 @@ public abstract class ColumnVector implements AutoCloseable {
@Override
public MapData getMap(int ordinal) {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
@Override
public Object get(int ordinal, DataType dataType) {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
}
@@ -562,7 +561,7 @@ public abstract class ColumnVector implements AutoCloseable {
* Returns the value for rowId.
*/
public MapData getMap(int ordinal) {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
/**
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVectorUtils.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVectorUtils.java
index f50c35fc64..2fa476b9cf 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVectorUtils.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVectorUtils.java
@@ -23,8 +23,6 @@ import java.sql.Date;
import java.util.Iterator;
import java.util.List;
-import org.apache.commons.lang.NotImplementedException;
-
import org.apache.spark.memory.MemoryMode;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.catalyst.InternalRow;
@@ -112,7 +110,7 @@ public class ColumnVectorUtils {
}
return result;
} else {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
}
@@ -161,7 +159,7 @@ public class ColumnVectorUtils {
} else if (t instanceof DateType) {
dst.appendInt(DateTimeUtils.fromJavaDate((Date)o));
} else {
- throw new NotImplementedException("Type " + t);
+ throw new UnsupportedOperationException("Type " + t);
}
}
}
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnarBatch.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnarBatch.java
index 8cece73faa..f3afa8f938 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnarBatch.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnarBatch.java
@@ -19,8 +19,6 @@ package org.apache.spark.sql.execution.vectorized;
import java.math.BigDecimal;
import java.util.*;
-import org.apache.commons.lang.NotImplementedException;
-
import org.apache.spark.memory.MemoryMode;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.GenericMutableRow;
@@ -166,7 +164,7 @@ public final class ColumnarBatch {
@Override
public boolean anyNull() {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
@Override
@@ -227,12 +225,12 @@ public final class ColumnarBatch {
@Override
public MapData getMap(int ordinal) {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
@Override
public Object get(int ordinal, DataType dataType) {
- throw new NotImplementedException();
+ throw new UnsupportedOperationException();
}
@Override
@@ -258,7 +256,7 @@ public final class ColumnarBatch {
setDecimal(ordinal, Decimal.apply((BigDecimal) value, t.precision(), t.scale()),
t.precision());
} else {
- throw new NotImplementedException("Datatype not supported " + dt);
+ throw new UnsupportedOperationException("Datatype not supported " + dt);
}
}
}
@@ -430,7 +428,7 @@ public final class ColumnarBatch {
*/
public void setColumn(int ordinal, ColumnVector column) {
if (column instanceof OffHeapColumnVector) {
- throw new NotImplementedException("Need to ref count columns.");
+ throw new UnsupportedOperationException("Need to ref count columns.");
}
columns[ordinal] = column;
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala
index e2c23a4ba8..09203e6998 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.execution
-import org.apache.commons.lang.StringUtils
+import org.apache.commons.lang3.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{AnalysisException, Row, SparkSession, SQLContext}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryRelation.scala
index 02866c76cb..079e122a5a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryRelation.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.columnar
import scala.collection.JavaConverters._
-import org.apache.commons.lang.StringUtils
+import org.apache.commons.lang3.StringUtils
import org.apache.spark.network.util.JavaUtils
import org.apache.spark.rdd.RDD
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index cc3e807e7a..47bfaa8602 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -29,7 +29,7 @@ import java.util.Map;
import java.util.Set;
import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.common.cli.HiveFileProcessor;