author     Sean Owen <sowen@cloudera.com>  2017-04-10 20:11:56 +0100
committer  Sean Owen <sowen@cloudera.com>  2017-04-10 20:11:56 +0100
commit     a26e3ed5e414d0a350cfe65dd511b154868b9f1d (patch)
tree       f8bf8feabae7acdd5b2c29e38273fddb80e3de33 /sql/hive-thriftserver
parent     fd711ea13e558f0e7d3e01f08e01444d394499a6 (diff)
[SPARK-20156][CORE][SQL][STREAMING][MLLIB] Java String toLowerCase "Turkish locale bug" causes Spark problems
## What changes were proposed in this pull request?

Add `Locale.ROOT` to internal calls to String `toLowerCase` and `toUpperCase`, to avoid inadvertent locale-sensitive variation in behavior (aka the "Turkish locale problem"). The change looks large, but it is just adding `Locale.ROOT` (the locale with no country or language specified) to every call to these methods.

## How was this patch tested?

Existing tests.

Author: Sean Owen <sowen@cloudera.com>

Closes #17527 from srowen/SPARK-20156.
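To illustrate the failure mode this patch guards against, here is a minimal, self-contained Java sketch (not part of the commit) showing how a default Turkish locale breaks case folding of plain ASCII keywords such as the `quit` check in `SparkSQLCLIDriver`, and how `Locale.ROOT` avoids it:

```java
import java.util.Locale;

public class TurkishLocaleDemo {
  public static void main(String[] args) {
    // In the Turkish locale, uppercase 'I' lower-cases to dotless 'ı' (U+0131),
    // so locale-sensitive case folding of ASCII input is not what the code expects.
    Locale turkish = new Locale("tr", "TR");

    // Locale-sensitive call: "QUIT" becomes "quıt", which does NOT equal "quit".
    System.out.println("QUIT".toLowerCase(turkish).equals("quit"));      // false

    // Locale.ROOT pins the locale-neutral mapping, so the comparison behaves as intended.
    System.out.println("QUIT".toLowerCase(Locale.ROOT).equals("quit"));  // true
  }
}
```

The hunks below apply the same `Locale.ROOT` fix uniformly to the Thrift server's SSL protocol blacklist, SASL QOP parsing, type-name matching, transport-mode check, and CLI command handling.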
Diffstat (limited to 'sql/hive-thriftserver')
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java | 5
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/SaslQOP.java | 3
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/Type.java | 3
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala | 2
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala | 4
5 files changed, 10 insertions, 7 deletions
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
index 1e6ac4f3df..c5ade65283 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -24,6 +24,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import javax.net.ssl.SSLServerSocket;
@@ -259,12 +260,12 @@ public class HiveAuthFactory {
if (thriftServerSocket.getServerSocket() instanceof SSLServerSocket) {
List<String> sslVersionBlacklistLocal = new ArrayList<String>();
for (String sslVersion : sslVersionBlacklist) {
- sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase());
+ sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase(Locale.ROOT));
}
SSLServerSocket sslServerSocket = (SSLServerSocket) thriftServerSocket.getServerSocket();
List<String> enabledProtocols = new ArrayList<String>();
for (String protocol : sslServerSocket.getEnabledProtocols()) {
- if (sslVersionBlacklistLocal.contains(protocol.toLowerCase())) {
+ if (sslVersionBlacklistLocal.contains(protocol.toLowerCase(Locale.ROOT))) {
LOG.debug("Disabling SSL Protocol: " + protocol);
} else {
enabledProtocols.add(protocol);
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/SaslQOP.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/SaslQOP.java
index ab3ac6285a..ad4dfd75f4 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/SaslQOP.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/SaslQOP.java
@@ -19,6 +19,7 @@
package org.apache.hive.service.auth;
import java.util.HashMap;
+import java.util.Locale;
import java.util.Map;
/**
@@ -52,7 +53,7 @@ public enum SaslQOP {
public static SaslQOP fromString(String str) {
if (str != null) {
- str = str.toLowerCase();
+ str = str.toLowerCase(Locale.ROOT);
}
SaslQOP saslQOP = STR_TO_ENUM.get(str);
if (saslQOP == null) {
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/Type.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/Type.java
index a96d2ac371..7752ec03a2 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/Type.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/Type.java
@@ -19,6 +19,7 @@
package org.apache.hive.service.cli;
import java.sql.DatabaseMetaData;
+import java.util.Locale;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hive.service.cli.thrift.TTypeId;
@@ -160,7 +161,7 @@ public enum Type {
if (name.equalsIgnoreCase(type.name)) {
return type;
} else if (type.isQualifiedType() || type.isComplexType()) {
- if (name.toUpperCase().startsWith(type.name)) {
+ if (name.toUpperCase(Locale.ROOT).startsWith(type.name)) {
return type;
}
}
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala
index 14553601b1..5e4734ad3a 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala
@@ -294,7 +294,7 @@ private[hive] class HiveThriftServer2(sqlContext: SQLContext)
private def isHTTPTransportMode(hiveConf: HiveConf): Boolean = {
val transportMode = hiveConf.getVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE)
- transportMode.toLowerCase(Locale.ENGLISH).equals("http")
+ transportMode.toLowerCase(Locale.ROOT).equals("http")
}
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index 1bc5c3c62f..d5cc3b3855 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -302,7 +302,7 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
override def processCmd(cmd: String): Int = {
val cmd_trimmed: String = cmd.trim()
- val cmd_lower = cmd_trimmed.toLowerCase(Locale.ENGLISH)
+ val cmd_lower = cmd_trimmed.toLowerCase(Locale.ROOT)
val tokens: Array[String] = cmd_trimmed.split("\\s+")
val cmd_1: String = cmd_trimmed.substring(tokens(0).length()).trim()
if (cmd_lower.equals("quit") ||
@@ -310,7 +310,7 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
sessionState.close()
System.exit(0)
}
- if (tokens(0).toLowerCase(Locale.ENGLISH).equals("source") ||
+ if (tokens(0).toLowerCase(Locale.ROOT).equals("source") ||
cmd_trimmed.startsWith("!") || isRemoteMode) {
val start = System.currentTimeMillis()
super.processCmd(cmd)