Diffstat (limited to 'sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth')
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java   33
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java         71
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java      50
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java                      364
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java                        189
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthenticationException.java           43
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/KerberosSaslHelper.java                   111
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java        84
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java         51
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java          39
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslHelper.java                      154
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslServer.java                      177
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/SaslQOP.java                               62
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java               117
-rw-r--r--  sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java             70
15 files changed, 1615 insertions, 0 deletions
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
new file mode 100644
index 0000000000..c8f93ff6a5
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+/**
+ * This authentication provider allows any combination of username and password.
+ */
+public class AnonymousAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+
+ @Override
+ public void Authenticate(String user, String password) throws AuthenticationException {
+ // no-op authentication
+ }
+
+}
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
new file mode 100644
index 0000000000..4b95503eb1
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+/**
+ * This class helps select a {@link PasswdAuthenticationProvider} for a given {@code AuthMethod}.
+ */
+public final class AuthenticationProviderFactory {
+
+ public enum AuthMethods {
+ LDAP("LDAP"),
+ PAM("PAM"),
+ CUSTOM("CUSTOM"),
+ NONE("NONE");
+
+ private final String authMethod;
+
+ AuthMethods(String authMethod) {
+ this.authMethod = authMethod;
+ }
+
+ public String getAuthMethod() {
+ return authMethod;
+ }
+
+ public static AuthMethods getValidAuthMethod(String authMethodStr)
+ throws AuthenticationException {
+ for (AuthMethods auth : AuthMethods.values()) {
+ if (authMethodStr.equals(auth.getAuthMethod())) {
+ return auth;
+ }
+ }
+ throw new AuthenticationException("Not a valid authentication method");
+ }
+ }
+
+ private AuthenticationProviderFactory() {
+ }
+
+ public static PasswdAuthenticationProvider getAuthenticationProvider(AuthMethods authMethod)
+ throws AuthenticationException {
+ if (authMethod == AuthMethods.LDAP) {
+ return new LdapAuthenticationProviderImpl();
+ } else if (authMethod == AuthMethods.PAM) {
+ return new PamAuthenticationProviderImpl();
+ } else if (authMethod == AuthMethods.CUSTOM) {
+ return new CustomAuthenticationProviderImpl();
+ } else if (authMethod == AuthMethods.NONE) {
+ return new AnonymousAuthenticationProviderImpl();
+ } else {
+ throw new AuthenticationException("Unsupported authentication method");
+ }
+ }
+}
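
For illustration only (not part of this patch), a minimal caller of this factory might look like the sketch below; it resolves the "NONE" method, which yields the anonymous provider, so any credentials are accepted:

    import javax.security.sasl.AuthenticationException;
    import org.apache.hive.service.auth.AuthenticationProviderFactory;
    import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
    import org.apache.hive.service.auth.PasswdAuthenticationProvider;

    public class AuthFactoryExample {
      public static void main(String[] args) throws AuthenticationException {
        // Resolve the configured method name, then fetch the matching provider.
        AuthMethods method = AuthMethods.getValidAuthMethod("NONE");
        PasswdAuthenticationProvider provider =
            AuthenticationProviderFactory.getAuthenticationProvider(method);
        provider.Authenticate("anyUser", "anyPassword"); // no-op for NONE
      }
    }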
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
new file mode 100644
index 0000000000..3dc0aa86e2
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * This authentication provider implements the {@code CUSTOM} authentication. It allows a {@link
+ * PasswdAuthenticationProvider} to be specified at configuration time which may additionally
+ * implement {@link org.apache.hadoop.conf.Configurable Configurable} to grab Hive's {@link
+ * org.apache.hadoop.conf.Configuration Configuration}.
+ */
+public class CustomAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+
+ private final PasswdAuthenticationProvider customProvider;
+
+ @SuppressWarnings("unchecked")
+ CustomAuthenticationProviderImpl() {
+ HiveConf conf = new HiveConf();
+ Class<? extends PasswdAuthenticationProvider> customHandlerClass =
+ (Class<? extends PasswdAuthenticationProvider>) conf.getClass(
+ HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname,
+ PasswdAuthenticationProvider.class);
+ customProvider = ReflectionUtils.newInstance(customHandlerClass, conf);
+ }
+
+ @Override
+ public void Authenticate(String user, String password) throws AuthenticationException {
+ customProvider.Authenticate(user, password);
+ }
+
+}
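
A hypothetical custom provider of the kind this class loads reflectively via the key referenced above (HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS); the class name, package, and password check are purely illustrative:

    package com.example.auth; // hypothetical package

    import javax.security.sasl.AuthenticationException;
    import org.apache.hive.service.auth.PasswdAuthenticationProvider;

    public class StaticPasswordProvider implements PasswdAuthenticationProvider {
      @Override
      public void Authenticate(String user, String password) throws AuthenticationException {
        // Toy check only; a real provider would consult an external credential store.
        if (!"secret".equals(password)) {
          throw new AuthenticationException("Invalid credentials for user " + user);
        }
      }
    }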
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
new file mode 100644
index 0000000000..1e6ac4f3df
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -0,0 +1,364 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.net.ssl.SSLServerSocket;
+import javax.security.auth.login.LoginException;
+import javax.security.sasl.Sasl;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.thrift.DBTokenStore;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.thrift.TProcessorFactory;
+import org.apache.thrift.transport.TSSLTransportFactory;
+import org.apache.thrift.transport.TServerSocket;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+import org.apache.thrift.transport.TTransportFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class helps in some aspects of authentication. It creates the proper Thrift classes for the
+ * given configuration as well as helps with authenticating requests.
+ */
+public class HiveAuthFactory {
+ private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);
+
+
+ public enum AuthTypes {
+ NOSASL("NOSASL"),
+ NONE("NONE"),
+ LDAP("LDAP"),
+ KERBEROS("KERBEROS"),
+ CUSTOM("CUSTOM"),
+ PAM("PAM");
+
+ private final String authType;
+
+ AuthTypes(String authType) {
+ this.authType = authType;
+ }
+
+ public String getAuthName() {
+ return authType;
+ }
+
+ }
+
+ private HadoopThriftAuthBridge.Server saslServer;
+ private String authTypeStr;
+ private final String transportMode;
+ private final HiveConf conf;
+
+ public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
+ public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken";
+
+ public HiveAuthFactory(HiveConf conf) throws TTransportException {
+ this.conf = conf;
+ transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
+ authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION);
+
+ // In http mode we use NOSASL as the default auth type
+ if ("http".equalsIgnoreCase(transportMode)) {
+ if (authTypeStr == null) {
+ authTypeStr = AuthTypes.NOSASL.getAuthName();
+ }
+ } else {
+ if (authTypeStr == null) {
+ authTypeStr = AuthTypes.NONE.getAuthName();
+ }
+ if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
+ saslServer = ShimLoader.getHadoopThriftAuthBridge()
+ .createServer(conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
+ conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL));
+ // start delegation token manager
+ try {
+ // rawStore is only necessary for DBTokenStore
+ Object rawStore = null;
+ String tokenStoreClass = conf.getVar(HiveConf.ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS);
+
+ if (tokenStoreClass.equals(DBTokenStore.class.getName())) {
+ HMSHandler baseHandler = new HiveMetaStore.HMSHandler(
+ "new db based metaserver", conf, true);
+ rawStore = baseHandler.getMS();
+ }
+
+ saslServer.startDelegationTokenSecretManager(conf, rawStore, ServerMode.HIVESERVER2);
+ }
+ catch (MetaException|IOException e) {
+ throw new TTransportException("Failed to start token manager", e);
+ }
+ }
+ }
+ }
+
+ public Map<String, String> getSaslProperties() {
+ Map<String, String> saslProps = new HashMap<String, String>();
+ SaslQOP saslQOP = SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
+ saslProps.put(Sasl.QOP, saslQOP.toString());
+ saslProps.put(Sasl.SERVER_AUTH, "true");
+ return saslProps;
+ }
+
+ public TTransportFactory getAuthTransFactory() throws LoginException {
+ TTransportFactory transportFactory;
+ if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
+ try {
+ transportFactory = saslServer.createTransportFactory(getSaslProperties());
+ } catch (TTransportException e) {
+ throw new LoginException(e.getMessage());
+ }
+ } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName())) {
+ transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
+ } else if (authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName())) {
+ transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
+ } else if (authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName())) {
+ transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
+ } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NOSASL.getAuthName())) {
+ transportFactory = new TTransportFactory();
+ } else if (authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) {
+ transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
+ } else {
+ throw new LoginException("Unsupported authentication type " + authTypeStr);
+ }
+ return transportFactory;
+ }
+
+ /**
+ * Returns the Thrift processor factory for HiveServer2 running in binary mode.
+ * @param service the CLI service to wrap in a processor
+ * @return a processor factory matching the configured authentication type
+ * @throws LoginException if the processor factory cannot be created
+ */
+ public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException {
+ if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
+ return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
+ } else {
+ return PlainSaslHelper.getPlainProcessorFactory(service);
+ }
+ }
+
+ public String getRemoteUser() {
+ return saslServer == null ? null : saslServer.getRemoteUser();
+ }
+
+ public String getIpAddress() {
+ if (saslServer == null || saslServer.getRemoteAddress() == null) {
+ return null;
+ } else {
+ return saslServer.getRemoteAddress().getHostAddress();
+ }
+ }
+
+ // Perform kerberos login using the hadoop shim API if the configuration is available
+ public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
+ String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
+ String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
+ if (principal.isEmpty() || keyTabFile.isEmpty()) {
+ throw new IOException("HiveServer2 Kerberos principal or keytab is not correctly configured");
+ } else {
+ UserGroupInformation.loginUserFromKeytab(SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
+ }
+ }
+
+ // Perform SPNEGO login using the hadoop shim API if the configuration is available
+ public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(HiveConf hiveConf)
+ throws IOException {
+ String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
+ String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
+ if (principal.isEmpty() || keyTabFile.isEmpty()) {
+ throw new IOException("HiveServer2 SPNEGO principal or keytab is not correctly configured");
+ } else {
+ return UserGroupInformation.loginUserFromKeytabAndReturnUGI(SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
+ }
+ }
+
+ public static TTransport getSocketTransport(String host, int port, int loginTimeout) {
+ return new TSocket(host, port, loginTimeout);
+ }
+
+ public static TTransport getSSLSocket(String host, int port, int loginTimeout)
+ throws TTransportException {
+ return TSSLTransportFactory.getClientSocket(host, port, loginTimeout);
+ }
+
+ public static TTransport getSSLSocket(String host, int port, int loginTimeout,
+ String trustStorePath, String trustStorePassWord) throws TTransportException {
+ TSSLTransportFactory.TSSLTransportParameters params =
+ new TSSLTransportFactory.TSSLTransportParameters();
+ params.setTrustStore(trustStorePath, trustStorePassWord);
+ params.requireClientAuth(true);
+ return TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
+ }
+
+ public static TServerSocket getServerSocket(String hiveHost, int portNum)
+ throws TTransportException {
+ InetSocketAddress serverAddress;
+ if (hiveHost == null || hiveHost.isEmpty()) {
+ // Wildcard bind
+ serverAddress = new InetSocketAddress(portNum);
+ } else {
+ serverAddress = new InetSocketAddress(hiveHost, portNum);
+ }
+ return new TServerSocket(serverAddress);
+ }
+
+ public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath,
+ String keyStorePassWord, List<String> sslVersionBlacklist) throws TTransportException,
+ UnknownHostException {
+ TSSLTransportFactory.TSSLTransportParameters params =
+ new TSSLTransportFactory.TSSLTransportParameters();
+ params.setKeyStore(keyStorePath, keyStorePassWord);
+ InetSocketAddress serverAddress;
+ if (hiveHost == null || hiveHost.isEmpty()) {
+ // Wildcard bind
+ serverAddress = new InetSocketAddress(portNum);
+ } else {
+ serverAddress = new InetSocketAddress(hiveHost, portNum);
+ }
+ TServerSocket thriftServerSocket =
+ TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress.getAddress(), params);
+ if (thriftServerSocket.getServerSocket() instanceof SSLServerSocket) {
+ List<String> sslVersionBlacklistLocal = new ArrayList<String>();
+ for (String sslVersion : sslVersionBlacklist) {
+ sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase());
+ }
+ SSLServerSocket sslServerSocket = (SSLServerSocket) thriftServerSocket.getServerSocket();
+ List<String> enabledProtocols = new ArrayList<String>();
+ for (String protocol : sslServerSocket.getEnabledProtocols()) {
+ if (sslVersionBlacklistLocal.contains(protocol.toLowerCase())) {
+ LOG.debug("Disabling SSL Protocol: " + protocol);
+ } else {
+ enabledProtocols.add(protocol);
+ }
+ }
+ sslServerSocket.setEnabledProtocols(enabledProtocols.toArray(new String[0]));
+ LOG.info("SSL Server Socket Enabled Protocols: "
+ + Arrays.toString(sslServerSocket.getEnabledProtocols()));
+ }
+ return thriftServerSocket;
+ }
+
+ // retrieve delegation token for the given user
+ public String getDelegationToken(String owner, String renewer) throws HiveSQLException {
+ if (saslServer == null) {
+ throw new HiveSQLException(
+ "Delegation token only supported over kerberos authentication", "08S01");
+ }
+
+ try {
+ String tokenStr = saslServer.getDelegationTokenWithService(owner, renewer, HS2_CLIENT_TOKEN);
+ if (tokenStr == null || tokenStr.isEmpty()) {
+ throw new HiveSQLException(
+ "Received empty delegation token while retrieving token for user " + owner, "08S01");
+ }
+ return tokenStr;
+ } catch (IOException e) {
+ throw new HiveSQLException(
+ "Error retrieving delegation token for user " + owner, "08S01", e);
+ } catch (InterruptedException e) {
+ throw new HiveSQLException("delegation token retrieval interrupted", "08S01", e);
+ }
+ }
+
+ // cancel given delegation token
+ public void cancelDelegationToken(String delegationToken) throws HiveSQLException {
+ if (saslServer == null) {
+ throw new HiveSQLException(
+ "Delegation token only supported over kerberos authentication", "08S01");
+ }
+ try {
+ saslServer.cancelDelegationToken(delegationToken);
+ } catch (IOException e) {
+ throw new HiveSQLException(
+ "Error canceling delegation token " + delegationToken, "08S01", e);
+ }
+ }
+
+ public void renewDelegationToken(String delegationToken) throws HiveSQLException {
+ if (saslServer == null) {
+ throw new HiveSQLException(
+ "Delegation token only supported over kerberos authentication", "08S01");
+ }
+ try {
+ saslServer.renewDelegationToken(delegationToken);
+ } catch (IOException e) {
+ throw new HiveSQLException(
+ "Error renewing delegation token " + delegationToken, "08S01", e);
+ }
+ }
+
+ public String getUserFromToken(String delegationToken) throws HiveSQLException {
+ if (saslServer == null) {
+ throw new HiveSQLException(
+ "Delegation token only supported over kerberos authentication", "08S01");
+ }
+ try {
+ return saslServer.getUserFromToken(delegationToken);
+ } catch (IOException e) {
+ throw new HiveSQLException(
+ "Error extracting user from delegation token " + delegationToken, "08S01", e);
+ }
+ }
+
+ public static void verifyProxyAccess(String realUser, String proxyUser, String ipAddress,
+ HiveConf hiveConf) throws HiveSQLException {
+ try {
+ UserGroupInformation sessionUgi;
+ if (UserGroupInformation.isSecurityEnabled()) {
+ KerberosNameShim kerbName = ShimLoader.getHadoopShims().getKerberosNameShim(realUser);
+ sessionUgi = UserGroupInformation.createProxyUser(
+ kerbName.getServiceName(), UserGroupInformation.getLoginUser());
+ } else {
+ sessionUgi = UserGroupInformation.createRemoteUser(realUser);
+ }
+ if (!proxyUser.equalsIgnoreCase(realUser)) {
+ ProxyUsers.refreshSuperUserGroupsConfiguration(hiveConf);
+ ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, sessionUgi),
+ ipAddress, hiveConf);
+ }
+ } catch (IOException e) {
+ throw new HiveSQLException(
+ "Failed to validate proxy privilege of " + realUser + " for " + proxyUser, "08S01", e);
+ }
+ }
+
+}
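
A rough usage sketch, not part of the patch, assuming an existing ThriftCLIService instance named service:

    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION, "NONE");
    HiveAuthFactory authFactory = new HiveAuthFactory(conf);
    // Transport and processor factories for a binary-mode server.
    TTransportFactory transportFactory = authFactory.getAuthTransFactory();
    TProcessorFactory processorFactory = authFactory.getAuthProcFactory(service);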
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java
new file mode 100644
index 0000000000..3ef55779a6
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthUtils.java
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.auth;
+
+import java.security.AccessControlContext;
+import java.security.AccessController;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.StringTokenizer;
+
+import javax.security.auth.Subject;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.http.protocol.BasicHttpContext;
+import org.apache.http.protocol.HttpContext;
+import org.ietf.jgss.GSSContext;
+import org.ietf.jgss.GSSManager;
+import org.ietf.jgss.GSSName;
+import org.ietf.jgss.Oid;
+
+/**
+ * Utility functions for HTTP mode authentication.
+ */
+public final class HttpAuthUtils {
+ public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
+ public static final String AUTHORIZATION = "Authorization";
+ public static final String BASIC = "Basic";
+ public static final String NEGOTIATE = "Negotiate";
+ private static final Log LOG = LogFactory.getLog(HttpAuthUtils.class);
+ private static final String COOKIE_ATTR_SEPARATOR = "&";
+ private static final String COOKIE_CLIENT_USER_NAME = "cu";
+ private static final String COOKIE_CLIENT_RAND_NUMBER = "rn";
+ private static final String COOKIE_KEY_VALUE_SEPARATOR = "=";
+ private final static Set<String> COOKIE_ATTRIBUTES =
+ new HashSet<String>(Arrays.asList(COOKIE_CLIENT_USER_NAME, COOKIE_CLIENT_RAND_NUMBER));
+
+ /**
+ * @return Stringified Base64 encoded kerberosAuthHeader on success
+ * @throws Exception
+ */
+ public static String getKerberosServiceTicket(String principal, String host,
+ String serverHttpUrl, boolean assumeSubject) throws Exception {
+ String serverPrincipal =
+ ShimLoader.getHadoopThriftAuthBridge().getServerPrincipal(principal, host);
+ if (assumeSubject) {
+ // With this option, we're assuming that the external application,
+ // using the JDBC driver has done a JAAS kerberos login already
+ AccessControlContext context = AccessController.getContext();
+ Subject subject = Subject.getSubject(context);
+ if (subject == null) {
+ throw new Exception("The Subject is not set");
+ }
+ return Subject.doAs(subject, new HttpKerberosClientAction(serverPrincipal, serverHttpUrl));
+ } else {
+ // JAAS login from ticket cache to setup the client UserGroupInformation
+ UserGroupInformation clientUGI =
+ ShimLoader.getHadoopThriftAuthBridge().getCurrentUGIWithConf("kerberos");
+ return clientUGI.doAs(new HttpKerberosClientAction(serverPrincipal, serverHttpUrl));
+ }
+ }
+
+ /**
+ * Creates and returns a HS2 cookie token.
+ * @param clientUserName Client User name.
+ * @return An unsigned cookie token generated from input parameters.
+ * The final cookie generated is of the following format:
+ * cu=<username>&rn=<randomNumber>&s=<cookieSignature>
+ */
+ public static String createCookieToken(String clientUserName) {
+ StringBuffer sb = new StringBuffer();
+ sb.append(COOKIE_CLIENT_USER_NAME).append(COOKIE_KEY_VALUE_SEPARATOR).append(clientUserName).
+ append(COOKIE_ATTR_SEPARATOR);
+ sb.append(COOKIE_CLIENT_RAND_NUMBER).append(COOKIE_KEY_VALUE_SEPARATOR).
+ append((new Random(System.currentTimeMillis())).nextLong());
+ return sb.toString();
+ }
+
+ /**
+ * Parses a cookie token to retrieve client user name.
+ * @param tokenStr Token String.
+ * @return A valid user name if input is of valid format, else returns null.
+ */
+ public static String getUserNameFromCookieToken(String tokenStr) {
+ Map<String, String> map = splitCookieToken(tokenStr);
+
+ if (!map.keySet().equals(COOKIE_ATTRIBUTES)) {
+ LOG.error("Invalid token with missing attributes " + tokenStr);
+ return null;
+ }
+ return map.get(COOKIE_CLIENT_USER_NAME);
+ }
+
+ /**
+ * Splits the cookie token into attributes pairs.
+ * @param str input token.
+ * @return a map with the attribute pairs of the token if the input is valid.
+ * Else, returns null.
+ */
+ private static Map<String, String> splitCookieToken(String tokenStr) {
+ Map<String, String> map = new HashMap<String, String>();
+ StringTokenizer st = new StringTokenizer(tokenStr, COOKIE_ATTR_SEPARATOR);
+
+ while (st.hasMoreTokens()) {
+ String part = st.nextToken();
+ int separator = part.indexOf(COOKIE_KEY_VALUE_SEPARATOR);
+ if (separator == -1) {
+ LOG.error("Invalid token string " + tokenStr);
+ return null;
+ }
+ String key = part.substring(0, separator);
+ String value = part.substring(separator + 1);
+ map.put(key, value);
+ }
+ return map;
+ }
+
+
+ private HttpAuthUtils() {
+ throw new UnsupportedOperationException("Can't initialize class");
+ }
+
+ /**
+ * We'll create an instance of this class within a doAs block so that the client's TGT credentials
+ * can be read from the Subject
+ */
+ public static class HttpKerberosClientAction implements PrivilegedExceptionAction<String> {
+ public static final String HTTP_RESPONSE = "HTTP_RESPONSE";
+ public static final String SERVER_HTTP_URL = "SERVER_HTTP_URL";
+ private final String serverPrincipal;
+ private final String serverHttpUrl;
+ private final Base64 base64codec;
+ private final HttpContext httpContext;
+
+ public HttpKerberosClientAction(String serverPrincipal, String serverHttpUrl) {
+ this.serverPrincipal = serverPrincipal;
+ this.serverHttpUrl = serverHttpUrl;
+ base64codec = new Base64(0);
+ httpContext = new BasicHttpContext();
+ httpContext.setAttribute(SERVER_HTTP_URL, serverHttpUrl);
+ }
+
+ @Override
+ public String run() throws Exception {
+ // The Oid for the Kerberos GSS-API mechanism.
+ Oid mechOid = new Oid("1.2.840.113554.1.2.2");
+ // The Oid for Kerberos principal names.
+ Oid krb5PrincipalOid = new Oid("1.2.840.113554.1.2.2.1");
+ GSSManager manager = GSSManager.getInstance();
+ // GSS name for server
+ GSSName serverName = manager.createName(serverPrincipal, krb5PrincipalOid);
+ // Create a GSSContext for authentication with the service.
+ // We're passing client credentials as null since we want them to be read from the Subject.
+ GSSContext gssContext =
+ manager.createContext(serverName, mechOid, null, GSSContext.DEFAULT_LIFETIME);
+ gssContext.requestMutualAuth(false);
+ // Establish context
+ byte[] inToken = new byte[0];
+ byte[] outToken = gssContext.initSecContext(inToken, 0, inToken.length);
+ gssContext.dispose();
+ // Base64 encoded and stringified token for server
+ return new String(base64codec.encode(outToken));
+ }
+ }
+}
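
A round-trip sketch of the cookie-token helpers above (illustrative only):

    String token = HttpAuthUtils.createCookieToken("alice");
    // token is of the form "cu=alice&rn=<randomLong>"
    String user = HttpAuthUtils.getUserNameFromCookieToken(token);
    // user == "alice"; malformed tokens return null and log an error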
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthenticationException.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthenticationException.java
new file mode 100644
index 0000000000..5764325602
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HttpAuthenticationException.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+
+package org.apache.hive.service.auth;
+
+public class HttpAuthenticationException extends Exception {
+
+ private static final long serialVersionUID = 0;
+
+ /**
+ * @param cause original exception
+ */
+ public HttpAuthenticationException(Throwable cause) {
+ super(cause);
+ }
+
+ /**
+ * @param msg exception message
+ */
+ public HttpAuthenticationException(String msg) {
+ super(msg);
+ }
+
+ /**
+ * @param msg exception message
+ * @param cause original exception
+ */
+ public HttpAuthenticationException(String msg, Throwable cause) {
+ super(msg, cause);
+ }
+
+}
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/KerberosSaslHelper.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/KerberosSaslHelper.java
new file mode 100644
index 0000000000..11d26699fe
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/KerberosSaslHelper.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import java.io.IOException;
+import java.util.Map;
+import javax.security.sasl.SaslException;
+
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server;
+import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TCLIService.Iface;
+import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.thrift.TProcessor;
+import org.apache.thrift.TProcessorFactory;
+import org.apache.thrift.transport.TSaslClientTransport;
+import org.apache.thrift.transport.TTransport;
+
+public final class KerberosSaslHelper {
+
+ public static TProcessorFactory getKerberosProcessorFactory(Server saslServer,
+ ThriftCLIService service) {
+ return new CLIServiceProcessorFactory(saslServer, service);
+ }
+
+ public static TTransport getKerberosTransport(String principal, String host,
+ TTransport underlyingTransport, Map<String, String> saslProps, boolean assumeSubject)
+ throws SaslException {
+ try {
+ String[] names = principal.split("[/@]");
+ if (names.length != 3) {
+ throw new IllegalArgumentException("Kerberos principal should have 3 parts: " + principal);
+ }
+
+ if (assumeSubject) {
+ return createSubjectAssumedTransport(principal, underlyingTransport, saslProps);
+ } else {
+ HadoopThriftAuthBridge.Client authBridge =
+ ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
+ return authBridge.createClientTransport(principal, host, "KERBEROS", null,
+ underlyingTransport, saslProps);
+ }
+ } catch (IOException e) {
+ throw new SaslException("Failed to open client transport", e);
+ }
+ }
+
+ public static TTransport createSubjectAssumedTransport(String principal,
+ TTransport underlyingTransport, Map<String, String> saslProps) throws IOException {
+ String[] names = principal.split("[/@]");
+ try {
+ TTransport saslTransport =
+ new TSaslClientTransport("GSSAPI", null, names[0], names[1], saslProps, null,
+ underlyingTransport);
+ return new TSubjectAssumingTransport(saslTransport);
+ } catch (SaslException se) {
+ throw new IOException("Could not instantiate SASL transport", se);
+ }
+ }
+
+ public static TTransport getTokenTransport(String tokenStr, String host,
+ TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
+ HadoopThriftAuthBridge.Client authBridge =
+ ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
+
+ try {
+ return authBridge.createClientTransport(null, host, "DIGEST", tokenStr, underlyingTransport,
+ saslProps);
+ } catch (IOException e) {
+ throw new SaslException("Failed to open client transport", e);
+ }
+ }
+
+ private KerberosSaslHelper() {
+ throw new UnsupportedOperationException("Can't initialize class");
+ }
+
+ private static class CLIServiceProcessorFactory extends TProcessorFactory {
+
+ private final ThriftCLIService service;
+ private final Server saslServer;
+
+ public CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) {
+ super(null);
+ this.service = service;
+ this.saslServer = saslServer;
+ }
+
+ @Override
+ public TProcessor getProcessor(TTransport trans) {
+ TProcessor sqlProcessor = new TCLIService.Processor<Iface>(service);
+ return saslServer.wrapNonAssumingProcessor(sqlProcessor);
+ }
+ }
+}
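
The three-part principal check above expects service/host@REALM; for example (hypothetical principal):

    String principal = "hive/host.example.com@EXAMPLE.COM";
    String[] names = principal.split("[/@]");
    // names[0] = "hive" (service), names[1] = "host.example.com" (host),
    // names[2] = "EXAMPLE.COM" (realm) -- names.length == 3, so the check passes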
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
new file mode 100644
index 0000000000..4e2ef90a1e
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import java.util.Hashtable;
+import javax.naming.Context;
+import javax.naming.NamingException;
+import javax.naming.directory.InitialDirContext;
+import javax.security.sasl.AuthenticationException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.ServiceUtils;
+
+public class LdapAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+
+ private final String ldapURL;
+ private final String baseDN;
+ private final String ldapDomain;
+
+ LdapAuthenticationProviderImpl() {
+ HiveConf conf = new HiveConf();
+ ldapURL = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL);
+ baseDN = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN);
+ ldapDomain = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN);
+ }
+
+ @Override
+ public void Authenticate(String user, String password) throws AuthenticationException {
+
+ Hashtable<String, Object> env = new Hashtable<String, Object>();
+ env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
+ env.put(Context.PROVIDER_URL, ldapURL);
+
+ // If the domain is available in the config, then append it unless domain is
+ // already part of the username. LDAP providers like Active Directory use a
+ // fully qualified user name like foo@bar.com.
+ if (!hasDomain(user) && ldapDomain != null) {
+ user = user + "@" + ldapDomain;
+ }
+
+ if (password == null || password.isEmpty() || password.getBytes()[0] == 0) {
+ throw new AuthenticationException("Error validating LDAP user:" +
+ " a null or blank password has been provided");
+ }
+
+ // setup the security principal
+ String bindDN;
+ if (baseDN == null) {
+ bindDN = user;
+ } else {
+ bindDN = "uid=" + user + "," + baseDN;
+ }
+ env.put(Context.SECURITY_AUTHENTICATION, "simple");
+ env.put(Context.SECURITY_PRINCIPAL, bindDN);
+ env.put(Context.SECURITY_CREDENTIALS, password);
+
+ try {
+ // Create initial context
+ Context ctx = new InitialDirContext(env);
+ ctx.close();
+ } catch (NamingException e) {
+ throw new AuthenticationException("Error validating LDAP user", e);
+ }
+ }
+
+ private boolean hasDomain(String userName) {
+ return (ServiceUtils.indexOfDomainMatch(userName) > 0);
+ }
+}
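
Worked examples of the security principal this provider binds with (all values hypothetical):

    // baseDN = "ou=people,dc=example,dc=com", user "alice"
    //   -> SECURITY_PRINCIPAL "uid=alice,ou=people,dc=example,dc=com"
    // baseDN unset, ldapDomain = "example.com", user "alice" (no domain in name)
    //   -> user rewritten to "alice@example.com", used directly as the principal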
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java
new file mode 100644
index 0000000000..68f62c4617
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+import net.sf.jpam.Pam;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+public class PamAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+
+ private final String pamServiceNames;
+
+ PamAuthenticationProviderImpl() {
+ HiveConf conf = new HiveConf();
+ pamServiceNames = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES);
+ }
+
+ @Override
+ public void Authenticate(String user, String password) throws AuthenticationException {
+
+ if (pamServiceNames == null || pamServiceNames.trim().isEmpty()) {
+ throw new AuthenticationException("No PAM services are set.");
+ }
+
+ String[] pamServices = pamServiceNames.split(",");
+ for (String pamService : pamServices) {
+ Pam pam = new Pam(pamService);
+ boolean isAuthenticated = pam.authenticateSuccessful(user, password);
+ if (!isAuthenticated) {
+ throw new AuthenticationException(
+ "Error authenticating with the PAM service: " + pamService);
+ }
+ }
+ }
+}
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
new file mode 100644
index 0000000000..e2a6de165a
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+public interface PasswdAuthenticationProvider {
+
+ /**
+ * The Authenticate method is called by the HiveServer2 authentication layer
+ * to authenticate users for their requests.
+ * If the user is granted access, return normally without throwing.
+ * If the user is to be denied, throw an appropriate {@link AuthenticationException}.
+ * <p/>
+ * For an example implementation, see {@link LdapAuthenticationProviderImpl}.
+ *
+ * @param user The username received over the connection request
+ * @param password The password received over the connection request
+ *
+ * @throws AuthenticationException When a user is found to be
+ * invalid by the implementation
+ */
+ void Authenticate(String user, String password) throws AuthenticationException;
+}
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslHelper.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslHelper.java
new file mode 100644
index 0000000000..afc144199f
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslHelper.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import java.io.IOException;
+import java.security.Security;
+import java.util.HashMap;
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.NameCallback;
+import javax.security.auth.callback.PasswordCallback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.auth.login.LoginException;
+import javax.security.sasl.AuthenticationException;
+import javax.security.sasl.AuthorizeCallback;
+import javax.security.sasl.SaslException;
+
+import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
+import org.apache.hive.service.auth.PlainSaslServer.SaslPlainProvider;
+import org.apache.hive.service.cli.thrift.TCLIService.Iface;
+import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.thrift.TProcessor;
+import org.apache.thrift.TProcessorFactory;
+import org.apache.thrift.transport.TSaslClientTransport;
+import org.apache.thrift.transport.TSaslServerTransport;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportFactory;
+
+public final class PlainSaslHelper {
+
+ public static TProcessorFactory getPlainProcessorFactory(ThriftCLIService service) {
+ return new SQLPlainProcessorFactory(service);
+ }
+
+ // Register Plain SASL server provider
+ static {
+ Security.addProvider(new SaslPlainProvider());
+ }
+
+ public static TTransportFactory getPlainTransportFactory(String authTypeStr)
+ throws LoginException {
+ TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
+ try {
+ saslFactory.addServerDefinition("PLAIN", authTypeStr, null, new HashMap<String, String>(),
+ new PlainServerCallbackHandler(authTypeStr));
+ } catch (AuthenticationException e) {
+ throw new LoginException("Error setting callback handler: " + e);
+ }
+ return saslFactory;
+ }
+
+ public static TTransport getPlainTransport(String username, String password,
+ TTransport underlyingTransport) throws SaslException {
+ return new TSaslClientTransport("PLAIN", null, null, null, new HashMap<String, String>(),
+ new PlainCallbackHandler(username, password), underlyingTransport);
+ }
+
+ private PlainSaslHelper() {
+ throw new UnsupportedOperationException("Can't initialize class");
+ }
+
+ private static final class PlainServerCallbackHandler implements CallbackHandler {
+
+ private final AuthMethods authMethod;
+
+ PlainServerCallbackHandler(String authMethodStr) throws AuthenticationException {
+ authMethod = AuthMethods.getValidAuthMethod(authMethodStr);
+ }
+
+ @Override
+ public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+ String username = null;
+ String password = null;
+ AuthorizeCallback ac = null;
+
+ for (Callback callback : callbacks) {
+ if (callback instanceof NameCallback) {
+ NameCallback nc = (NameCallback) callback;
+ username = nc.getName();
+ } else if (callback instanceof PasswordCallback) {
+ PasswordCallback pc = (PasswordCallback) callback;
+ password = new String(pc.getPassword());
+ } else if (callback instanceof AuthorizeCallback) {
+ ac = (AuthorizeCallback) callback;
+ } else {
+ throw new UnsupportedCallbackException(callback);
+ }
+ }
+ PasswdAuthenticationProvider provider =
+ AuthenticationProviderFactory.getAuthenticationProvider(authMethod);
+ provider.Authenticate(username, password);
+ if (ac != null) {
+ ac.setAuthorized(true);
+ }
+ }
+ }
+
+ public static class PlainCallbackHandler implements CallbackHandler {
+
+ private final String username;
+ private final String password;
+
+ public PlainCallbackHandler(String username, String password) {
+ this.username = username;
+ this.password = password;
+ }
+
+ @Override
+ public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+ for (Callback callback : callbacks) {
+ if (callback instanceof NameCallback) {
+ NameCallback nameCallback = (NameCallback) callback;
+ nameCallback.setName(username);
+ } else if (callback instanceof PasswordCallback) {
+ PasswordCallback passCallback = (PasswordCallback) callback;
+ passCallback.setPassword(password.toCharArray());
+ } else {
+ throw new UnsupportedCallbackException(callback);
+ }
+ }
+ }
+ }
+
+ private static final class SQLPlainProcessorFactory extends TProcessorFactory {
+
+ private final ThriftCLIService service;
+
+ SQLPlainProcessorFactory(ThriftCLIService service) {
+ super(null);
+ this.service = service;
+ }
+
+ @Override
+ public TProcessor getProcessor(TTransport trans) {
+ return new TSetIpAddressProcessor<Iface>(service);
+ }
+ }
+
+}
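
A client-side sketch (host, port, and credentials hypothetical): opening the transport runs the PLAIN handshake against a server built from getPlainTransportFactory:

    TTransport socket = new TSocket("localhost", 10000);
    TTransport transport = PlainSaslHelper.getPlainTransport("alice", "secret", socket);
    transport.open(); // performs the SASL PLAIN negotiation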
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslServer.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslServer.java
new file mode 100644
index 0000000000..cd675da29a
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslServer.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import java.io.IOException;
+import java.security.Provider;
+import java.util.ArrayDeque;
+import java.util.Deque;
+import java.util.Map;
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.NameCallback;
+import javax.security.auth.callback.PasswordCallback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.sasl.AuthorizeCallback;
+import javax.security.sasl.SaslException;
+import javax.security.sasl.SaslServer;
+import javax.security.sasl.SaslServerFactory;
+
+import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
+
+/**
+ * Sun JDK only provides a PLAIN client and no server. This class implements the Plain SASL server
+ * conforming to RFC #4616 (http://www.ietf.org/rfc/rfc4616.txt).
+ */
+public class PlainSaslServer implements SaslServer {
+
+ public static final String PLAIN_METHOD = "PLAIN";
+ private String user;
+ private final CallbackHandler handler;
+
+ PlainSaslServer(CallbackHandler handler, String authMethodStr) throws SaslException {
+ this.handler = handler;
+ AuthMethods.getValidAuthMethod(authMethodStr);
+ }
+
+ @Override
+ public String getMechanismName() {
+ return PLAIN_METHOD;
+ }
+
+ @Override
+ public byte[] evaluateResponse(byte[] response) throws SaslException {
+ try {
+ // parse the response
+ // message = [authzid] UTF8NUL authcid UTF8NUL passwd'
+
+ Deque<String> tokenList = new ArrayDeque<String>();
+ StringBuilder messageToken = new StringBuilder();
+ for (byte b : response) {
+ if (b == 0) {
+ tokenList.addLast(messageToken.toString());
+ messageToken = new StringBuilder();
+ } else {
+ messageToken.append((char) b);
+ }
+ }
+ tokenList.addLast(messageToken.toString());
+
+ // validate response
+ if (tokenList.size() < 2 || tokenList.size() > 3) {
+ throw new SaslException("Invalid message format");
+ }
+ String passwd = tokenList.removeLast();
+ user = tokenList.removeLast();
+ // optional authzid
+ String authzId;
+ if (tokenList.isEmpty()) {
+ authzId = user;
+ } else {
+ authzId = tokenList.removeLast();
+ }
+ if (user == null || user.isEmpty()) {
+ throw new SaslException("No user name provided");
+ }
+ if (passwd == null || passwd.isEmpty()) {
+ throw new SaslException("No password name provided");
+ }
+
+ NameCallback nameCallback = new NameCallback("User");
+ nameCallback.setName(user);
+ PasswordCallback pcCallback = new PasswordCallback("Password", false);
+ pcCallback.setPassword(passwd.toCharArray());
+ AuthorizeCallback acCallback = new AuthorizeCallback(user, authzId);
+
+ Callback[] cbList = {nameCallback, pcCallback, acCallback};
+ handler.handle(cbList);
+ if (!acCallback.isAuthorized()) {
+ throw new SaslException("Authentication failed");
+ }
+ } catch (IllegalStateException eL) {
+ throw new SaslException("Invalid message format", eL);
+ } catch (IOException eI) {
+ throw new SaslException("Error validating the login", eI);
+ } catch (UnsupportedCallbackException eU) {
+ throw new SaslException("Error validating the login", eU);
+ }
+ return null;
+ }
+
+ @Override
+ public boolean isComplete() {
+ return user != null;
+ }
+
+ @Override
+ public String getAuthorizationID() {
+ return user;
+ }
+
+ @Override
+ public byte[] unwrap(byte[] incoming, int offset, int len) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public byte[] wrap(byte[] outgoing, int offset, int len) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Object getNegotiatedProperty(String propName) {
+ return null;
+ }
+
+ @Override
+ public void dispose() {}
+
+ public static class SaslPlainServerFactory implements SaslServerFactory {
+
+ @Override
+ public SaslServer createSaslServer(String mechanism, String protocol, String serverName,
+ Map<String, ?> props, CallbackHandler cbh) {
+ if (PLAIN_METHOD.equals(mechanism)) {
+ try {
+ return new PlainSaslServer(cbh, protocol);
+ } catch (SaslException e) {
+ /* This is to fulfill the contract of the interface which states that an exception shall
+ be thrown when a SaslServer cannot be created due to an error but null should be
+ returned when a Server can't be created due to the parameters supplied. And the only
+ thing PlainSaslServer can fail on is a non-supported authentication mechanism.
+ That's why we return null instead of throwing the Exception */
+ return null;
+ }
+ }
+ return null;
+ }
+
+ @Override
+ public String[] getMechanismNames(Map<String, ?> props) {
+ return new String[] {PLAIN_METHOD};
+ }
+ }
+
+ public static class SaslPlainProvider extends Provider {
+
+ public SaslPlainProvider() {
+ super("HiveSaslPlain", 1.0, "Hive Plain SASL provider");
+ put("SaslServerFactory.PLAIN", SaslPlainServerFactory.class.getName());
+ }
+ }
+}
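
The initial response parsed by evaluateResponse is [authzid] NUL authcid NUL passwd; a client would send, for example (credentials hypothetical):

    String authzid = "alice", authcid = "alice", passwd = "secret";
    byte[] response =
        (authzid + '\0' + authcid + '\0' + passwd).getBytes(java.nio.charset.StandardCharsets.UTF_8);
    // evaluateResponse(response) splits on the NUL bytes, sets user = "alice",
    // and passes the name/password callbacks to the server's CallbackHandler.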
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/SaslQOP.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/SaslQOP.java
new file mode 100644
index 0000000000..479ebf32ce
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/SaslQOP.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.auth;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Possible values of SASL quality-of-protection value.
+ */
+public enum SaslQOP {
+ AUTH("auth"), // Authentication only.
+ AUTH_INT("auth-int"), // Authentication and integrity checking by using signatures.
+ AUTH_CONF("auth-conf"); // Authentication, integrity and confidentiality checking
+ // by using signatures and encryption.
+
+ public final String saslQop;
+
+ private static final Map<String, SaslQOP> STR_TO_ENUM = new HashMap<String, SaslQOP>();
+
+ static {
+ for (SaslQOP saslQop : values()) {
+ STR_TO_ENUM.put(saslQop.toString(), saslQop);
+ }
+ }
+
+ SaslQOP(String saslQop) {
+ this.saslQop = saslQop;
+ }
+
+ @Override
+ public String toString() {
+ return saslQop;
+ }
+
+ public static SaslQOP fromString(String str) {
+ if (str != null) {
+ str = str.toLowerCase();
+ }
+ SaslQOP saslQOP = STR_TO_ENUM.get(str);
+ if (saslQOP == null) {
+ throw new IllegalArgumentException(
+ "Unknown auth type: " + str + ". Allowed values are: " + STR_TO_ENUM.keySet());
+ }
+ return saslQOP;
+ }
+}
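
Illustrative usage of SaslQOP: fromString() is case-insensitive and rejects unknown
values. The Sasl.QOP property key is from the JDK; the SaslQopSketch class below is
hypothetical:

    import java.util.HashMap;
    import java.util.Map;
    import javax.security.sasl.Sasl;

    public class SaslQopSketch {
      public static void main(String[] args) {
        SaslQOP qop = SaslQOP.fromString("AUTH-CONF");   // lowercased -> AUTH_CONF
        Map<String, String> saslProps = new HashMap<String, String>();
        saslProps.put(Sasl.QOP, qop.toString());         // puts "auth-conf"
        System.out.println(saslProps);
        // SaslQOP.fromString("bogus") would throw IllegalArgumentException
        // listing the allowed values: [auth, auth-int, auth-conf].
      }
    }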
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
new file mode 100644
index 0000000000..645e3e2bbd
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.auth;
+
+import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.cli.thrift.TCLIService.Iface;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TSaslClientTransport;
+import org.apache.thrift.transport.TSaslServerTransport;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class is responsible for setting the ipAddress for operations executed via HiveServer2.
+ * <p>
+ * <ul>
+ * <li>IP address is only set for operations that call listeners with a hookContext</li>
+ * <li>IP address is only set if the underlying transport mechanism is a socket</li>
+ * </ul>
+ * </p>
+ *
+ * @see org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext
+ */
+public class TSetIpAddressProcessor<I extends Iface> extends TCLIService.Processor<Iface> {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(TSetIpAddressProcessor.class.getName());
+
+ public TSetIpAddressProcessor(Iface iface) {
+ super(iface);
+ }
+
+ @Override
+ public boolean process(final TProtocol in, final TProtocol out) throws TException {
+ setIpAddress(in);
+ setUserName(in);
+ try {
+ return super.process(in, out);
+ } finally {
+ THREAD_LOCAL_USER_NAME.remove();
+ THREAD_LOCAL_IP_ADDRESS.remove();
+ }
+ }
+
+ private void setUserName(final TProtocol in) {
+ TTransport transport = in.getTransport();
+ if (transport instanceof TSaslServerTransport) {
+ String userName = ((TSaslServerTransport) transport).getSaslServer().getAuthorizationID();
+ THREAD_LOCAL_USER_NAME.set(userName);
+ }
+ }
+
+ protected void setIpAddress(final TProtocol in) {
+ TTransport transport = in.getTransport();
+ TSocket tSocket = getUnderlyingSocketFromTransport(transport);
+ if (tSocket == null) {
+ LOGGER.warn("Unknown Transport, cannot determine ipAddress");
+ } else {
+ THREAD_LOCAL_IP_ADDRESS.set(tSocket.getSocket().getInetAddress().getHostAddress());
+ }
+ }
+
+ private TSocket getUnderlyingSocketFromTransport(TTransport transport) {
+ while (transport != null) {
+ if (transport instanceof TSocket) {
+ return (TSocket) transport;
+ }
+ if (transport instanceof TSaslServerTransport) {
+ transport = ((TSaslServerTransport) transport).getUnderlyingTransport();
+ } else if (transport instanceof TSaslClientTransport) {
+ transport = ((TSaslClientTransport) transport).getUnderlyingTransport();
+ } else {
+ // Unknown transport type: stop unwrapping rather than loop forever.
+ return null;
+ }
+ }
+ return null;
+ }
+
+ // ThreadLocal.initialValue() already returns null by default; no override needed.
+ private static final ThreadLocal<String> THREAD_LOCAL_IP_ADDRESS = new ThreadLocal<String>();
+
+ private static final ThreadLocal<String> THREAD_LOCAL_USER_NAME = new ThreadLocal<String>();
+
+ public static String getUserIpAddress() {
+ return THREAD_LOCAL_IP_ADDRESS.get();
+ }
+
+ public static String getUserName() {
+ return THREAD_LOCAL_USER_NAME.get();
+ }
+}
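
A hedged sketch of how downstream code might consume the per-request identity that
TSetIpAddressProcessor stashes above; the AuditSketch class is hypothetical. Note
that the values are only meaningful on the Thrift worker thread servicing the
request, since process() clears both ThreadLocals in its finally block:

    public class AuditSketch {
      public void logCaller() {
        String user = TSetIpAddressProcessor.getUserName();    // null unless SASL transport
        String ip = TSetIpAddressProcessor.getUserIpAddress(); // null unless socket transport
        System.out.println("request from user=" + user + " ip=" + ip);
      }
    }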
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java
new file mode 100644
index 0000000000..2422e86c6b
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.auth;
+
+import java.security.AccessControlContext;
+import java.security.AccessController;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import javax.security.auth.Subject;
+
+import org.apache.hadoop.hive.thrift.TFilterTransport;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+
+/**
+ * This is used on the client side, where the API explicitly opens a transport to
+ * the server using Subject.doAs().
+ */
+public class TSubjectAssumingTransport extends TFilterTransport {
+
+ public TSubjectAssumingTransport(TTransport wrapped) {
+ super(wrapped);
+ }
+
+ @Override
+ public void open() throws TTransportException {
+ try {
+ AccessControlContext context = AccessController.getContext();
+ Subject subject = Subject.getSubject(context);
+ Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
+ public Void run() {
+ try {
+ wrapped.open();
+ } catch (TTransportException tte) {
+ // Wrap the transport exception in an RTE, since Subject.doAs() then goes
+ // and unwraps this for us out of the doAs block. We then unwrap one
+ // more time in our catch clause to get back the TTE. (ugh)
+ throw new RuntimeException(tte);
+ }
+ return null;
+ }
+ });
+ } catch (PrivilegedActionException pae) {
+ // run() above throws no checked exceptions, so this path should be unreachable.
+ throw new RuntimeException("Received a PrivilegedActionException we never threw!", pae);
+ } catch (RuntimeException rte) {
+ if (rte.getCause() instanceof TTransportException) {
+ throw (TTransportException) rte.getCause();
+ } else {
+ throw rte;
+ }
+ }
+ }
+
+}
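
A minimal sketch of wiring TSubjectAssumingTransport on the client side, assuming a
JAAS login (e.g. Kerberos) has already attached a Subject to the current
AccessControlContext; the SubjectTransportSketch class, host, and port are
placeholders:

    import org.apache.thrift.transport.TSocket;
    import org.apache.thrift.transport.TTransport;
    import org.apache.thrift.transport.TTransportException;

    public class SubjectTransportSketch {
      public static void main(String[] args) throws TTransportException {
        TTransport socket = new TSocket("hs2-host", 10000);
        TTransport transport = new TSubjectAssumingTransport(socket);
        transport.open();  // wrapped.open() runs inside Subject.doAs(subject, ...)
      }
    }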