From adc75eaacfc5dde330478126ef1c361a8ad393a8 Mon Sep 17 00:00:00 2001 From: slothever Date: Thu, 4 Jul 2024 17:40:00 +0800 Subject: [PATCH 1/3] [fix](kerberos)fix ugi login --- .../java/org/apache/doris/common/Config.java | 7 +- .../authentication/HadoopAuthenticator.java | 30 +++++ .../HadoopKerberosAuthenticator.java | 110 ++++++++++++++++++ .../HadoopSimpleAuthenticator.java | 66 +++++++++++ .../security/authentication/HadoopUGI.java | 98 +++++----------- .../ImpersonatingHadoopAuthenticator.java | 49 ++++++++ .../KerberosAuthenticationConfig.java | 2 + .../datasource/hive/HMSCachedClient.java | 3 + .../datasource/hive/HMSExternalCatalog.java | 5 - .../datasource/hive/HiveMetaStoreCache.java | 66 +++++++++++ .../datasource/hive/HiveMetadataOps.java | 8 +- .../hive/PostgreSQLJdbcHMSCachedClient.java | 6 + .../hive/ThriftHMSCachedClient.java | 12 +- .../CachingKerberosAuthenticator.java | 50 ++++++++ .../doris/datasource/TestHMSCachedClient.java | 6 + .../kerberos/test_two_hive_kerberos.groovy | 32 +++++ 16 files changed, 470 insertions(+), 80 deletions(-) create mode 100644 fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopAuthenticator.java create mode 100644 fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopKerberosAuthenticator.java create mode 100644 fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopSimpleAuthenticator.java create mode 100644 fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/ImpersonatingHadoopAuthenticator.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/hive/security/CachingKerberosAuthenticator.java diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java index db48f445f80a79..d36d52ad6069c8 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java +++ 
b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java @@ -2672,8 +2672,9 @@ public class Config extends ConfigBase { "Should the request content be logged before each request starts, specifically the query statements"}) public static boolean enable_print_request_before_execution = false; - @ConfField(mutable = true) - public static boolean enable_cooldown_replica_affinity = true; + @ConfField(description = {"Kerberos TGT ticket缓存更新周期", + "Kerberos TGT ticket cache update period"}) + public static long kerberos_tgt_cache_renew_time = 3600 * 1000L; //========================================================================== // begin of cloud config @@ -2913,6 +2914,8 @@ public static int metaServiceRpcRetryTimes() { // ATTN: DONOT add any config not related to cloud mode here // ATTN: DONOT add any config not related to cloud mode here // ATTN: DONOT add any config not related to cloud mode here + @ConfField(mutable = true) + public static boolean enable_cooldown_replica_affinity = true; //========================================================================== // end of cloud config //========================================================================== diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopAuthenticator.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopAuthenticator.java new file mode 100644 index 00000000000000..76f8221c887df0 --- /dev/null +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopAuthenticator.java @@ -0,0 +1,30 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.common.security.authentication; + +import org.apache.hadoop.security.UserGroupInformation; + +import java.io.IOException; +import java.security.PrivilegedExceptionAction; + +public interface HadoopAuthenticator { + + UserGroupInformation getUGI() throws IOException; + + T doAs(PrivilegedExceptionAction action) throws Exception; +} diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopKerberosAuthenticator.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopKerberosAuthenticator.java new file mode 100644 index 00000000000000..3ac21b56706653 --- /dev/null +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopKerberosAuthenticator.java @@ -0,0 +1,110 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.common.security.authentication; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.IOException; +import java.security.PrivilegedExceptionAction; +import java.util.Collections; +import java.util.Map; +import javax.security.auth.Subject; +import javax.security.auth.kerberos.KerberosPrincipal; +import javax.security.auth.login.AppConfigurationEntry; +import javax.security.auth.login.LoginContext; +import javax.security.auth.login.LoginException; + +public class HadoopKerberosAuthenticator implements HadoopAuthenticator { + private static final Logger LOG = LogManager.getLogger(HadoopKerberosAuthenticator.class); + private final KerberosAuthenticationConfig config; + private final UserGroupInformation ugi; + + public HadoopKerberosAuthenticator(KerberosAuthenticationConfig config) { + this.config = config; + try { + ugi = getUGI(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public static void initializeAuthConfig(Configuration hadoopConf) { + hadoopConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); + hadoopConf.set(CommonConfigurationKeysPublic.HADOOP_KERBEROS_KEYTAB_LOGIN_AUTORENEWAL_ENABLED, "true"); + synchronized (HadoopKerberosAuthenticator.class) { + // avoid other catalog set conf at the same time + UserGroupInformation.setConfiguration(hadoopConf); + } + } + + @Override + public UserGroupInformation getUGI() throws IOException { + // login and get ugi when catalog is initialized + initializeAuthConfig(config.getConf()); + String principal = 
config.getKerberosPrincipal(); + Subject subject = getSubject(config.getKerberosKeytab(), principal); + LOG.debug("Login by kerberos authentication with principal: {}", principal); + return UserGroupInformation.getUGIFromSubject(subject); + } + + protected static Subject getSubject(String keytab, String principal) { + Subject subject = new Subject(false, ImmutableSet.of(new KerberosPrincipal(principal)), + Collections.emptySet(), Collections.emptySet()); + javax.security.auth.login.Configuration conf = getConfiguration(keytab, principal); + try { + LoginContext loginContext = new LoginContext("", subject, null, conf); + loginContext.login(); + return loginContext.getSubject(); + } catch (LoginException e) { + throw new RuntimeException(e); + } + } + + protected static javax.security.auth.login.Configuration getConfiguration(String keytab, String principal) { + return new javax.security.auth.login.Configuration() { + @Override + public AppConfigurationEntry[] getAppConfigurationEntry(String name) { + Map options = ImmutableMap.builder() + .put("doNotPrompt", "true") + .put("isInitiator", "true") + .put("useKeyTab", "true") + .put("storeKey", "true") + .put("keyTab", keytab) + .put("principal", principal) + // .put("debug", "true") + .build(); + return new AppConfigurationEntry[]{ + new AppConfigurationEntry( + "com.sun.security.auth.module.Krb5LoginModule", + AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, + options)}; + } + }; + } + + @Override + public T doAs(PrivilegedExceptionAction action) throws Exception { + return ugi.doAs(action); + } +} diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopSimpleAuthenticator.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopSimpleAuthenticator.java new file mode 100644 index 00000000000000..44cf01b567fb13 --- /dev/null +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopSimpleAuthenticator.java @@ -0,0 
+1,66 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.common.security.authentication; + +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.IOException; +import java.security.PrivilegedExceptionAction; + +public class HadoopSimpleAuthenticator implements HadoopAuthenticator { + private static final Logger LOG = LogManager.getLogger(HadoopSimpleAuthenticator.class); + private final SimpleAuthenticationConfig config; + private UserGroupInformation ugi; + + public HadoopSimpleAuthenticator(SimpleAuthenticationConfig config) { + this.config = config; + } + + @Override + public UserGroupInformation getUGI() { + String hadoopUserName = config.getUsername(); + if (hadoopUserName == null) { + hadoopUserName = "hadoop"; + config.setUsername(hadoopUserName); + LOG.debug(AuthenticationConfig.HADOOP_USER_NAME + " is unset, use default user: hadoop"); + } + try { + // get login user just for simple auth + ugi = UserGroupInformation.getLoginUser(); + if (ugi.getUserName().equals(hadoopUserName)) { + return ugi; + } + } catch (IOException e) { + LOG.warn("A SecurityException occurs with simple, 
do login immediately.", e); + } + ugi = UserGroupInformation.createRemoteUser(hadoopUserName); + UserGroupInformation.setLoginUser(ugi); + LOG.debug("Login by proxy user, hadoop.username: {}", hadoopUserName); + return ugi; + } + + @Override + public T doAs(PrivilegedExceptionAction action) throws Exception { + if (ugi != null) { + return ugi.doAs(action); + } + return action.run(); + } +} diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java index 1a86b9e327a2fb..36470d4ed0383e 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java @@ -18,8 +18,6 @@ package org.apache.doris.common.security.authentication; import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.security.UserGroupInformation; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -30,94 +28,52 @@ public class HadoopUGI { private static final Logger LOG = LogManager.getLogger(HadoopUGI.class); + public static HadoopAuthenticator getHadoopAuthenticator(AuthenticationConfig config) { + if (config instanceof KerberosAuthenticationConfig) { + return new HadoopKerberosAuthenticator((KerberosAuthenticationConfig) config); + } else { + return new HadoopSimpleAuthenticator((SimpleAuthenticationConfig) config); + } + } + /** * login and return hadoop ugi * @param config auth config * @return ugi */ + @Deprecated private static UserGroupInformation loginWithUGI(AuthenticationConfig config) { if (config == null || !config.isValid()) { return null; } - UserGroupInformation ugi; if (config instanceof KerberosAuthenticationConfig) { - KerberosAuthenticationConfig krbConfig = 
(KerberosAuthenticationConfig) config; - Configuration hadoopConf = krbConfig.getConf(); - hadoopConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); - hadoopConf.set(CommonConfigurationKeysPublic.HADOOP_KERBEROS_KEYTAB_LOGIN_AUTORENEWAL_ENABLED, "true"); - UserGroupInformation.setConfiguration(hadoopConf); - String principal = krbConfig.getKerberosPrincipal(); try { - // login hadoop with keytab and try checking TGT - ugi = UserGroupInformation.getLoginUser(); - LOG.debug("Current login user: {}", ugi.getUserName()); - if (ugi.hasKerberosCredentials() && StringUtils.equals(ugi.getUserName(), principal)) { - // if the current user is logged by kerberos and is the same user - // just use checkTGTAndReloginFromKeytab because this method will only relogin - // when the TGT is expired or is close to expiry - ugi.checkTGTAndReloginFromKeytab(); - return ugi; + // TODO: remove after iceberg and hudi kerberos test case pass + try { + // login hadoop with keytab and try checking TGT + UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); + LOG.debug("Current login user: {}", ugi.getUserName()); + String principal = ((KerberosAuthenticationConfig) config).getKerberosPrincipal(); + if (ugi.hasKerberosCredentials() && StringUtils.equals(ugi.getUserName(), principal)) { + // if the current user is logged by kerberos and is the same user + // just use checkTGTAndReloginFromKeytab because this method will only relogin + // when the TGT is expired or is close to expiry + ugi.checkTGTAndReloginFromKeytab(); + return ugi; + } + } catch (IOException e) { + LOG.warn("A SecurityException occurs with kerberos, do login immediately.", e); } - } catch (IOException e) { - LOG.warn("A SecurityException occurs with kerberos, do login immediately.", e); - } - try { - ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, krbConfig.getKerberosKeytab()); - UserGroupInformation.setLoginUser(ugi); - LOG.debug("Login by kerberos authentication 
with principal: {}", principal); - return ugi; + return new HadoopKerberosAuthenticator((KerberosAuthenticationConfig) config).getUGI(); } catch (IOException e) { throw new RuntimeException(e); } } else { - String hadoopUserName = ((SimpleAuthenticationConfig) config).getUsername(); - if (hadoopUserName == null) { - hadoopUserName = "hadoop"; - ((SimpleAuthenticationConfig) config).setUsername(hadoopUserName); - LOG.debug(AuthenticationConfig.HADOOP_USER_NAME + " is unset, use default user: hadoop"); - } - - try { - ugi = UserGroupInformation.getLoginUser(); - if (ugi.getUserName().equals(hadoopUserName)) { - return ugi; - } - } catch (IOException e) { - LOG.warn("A SecurityException occurs with simple, do login immediately.", e); - } - - ugi = UserGroupInformation.createRemoteUser(hadoopUserName); - UserGroupInformation.setLoginUser(ugi); - LOG.debug("Login by proxy user, hadoop.username: {}", hadoopUserName); - return ugi; - } - } - - /** - * use for HMSExternalCatalog to login - * @param config auth config - */ - public static void tryKrbLogin(String catalogName, AuthenticationConfig config) { - if (config instanceof KerberosAuthenticationConfig) { - KerberosAuthenticationConfig krbConfig = (KerberosAuthenticationConfig) config; - try { - Configuration hadoopConf = krbConfig.getConf(); - hadoopConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); - hadoopConf.set(CommonConfigurationKeysPublic.HADOOP_KERBEROS_KEYTAB_LOGIN_AUTORENEWAL_ENABLED, "true"); - UserGroupInformation.setConfiguration(hadoopConf); - /** - * Because metastore client is created by using - * {@link org.apache.hadoop.hive.metastore.RetryingMetaStoreClient#getProxy} - * it will relogin when TGT is expired, so we don't need to relogin manually. 
- */ - UserGroupInformation.loginUserFromKeytab(krbConfig.getKerberosPrincipal(), - krbConfig.getKerberosKeytab()); - } catch (IOException e) { - throw new RuntimeException("login with kerberos auth failed for catalog: " + catalogName, e); - } + return new HadoopSimpleAuthenticator((SimpleAuthenticationConfig) config).getUGI(); } } + @Deprecated public static T ugiDoAs(AuthenticationConfig authConf, PrivilegedExceptionAction action) { UserGroupInformation ugi = HadoopUGI.loginWithUGI(authConf); try { diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/ImpersonatingHadoopAuthenticator.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/ImpersonatingHadoopAuthenticator.java new file mode 100644 index 00000000000000..f1881d0ef57382 --- /dev/null +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/ImpersonatingHadoopAuthenticator.java @@ -0,0 +1,49 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.common.security.authentication; + +import org.apache.hadoop.security.UserGroupInformation; + +import java.io.IOException; +import java.security.PrivilegedExceptionAction; +import java.util.Objects; + +public class ImpersonatingHadoopAuthenticator implements HadoopAuthenticator { + + private final HadoopAuthenticator delegate; + private final String username; + private UserGroupInformation ugi; + + public ImpersonatingHadoopAuthenticator(HadoopAuthenticator delegate, String username) { + this.delegate = Objects.requireNonNull(delegate); + this.username = Objects.requireNonNull(username); + } + + @Override + public UserGroupInformation getUGI() throws IOException { + synchronized (ImpersonatingHadoopAuthenticator.class) { + ugi = UserGroupInformation.createProxyUser(username, delegate.getUGI()); + } + return ugi; + } + + @Override + public T doAs(PrivilegedExceptionAction action) throws Exception { + return ugi.doAs(action); + } +} diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java index 722cd0352b7d7d..f3faabac2d72c9 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java @@ -18,9 +18,11 @@ package org.apache.doris.common.security.authentication; import lombok.Data; +import lombok.EqualsAndHashCode; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; +@EqualsAndHashCode(callSuper = true) @Data public class KerberosAuthenticationConfig extends AuthenticationConfig { private String kerberosPrincipal; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java index 1ac77972053677..08be68441ffc03 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java @@ -18,6 +18,7 @@ package org.apache.doris.datasource.hive; import org.apache.doris.analysis.TableName; +import org.apache.doris.common.security.authentication.HadoopAuthenticator; import org.apache.doris.datasource.DatabaseMetadata; import org.apache.doris.datasource.TableMetadata; import org.apache.doris.datasource.hive.event.MetastoreNotificationFetchException; @@ -113,6 +114,8 @@ void updatePartitionStatistics( void dropPartition(String dbName, String tableName, List partitionValues, boolean deleteData); + void setHadoopAuthenticator(HadoopAuthenticator hadoopAuthenticator); + /** * close the connection, eg, to hms */ diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java index 91192b63c24d6e..c920edb0c42378 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java @@ -23,8 +23,6 @@ import org.apache.doris.common.Config; import org.apache.doris.common.DdlException; import org.apache.doris.common.ThreadPoolManager; -import org.apache.doris.common.security.authentication.AuthenticationConfig; -import org.apache.doris.common.security.authentication.HadoopUGI; import org.apache.doris.datasource.CatalogProperty; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.ExternalDatabase; @@ -148,9 +146,6 @@ protected void initLocalObjectsImpl() { } hiveConf.set(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.name(), String.valueOf(Config.hive_metastore_client_timeout_second)); - 
HadoopUGI.tryKrbLogin(this.getName(), AuthenticationConfig.getKerberosConfig(hiveConf, - AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL, - AuthenticationConfig.HADOOP_KERBEROS_KEYTAB)); } HiveMetadataOps hiveOps = ExternalMetadataOperations.newHiveMetadataOps(hiveConf, jdbcClientConfig, this); FileSystemProvider fileSystemProvider = new FileSystemProviderImpl(Env.getCurrentEnv().getExtMetaCacheMgr(), diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java index 7f23385d847886..34a4bfa83d58b3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java @@ -32,11 +32,16 @@ import org.apache.doris.common.FeConstants; import org.apache.doris.common.UserException; import org.apache.doris.common.security.authentication.AuthenticationConfig; +import org.apache.doris.common.security.authentication.HadoopAuthenticator; +import org.apache.doris.common.security.authentication.HadoopUGI; +import org.apache.doris.common.security.authentication.KerberosAuthenticationConfig; +import org.apache.doris.common.security.authentication.SimpleAuthenticationConfig; import org.apache.doris.common.util.CacheBulkLoader; import org.apache.doris.common.util.LocationPath; import org.apache.doris.datasource.CacheException; import org.apache.doris.datasource.FileSplit; import org.apache.doris.datasource.hive.AcidInfo.DeleteDeltaInfo; +import org.apache.doris.datasource.hive.security.CachingKerberosAuthenticator; import org.apache.doris.datasource.property.PropertyConverter; import org.apache.doris.fs.FileSystemCache; import org.apache.doris.fs.remote.RemoteFile; @@ -119,6 +124,7 @@ public class HiveMetaStoreCache { // Other thread may reset this cache, so use AtomicReference to wrap it. 
private volatile AtomicReference> fileCacheRef = new AtomicReference<>(); + private CachingKerberosAuthenticator authenticator; public HiveMetaStoreCache(HMSExternalCatalog catalog, ExecutorService refreshExecutor, ExecutorService fileListingExecutor) { @@ -163,6 +169,7 @@ public Map loadAll(Iterable authenticatorCache = + authCacheFactory.buildCache(key -> { + HadoopAuthenticator auth = HadoopUGI.getHadoopAuthenticator(config); + return auth.getUGI(); + }, null, this.refreshExecutor); + authenticator = new CachingKerberosAuthenticator(config, authenticatorCache); + } + private void initMetrics() { // partition value GaugeMetric valueCacheGauge = new GaugeMetric("hive_meta_cache", @@ -586,6 +610,10 @@ public void invalidateTableCache(String dbName, String tblName) { } } + public HadoopAuthenticator getCachingKerberosAuthenticator() { + return authenticator; + } + public void invalidatePartitionCache(String dbName, String tblName, String partitionName) { PartitionValueCacheKey key = new PartitionValueCacheKey(dbName, tblName, null); HivePartitionValues partitionValues = partitionValuesCache.getIfPresent(key); @@ -1071,6 +1099,44 @@ private static boolean isGeneratedPath(String name) { } } + @Data + public static class AuthenticatorCacheKey { + private String credentialName; + + public AuthenticatorCacheKey(String credentialName) { + this.credentialName = credentialName; + } + + public static AuthenticatorCacheKey createHadoopAuthKey(AuthenticationConfig config) { + if (config instanceof KerberosAuthenticationConfig) { + return new AuthenticatorCacheKey(((KerberosAuthenticationConfig) config).getKerberosPrincipal()); + } else { + return new AuthenticatorCacheKey(((SimpleAuthenticationConfig) config).getUsername()); + } + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof AuthenticatorCacheKey)) { + return false; + } + return credentialName.equals(((AuthenticatorCacheKey) obj).credentialName); + } + + 
@Override + public int hashCode() { + return Objects.hash(credentialName); + } + + @Override + public String toString() { + return "AuthenticatorCacheKey{" + "credentialName=" + credentialName + '}'; + } + } + @Data public static class HiveFileStatus { BlockLocation[] blockLocations; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java index 7267297c93e7a6..b25d68ed695fc7 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java @@ -32,6 +32,7 @@ import org.apache.doris.common.ErrorCode; import org.apache.doris.common.ErrorReport; import org.apache.doris.common.UserException; +import org.apache.doris.common.security.authentication.HadoopAuthenticator; import org.apache.doris.datasource.ExternalDatabase; import org.apache.doris.datasource.jdbc.client.JdbcClient; import org.apache.doris.datasource.jdbc.client.JdbcClientConfig; @@ -60,11 +61,15 @@ public class HiveMetadataOps implements ExternalMetadataOps { private static final int MIN_CLIENT_POOL_SIZE = 8; private final HMSCachedClient client; private final HMSExternalCatalog catalog; + private HadoopAuthenticator hadoopAuthenticator; public HiveMetadataOps(HiveConf hiveConf, JdbcClientConfig jdbcClientConfig, HMSExternalCatalog catalog) { this(catalog, createCachedClient(hiveConf, Math.max(MIN_CLIENT_POOL_SIZE, Config.max_external_cache_loader_thread_pool_size), jdbcClientConfig)); + hadoopAuthenticator = Env.getCurrentEnv().getExtMetaCacheMgr().getMetaStoreCache(catalog) + .getCachingKerberosAuthenticator(); + client.setHadoopAuthenticator(hadoopAuthenticator); } @VisibleForTesting @@ -84,7 +89,8 @@ public HMSExternalCatalog getCatalog() { private static HMSCachedClient createCachedClient(HiveConf hiveConf, int thriftClientPoolSize, JdbcClientConfig jdbcClientConfig) { if 
(hiveConf != null) { - return new ThriftHMSCachedClient(hiveConf, thriftClientPoolSize); + ThriftHMSCachedClient client = new ThriftHMSCachedClient(hiveConf, thriftClientPoolSize); + return client; } Preconditions.checkNotNull(jdbcClientConfig, "hiveConf and jdbcClientConfig are both null"); String dbType = JdbcClient.parseDbType(jdbcClientConfig.getJdbcUrl()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java index 0cdb3e469c2951..729b4cc617ccc7 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java @@ -20,6 +20,7 @@ import org.apache.doris.analysis.TableName; import org.apache.doris.catalog.JdbcTable; import org.apache.doris.catalog.Type; +import org.apache.doris.common.security.authentication.HadoopAuthenticator; import org.apache.doris.datasource.DatabaseMetadata; import org.apache.doris.datasource.TableMetadata; import org.apache.doris.datasource.hive.event.MetastoreNotificationFetchException; @@ -587,4 +588,9 @@ public void dropTable(String dbName, String tblName) { public String getCatalogLocation(String catalogName) { throw new HMSClientException("Do not support in PostgreSQLJdbcHMSCachedClient."); } + + @Override + public void setHadoopAuthenticator(HadoopAuthenticator hadoopAuthenticator) { + throw new HMSClientException("Do not support in PostgreSQLJdbcHMSCachedClient."); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java index 55d8ffc2e02f46..7632101d7ffaeb 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java @@ -20,6 +20,7 @@ import org.apache.doris.analysis.TableName; import org.apache.doris.catalog.Column; import org.apache.doris.common.Config; +import org.apache.doris.common.security.authentication.HadoopAuthenticator; import org.apache.doris.datasource.DatabaseMetadata; import org.apache.doris.datasource.TableMetadata; import org.apache.doris.datasource.hive.event.MetastoreNotificationFetchException; @@ -92,6 +93,7 @@ public class ThriftHMSCachedClient implements HMSCachedClient { private boolean isClosed = false; private final int poolSize; private final HiveConf hiveConf; + private HadoopAuthenticator hadoopAuthenticator; public ThriftHMSCachedClient(HiveConf hiveConf, int poolSize) { Preconditions.checkArgument(poolSize > 0, poolSize); @@ -104,6 +106,10 @@ public ThriftHMSCachedClient(HiveConf hiveConf, int poolSize) { this.isClosed = false; } + public void setHadoopAuthenticator(HadoopAuthenticator hadoopAuthenticator) { + this.hadoopAuthenticator = hadoopAuthenticator; + } + @Override public void close() { synchronized (clientPool) { @@ -678,7 +684,11 @@ public String getCatalogLocation(String catalogName) { } private T ugiDoAs(PrivilegedExceptionAction action) { - return HiveMetaStoreClientHelper.ugiDoAs(hiveConf, action); + try { + return hadoopAuthenticator.doAs(action); + } catch (Exception e) { + throw new RuntimeException(e); + } } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/security/CachingKerberosAuthenticator.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/security/CachingKerberosAuthenticator.java new file mode 100644 index 00000000000000..8da370a32c6893 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/security/CachingKerberosAuthenticator.java @@ -0,0 +1,50 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. 
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.datasource.hive.security; + +import org.apache.doris.common.security.authentication.AuthenticationConfig; +import org.apache.doris.common.security.authentication.HadoopAuthenticator; +import org.apache.doris.datasource.hive.HiveMetaStoreCache; + +import com.github.benmanes.caffeine.cache.LoadingCache; +import org.apache.hadoop.security.UserGroupInformation; + +import java.io.IOException; +import java.security.PrivilegedExceptionAction; + +public class CachingKerberosAuthenticator implements HadoopAuthenticator { + private final AuthenticationConfig config; + private final LoadingCache authenticatorCache; + + public CachingKerberosAuthenticator(AuthenticationConfig config, + LoadingCache authCacheLoader) { + this.config = config; + this.authenticatorCache = authCacheLoader; + } + + @Override + public UserGroupInformation getUGI() throws IOException { + return authenticatorCache.get(HiveMetaStoreCache.AuthenticatorCacheKey.createHadoopAuthKey(config)); + } + + @Override + public T doAs(PrivilegedExceptionAction action) throws Exception { + return getUGI().doAs(action); + } +} diff --git a/fe/fe-core/src/test/java/org/apache/doris/datasource/TestHMSCachedClient.java b/fe/fe-core/src/test/java/org/apache/doris/datasource/TestHMSCachedClient.java index 
f3cb918d6f58d2..367088c04b5118 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/datasource/TestHMSCachedClient.java +++ b/fe/fe-core/src/test/java/org/apache/doris/datasource/TestHMSCachedClient.java @@ -18,6 +18,7 @@ package org.apache.doris.datasource; import org.apache.doris.analysis.TableName; +import org.apache.doris.common.security.authentication.HadoopAuthenticator; import org.apache.doris.datasource.hive.HMSCachedClient; import org.apache.doris.datasource.hive.HMSTransaction; import org.apache.doris.datasource.hive.HiveDatabaseMetadata; @@ -337,4 +338,9 @@ public List getTableList(String dbName) { } return tablesList; } + + @Override + public void setHadoopAuthenticator(HadoopAuthenticator hadoopAuthenticator) { + + } } diff --git a/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy b/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy index a3b39d1221a740..50a20f5a6a95cd 100644 --- a/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy +++ b/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy @@ -1,3 +1,5 @@ + + // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information @@ -15,6 +17,8 @@ // specific language governing permissions and limitations // under the License. +import org.junit.Assert; + suite("test_two_hive_kerberos", "p0,external,kerberos,external_docker,external_docker_kerberos") { String enabled = context.config.otherConfigs.get("enableKerberosTest") if (enabled != null && enabled.equalsIgnoreCase("true")) { @@ -66,6 +70,34 @@ suite("test_two_hive_kerberos", "p0,external,kerberos,external_docker,external_d sql """ use test_krb_hive_db """ order_qt_q02 """ select * from test_krb_hive_db.test_krb_hive_tbl """ + // 3.
multi thread test + Thread thread1 = new Thread(() -> { + try { + for (int i = 0; i < 1000; i++) { + sql """ select * from ${hms_catalog_name}.test_krb_hive_db.test_krb_hive_tbl """ + } + } catch (Exception e) { + log.info(e.getMessage()) + Assert.fail(); + } + }) + + Thread thread2 = new Thread(() -> { + try { + for (int i = 0; i < 1000; i++) { + sql """ select * from other_${hms_catalog_name}.test_krb_hive_db.test_krb_hive_tbl """ + } + } catch (Exception e) { + log.info(e.getMessage()) + Assert.fail(); + } + }) + + thread1.start() + thread2.start() + + thread1.join() + thread2.join() sql """drop catalog ${hms_catalog_name};""" sql """drop catalog other_${hms_catalog_name};""" } From 4399a889e3e843758facce8d720dc502f1df3fb0 Mon Sep 17 00:00:00 2001 From: slothever Date: Mon, 15 Jul 2024 18:46:56 +0800 Subject: [PATCH 2/3] fix merge --- .../kerberos/common/conf/doris-krb5.conf | 2 +- .../java/org/apache/doris/common/Config.java | 7 +- .../authentication/AuthenticationConfig.java | 2 + .../authentication/HadoopAuthenticator.java | 16 ++- .../HadoopKerberosAuthenticator.java | 99 ++++++++++++++----- .../HadoopSimpleAuthenticator.java | 35 ++----- .../security/authentication/HadoopUGI.java | 11 +-- .../ImpersonatingHadoopAuthenticator.java | 10 +- .../KerberosAuthenticationConfig.java | 1 + .../datasource/hive/HMSExternalCatalog.java | 11 +++ .../datasource/hive/HiveMetaStoreCache.java | 66 ------------- .../datasource/hive/HiveMetadataOps.java | 3 +- .../CachingKerberosAuthenticator.java | 50 ---------- .../doris/fs/remote/RemoteFileSystem.java | 14 ++- .../doris/fs/remote/dfs/DFSFileSystem.java | 45 ++++++--- .../kerberos/test_two_hive_kerberos.groovy | 3 +- 16 files changed, 165 insertions(+), 210 deletions(-) delete mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/hive/security/CachingKerberosAuthenticator.java diff --git a/docker/thirdparties/docker-compose/kerberos/common/conf/doris-krb5.conf 
b/docker/thirdparties/docker-compose/kerberos/common/conf/doris-krb5.conf index 7624b94e6ad2a4..36547b8f89d163 100644 --- a/docker/thirdparties/docker-compose/kerberos/common/conf/doris-krb5.conf +++ b/docker/thirdparties/docker-compose/kerberos/common/conf/doris-krb5.conf @@ -24,7 +24,7 @@ default_realm = LABS.TERADATA.COM dns_lookup_realm = false dns_lookup_kdc = false - ticket_lifetime = 24h + ticket_lifetime = 5s # this setting is causing a Message stream modified (41) error when talking to KDC running on CentOS 7: https://stackoverflow.com/a/60978520 # renew_lifetime = 7d forwardable = true diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java index d36d52ad6069c8..db48f445f80a79 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java @@ -2672,9 +2672,8 @@ public class Config extends ConfigBase { "Should the request content be logged before each request starts, specifically the query statements"}) public static boolean enable_print_request_before_execution = false; - @ConfField(description = {"Kerberos TGT ticket缓存更新周期", - "Kerberos TGT ticket cache update period"}) - public static long kerberos_tgt_cache_renew_time = 3600 * 1000L; + @ConfField(mutable = true) + public static boolean enable_cooldown_replica_affinity = true; //========================================================================== // begin of cloud config @@ -2914,8 +2913,6 @@ public static int metaServiceRpcRetryTimes() { // ATTN: DONOT add any config not related to cloud mode here // ATTN: DONOT add any config not related to cloud mode here // ATTN: DONOT add any config not related to cloud mode here - @ConfField(mutable = true) - public static boolean enable_cooldown_replica_affinity = true; //========================================================================== // end of cloud config 
//========================================================================== diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/AuthenticationConfig.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/AuthenticationConfig.java index 32a27b2263a746..875ae4542e1193 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/AuthenticationConfig.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/AuthenticationConfig.java @@ -26,6 +26,7 @@ public abstract class AuthenticationConfig { public static String HADOOP_KERBEROS_KEYTAB = "hadoop.kerberos.keytab"; public static String HIVE_KERBEROS_PRINCIPAL = "hive.metastore.kerberos.principal"; public static String HIVE_KERBEROS_KEYTAB = "hive.metastore.kerberos.keytab.file"; + public static String DORIS_KRB5_DEBUG = "doris.krb5.debug"; /** * @return true if the config is valid, otherwise false. @@ -57,6 +58,7 @@ public static AuthenticationConfig getKerberosConfig(Configuration conf, krbConfig.setKerberosPrincipal(conf.get(krbPrincipalKey)); krbConfig.setKerberosKeytab(conf.get(krbKeytabKey)); krbConfig.setConf(conf); + krbConfig.setPrintDebugLog(Boolean.parseBoolean(conf.get(DORIS_KRB5_DEBUG, "false"))); return krbConfig; } else { // AuthType.SIMPLE diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopAuthenticator.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopAuthenticator.java index 76f8221c887df0..c3cab5f410be3a 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopAuthenticator.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopAuthenticator.java @@ -26,5 +26,19 @@ public interface HadoopAuthenticator { UserGroupInformation getUGI() throws IOException; - T doAs(PrivilegedExceptionAction action) throws Exception; + default T 
doAs(PrivilegedExceptionAction action) throws IOException { + try { + return getUGI().doAs(action); + } catch (InterruptedException e) { + throw new IOException(e); + } + } + + static HadoopAuthenticator getHadoopAuthenticator(AuthenticationConfig config) { + if (config instanceof KerberosAuthenticationConfig) { + return new HadoopKerberosAuthenticator((KerberosAuthenticationConfig) config); + } else { + return new HadoopSimpleAuthenticator((SimpleAuthenticationConfig) config); + } + } } diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopKerberosAuthenticator.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopKerberosAuthenticator.java index 3ac21b56706653..14dace68b2453e 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopKerberosAuthenticator.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopKerberosAuthenticator.java @@ -17,8 +17,10 @@ package org.apache.doris.common.security.authentication; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import io.trino.plugin.base.authentication.KerberosTicketUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.security.UserGroupInformation; @@ -26,11 +28,14 @@ import org.apache.logging.log4j.Logger; import java.io.IOException; -import java.security.PrivilegedExceptionAction; import java.util.Collections; +import java.util.Date; import java.util.Map; +import java.util.Objects; +import java.util.Set; import javax.security.auth.Subject; import javax.security.auth.kerberos.KerberosPrincipal; +import javax.security.auth.kerberos.KerberosTicket; import javax.security.auth.login.AppConfigurationEntry; import javax.security.auth.login.LoginContext; import javax.security.auth.login.LoginException; @@ 
-38,20 +43,16 @@ public class HadoopKerberosAuthenticator implements HadoopAuthenticator { private static final Logger LOG = LogManager.getLogger(HadoopKerberosAuthenticator.class); private final KerberosAuthenticationConfig config; - private final UserGroupInformation ugi; + private Subject subject; + private long nextRefreshTime; + private UserGroupInformation ugi; public HadoopKerberosAuthenticator(KerberosAuthenticationConfig config) { this.config = config; - try { - ugi = getUGI(); - } catch (IOException e) { - throw new RuntimeException(e); - } } public static void initializeAuthConfig(Configuration hadoopConf) { hadoopConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); - hadoopConf.set(CommonConfigurationKeysPublic.HADOOP_KERBEROS_KEYTAB_LOGIN_AUTORENEWAL_ENABLED, "true"); synchronized (HadoopKerberosAuthenticator.class) { // avoid other catalog set conf at the same time UserGroupInformation.setConfiguration(hadoopConf); @@ -59,19 +60,73 @@ public static void initializeAuthConfig(Configuration hadoopConf) { } @Override - public UserGroupInformation getUGI() throws IOException { + public synchronized UserGroupInformation getUGI() throws IOException { + if (ugi == null) { + subject = getSubject(config.getKerberosKeytab(), config.getKerberosPrincipal(), config.isPrintDebugLog()); + ugi = Objects.requireNonNull(login(subject), "login result is null"); + return ugi; + } + if (nextRefreshTime < System.currentTimeMillis()) { + long lastRefreshTime = nextRefreshTime; + Subject existingSubject = subject; + if (LOG.isDebugEnabled()) { + Date lastTicketEndTime = getTicketEndTime(subject); + LOG.debug("Current ticket expired time is {}", lastTicketEndTime); + } + // renew subject + Subject newSubject = getSubject(config.getKerberosKeytab(), config.getKerberosPrincipal(), + config.isPrintDebugLog()); + Objects.requireNonNull(login(newSubject), "re-login result is null"); + // modify UGI instead of returning new UGI + 
existingSubject.getPrincipals().addAll(newSubject.getPrincipals()); + Set privateCredentials = existingSubject.getPrivateCredentials(); + // clear the old credentials + synchronized (privateCredentials) { + privateCredentials.clear(); + privateCredentials.addAll(newSubject.getPrivateCredentials()); + } + Set publicCredentials = existingSubject.getPublicCredentials(); + synchronized (publicCredentials) { + publicCredentials.clear(); + publicCredentials.addAll(newSubject.getPublicCredentials()); + } + nextRefreshTime = calculateNextRefreshTime(newSubject); + if (LOG.isDebugEnabled()) { + Date lastTicketEndTime = getTicketEndTime(newSubject); + LOG.debug("Next ticket expired time is {}", lastTicketEndTime); + LOG.debug("Refresh kerberos ticket succeeded, last time is {}, next time is {}", + lastRefreshTime, nextRefreshTime); + } + } + return ugi; + } + + private UserGroupInformation login(Subject subject) throws IOException { // login and get ugi when catalog is initialized initializeAuthConfig(config.getConf()); String principal = config.getKerberosPrincipal(); - Subject subject = getSubject(config.getKerberosKeytab(), principal); - LOG.debug("Login by kerberos authentication with principal: {}", principal); + if (LOG.isDebugEnabled()) { + LOG.debug("Login by kerberos authentication with principal: {}", principal); + } return UserGroupInformation.getUGIFromSubject(subject); } - protected static Subject getSubject(String keytab, String principal) { + private static long calculateNextRefreshTime(Subject subject) { + Preconditions.checkArgument(subject != null, "subject must be present in kerberos based UGI"); + KerberosTicket tgtTicket = KerberosTicketUtils.getTicketGrantingTicket(subject); + return KerberosTicketUtils.getRefreshTime(tgtTicket); + } + + private static Date getTicketEndTime(Subject subject) { + Preconditions.checkArgument(subject != null, "subject must be present in kerberos based UGI"); + KerberosTicket tgtTicket = 
KerberosTicketUtils.getTicketGrantingTicket(subject); + return tgtTicket.getEndTime(); + } + + private static Subject getSubject(String keytab, String principal, boolean printDebugLog) { Subject subject = new Subject(false, ImmutableSet.of(new KerberosPrincipal(principal)), Collections.emptySet(), Collections.emptySet()); - javax.security.auth.login.Configuration conf = getConfiguration(keytab, principal); + javax.security.auth.login.Configuration conf = getConfiguration(keytab, principal, printDebugLog); try { LoginContext loginContext = new LoginContext("", subject, null, conf); loginContext.login(); @@ -81,19 +136,22 @@ protected static Subject getSubject(String keytab, String principal) { } } - protected static javax.security.auth.login.Configuration getConfiguration(String keytab, String principal) { + private static javax.security.auth.login.Configuration getConfiguration(String keytab, String principal, + boolean printDebugLog) { return new javax.security.auth.login.Configuration() { @Override public AppConfigurationEntry[] getAppConfigurationEntry(String name) { - Map options = ImmutableMap.builder() + ImmutableMap.Builder builder = ImmutableMap.builder() .put("doNotPrompt", "true") .put("isInitiator", "true") .put("useKeyTab", "true") .put("storeKey", "true") .put("keyTab", keytab) - .put("principal", principal) - // .put("debug", "true") - .build(); + .put("principal", principal); + if (printDebugLog) { + builder.put("debug", "true"); + } + Map options = builder.build(); return new AppConfigurationEntry[]{ new AppConfigurationEntry( "com.sun.security.auth.module.Krb5LoginModule", @@ -102,9 +160,4 @@ public AppConfigurationEntry[] getAppConfigurationEntry(String name) { } }; } - - @Override - public T doAs(PrivilegedExceptionAction action) throws Exception { - return ugi.doAs(action); - } } diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopSimpleAuthenticator.java 
b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopSimpleAuthenticator.java index 44cf01b567fb13..fbe0d0aba7d39f 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopSimpleAuthenticator.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopSimpleAuthenticator.java @@ -21,46 +21,27 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import java.io.IOException; -import java.security.PrivilegedExceptionAction; - public class HadoopSimpleAuthenticator implements HadoopAuthenticator { private static final Logger LOG = LogManager.getLogger(HadoopSimpleAuthenticator.class); - private final SimpleAuthenticationConfig config; - private UserGroupInformation ugi; + private final UserGroupInformation ugi; public HadoopSimpleAuthenticator(SimpleAuthenticationConfig config) { - this.config = config; - } - - @Override - public UserGroupInformation getUGI() { String hadoopUserName = config.getUsername(); if (hadoopUserName == null) { hadoopUserName = "hadoop"; config.setUsername(hadoopUserName); - LOG.debug(AuthenticationConfig.HADOOP_USER_NAME + " is unset, use default user: hadoop"); - } - try { - // get login user just for simple auth - ugi = UserGroupInformation.getLoginUser(); - if (ugi.getUserName().equals(hadoopUserName)) { - return ugi; + if (LOG.isDebugEnabled()) { + LOG.debug("{} is unset, use default user: hadoop", AuthenticationConfig.HADOOP_USER_NAME); } - } catch (IOException e) { - LOG.warn("A SecurityException occurs with simple, do login immediately.", e); } ugi = UserGroupInformation.createRemoteUser(hadoopUserName); - UserGroupInformation.setLoginUser(ugi); - LOG.debug("Login by proxy user, hadoop.username: {}", hadoopUserName); - return ugi; + if (LOG.isDebugEnabled()) { + LOG.debug("Login by proxy user, hadoop.username: {}", hadoopUserName); + } } @Override - public T doAs(PrivilegedExceptionAction action) throws 
Exception { - if (ugi != null) { - return ugi.doAs(action); - } - return action.run(); + public UserGroupInformation getUGI() { + return ugi; } } diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java index 36470d4ed0383e..d04d772728bc55 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java @@ -25,23 +25,15 @@ import java.io.IOException; import java.security.PrivilegedExceptionAction; +@Deprecated public class HadoopUGI { private static final Logger LOG = LogManager.getLogger(HadoopUGI.class); - public static HadoopAuthenticator getHadoopAuthenticator(AuthenticationConfig config) { - if (config instanceof KerberosAuthenticationConfig) { - return new HadoopKerberosAuthenticator((KerberosAuthenticationConfig) config); - } else { - return new HadoopSimpleAuthenticator((SimpleAuthenticationConfig) config); - } - } - /** * login and return hadoop ugi * @param config auth config * @return ugi */ - @Deprecated private static UserGroupInformation loginWithUGI(AuthenticationConfig config) { if (config == null || !config.isValid()) { return null; @@ -73,7 +65,6 @@ private static UserGroupInformation loginWithUGI(AuthenticationConfig config) { } } - @Deprecated public static T ugiDoAs(AuthenticationConfig authConf, PrivilegedExceptionAction action) { UserGroupInformation ugi = HadoopUGI.loginWithUGI(authConf); try { diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/ImpersonatingHadoopAuthenticator.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/ImpersonatingHadoopAuthenticator.java index f1881d0ef57382..10e42f4bc67ab0 100644 --- 
a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/ImpersonatingHadoopAuthenticator.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/ImpersonatingHadoopAuthenticator.java @@ -20,7 +20,6 @@ import org.apache.hadoop.security.UserGroupInformation; import java.io.IOException; -import java.security.PrivilegedExceptionAction; import java.util.Objects; public class ImpersonatingHadoopAuthenticator implements HadoopAuthenticator { @@ -35,15 +34,10 @@ public ImpersonatingHadoopAuthenticator(HadoopAuthenticator delegate, String use } @Override - public UserGroupInformation getUGI() throws IOException { - synchronized (ImpersonatingHadoopAuthenticator.class) { + public synchronized UserGroupInformation getUGI() throws IOException { + if (ugi == null) { ugi = UserGroupInformation.createProxyUser(username, delegate.getUGI()); } return ugi; } - - @Override - public T doAs(PrivilegedExceptionAction action) throws Exception { - return ugi.doAs(action); - } } diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java index f3faabac2d72c9..adf76274386f7c 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/KerberosAuthenticationConfig.java @@ -28,6 +28,7 @@ public class KerberosAuthenticationConfig extends AuthenticationConfig { private String kerberosPrincipal; private String kerberosKeytab; private Configuration conf; + private boolean printDebugLog; @Override public boolean isValid() { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java index c920edb0c42378..fc97da41ac2543 
100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java @@ -23,6 +23,8 @@ import org.apache.doris.common.Config; import org.apache.doris.common.DdlException; import org.apache.doris.common.ThreadPoolManager; +import org.apache.doris.common.security.authentication.AuthenticationConfig; +import org.apache.doris.common.security.authentication.HadoopAuthenticator; import org.apache.doris.datasource.CatalogProperty; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.ExternalDatabase; @@ -66,6 +68,7 @@ public class HMSExternalCatalog extends ExternalCatalog { private static final int FILE_SYSTEM_EXECUTOR_THREAD_NUM = 16; private ThreadPoolExecutor fileSystemExecutor; + private HadoopAuthenticator authenticator; public HMSExternalCatalog() { catalogProperty = new CatalogProperty(null, null); @@ -81,6 +84,14 @@ public HMSExternalCatalog(long catalogId, String name, String resource, Map> fileCacheRef = new AtomicReference<>(); - private CachingKerberosAuthenticator authenticator; public HiveMetaStoreCache(HMSExternalCatalog catalog, ExecutorService refreshExecutor, ExecutorService fileListingExecutor) { @@ -169,7 +163,6 @@ public Map loadAll(Iterable authenticatorCache = - authCacheFactory.buildCache(key -> { - HadoopAuthenticator auth = HadoopUGI.getHadoopAuthenticator(config); - return auth.getUGI(); - }, null, this.refreshExecutor); - authenticator = new CachingKerberosAuthenticator(config, authenticatorCache); - } - private void initMetrics() { // partition value GaugeMetric valueCacheGauge = new GaugeMetric("hive_meta_cache", @@ -610,10 +586,6 @@ public void invalidateTableCache(String dbName, String tblName) { } } - public HadoopAuthenticator getCachingKerberosAuthenticator() { - return authenticator; - } - public void invalidatePartitionCache(String dbName, String tblName, String partitionName) { 
PartitionValueCacheKey key = new PartitionValueCacheKey(dbName, tblName, null); HivePartitionValues partitionValues = partitionValuesCache.getIfPresent(key); @@ -1099,44 +1071,6 @@ private static boolean isGeneratedPath(String name) { } } - @Data - public static class AuthenticatorCacheKey { - private String credentialName; - - public AuthenticatorCacheKey(String credentialName) { - this.credentialName = credentialName; - } - - public static AuthenticatorCacheKey createHadoopAuthKey(AuthenticationConfig config) { - if (config instanceof KerberosAuthenticationConfig) { - return new AuthenticatorCacheKey(((KerberosAuthenticationConfig) config).getKerberosPrincipal()); - } else { - return new AuthenticatorCacheKey(((SimpleAuthenticationConfig) config).getUsername()); - } - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (!(obj instanceof AuthenticatorCacheKey)) { - return false; - } - return credentialName.equals(((AuthenticatorCacheKey) obj).credentialName); - } - - @Override - public int hashCode() { - return Objects.hash(credentialName); - } - - @Override - public String toString() { - return "AuthenticatorCacheKey{" + "credentialName=" + credentialName + '}'; - } - } - @Data public static class HiveFileStatus { BlockLocation[] blockLocations; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java index b25d68ed695fc7..f438f5e1782f5a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java @@ -67,8 +67,7 @@ public HiveMetadataOps(HiveConf hiveConf, JdbcClientConfig jdbcClientConfig, HMS this(catalog, createCachedClient(hiveConf, Math.max(MIN_CLIENT_POOL_SIZE, Config.max_external_cache_loader_thread_pool_size), jdbcClientConfig)); - hadoopAuthenticator = 
Env.getCurrentEnv().getExtMetaCacheMgr().getMetaStoreCache(catalog) - .getCachingKerberosAuthenticator(); + hadoopAuthenticator = catalog.getAuthenticator(); client.setHadoopAuthenticator(hadoopAuthenticator); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/security/CachingKerberosAuthenticator.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/security/CachingKerberosAuthenticator.java deleted file mode 100644 index 8da370a32c6893..00000000000000 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/security/CachingKerberosAuthenticator.java +++ /dev/null @@ -1,50 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.datasource.hive.security; - -import org.apache.doris.common.security.authentication.AuthenticationConfig; -import org.apache.doris.common.security.authentication.HadoopAuthenticator; -import org.apache.doris.datasource.hive.HiveMetaStoreCache; - -import com.github.benmanes.caffeine.cache.LoadingCache; -import org.apache.hadoop.security.UserGroupInformation; - -import java.io.IOException; -import java.security.PrivilegedExceptionAction; - -public class CachingKerberosAuthenticator implements HadoopAuthenticator { - private final AuthenticationConfig config; - private final LoadingCache authenticatorCache; - - public CachingKerberosAuthenticator(AuthenticationConfig config, - LoadingCache authCacheLoader) { - this.config = config; - this.authenticatorCache = authCacheLoader; - } - - @Override - public UserGroupInformation getUGI() throws IOException { - return authenticatorCache.get(HiveMetaStoreCache.AuthenticatorCacheKey.createHadoopAuthKey(config)); - } - - @Override - public T doAs(PrivilegedExceptionAction action) throws Exception { - return getUGI().doAs(action); - } -} diff --git a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java index 68de3a8fdef86f..3cb8a036c2d5f1 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java +++ b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/RemoteFileSystem.java @@ -31,6 +31,7 @@ import org.apache.hadoop.fs.RemoteIterator; import java.io.FileNotFoundException; +import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Set; @@ -56,7 +57,7 @@ public Status listFiles(String remotePath, boolean recursive, List r try { org.apache.hadoop.fs.FileSystem fileSystem = nativeFileSystem(remotePath); Path locatedPath = new Path(remotePath); - RemoteIterator locatedFiles = fileSystem.listFiles(locatedPath, recursive); + RemoteIterator 
locatedFiles = getLocatedFiles(recursive, fileSystem, locatedPath); while (locatedFiles.hasNext()) { LocatedFileStatus fileStatus = locatedFiles.next(); RemoteFile location = new RemoteFile( @@ -72,11 +73,16 @@ public Status listFiles(String remotePath, boolean recursive, List r return Status.OK; } + protected RemoteIterator getLocatedFiles(boolean recursive, + FileSystem fileSystem, Path locatedPath) throws IOException { + return fileSystem.listFiles(locatedPath, recursive); + } + @Override public Status listDirectories(String remotePath, Set result) { try { FileSystem fileSystem = nativeFileSystem(remotePath); - FileStatus[] fileStatuses = fileSystem.listStatus(new Path(remotePath)); + FileStatus[] fileStatuses = getFileStatuses(remotePath, fileSystem); result.addAll( Arrays.stream(fileStatuses) .filter(FileStatus::isDirectory) @@ -88,6 +94,10 @@ public Status listDirectories(String remotePath, Set result) { return Status.OK; } + protected FileStatus[] getFileStatuses(String remotePath, FileSystem fileSystem) throws IOException { + return fileSystem.listStatus(new Path(remotePath)); + } + @Override public Status renameDir(String origFilePath, String destFilePath, diff --git a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java index 5532f1187fe2bd..59fbd73bda78cf 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java +++ b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java @@ -21,7 +21,7 @@ import org.apache.doris.backup.Status; import org.apache.doris.common.UserException; import org.apache.doris.common.security.authentication.AuthenticationConfig; -import org.apache.doris.common.security.authentication.HadoopUGI; +import org.apache.doris.common.security.authentication.HadoopAuthenticator; import org.apache.doris.common.util.URI; import org.apache.doris.fs.operations.HDFSFileOperations; import 
org.apache.doris.fs.operations.HDFSOpParams; @@ -35,7 +35,9 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -59,6 +61,7 @@ public class DFSFileSystem extends RemoteFileSystem { public static final String PROP_ALLOW_FALLBACK_TO_SIMPLE_AUTH = "ipc.client.fallback-to-simple-auth-allowed"; private static final Logger LOG = LogManager.getLogger(DFSFileSystem.class); private HDFSFileOperations operations = null; + private HadoopAuthenticator authenticator = null; public DFSFileSystem(Map properties) { this(StorageBackend.StorageType.HDFS, properties); @@ -79,14 +82,19 @@ public FileSystem nativeFileSystem(String remotePath) throws UserException { for (Map.Entry propEntry : properties.entrySet()) { conf.set(propEntry.getKey(), propEntry.getValue()); } - - dfsFileSystem = HadoopUGI.ugiDoAs(AuthenticationConfig.getKerberosConfig(conf), () -> { - try { - return FileSystem.get(new Path(remotePath).toUri(), conf); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); + AuthenticationConfig authConfig = AuthenticationConfig.getKerberosConfig(conf); + authenticator = HadoopAuthenticator.getHadoopAuthenticator(authConfig); + try { + dfsFileSystem = authenticator.doAs(() -> { + try { + return FileSystem.get(new Path(remotePath).toUri(), conf); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } catch (Exception e) { + throw new UserException(e); + } operations = new HDFSFileOperations(dfsFileSystem); } } @@ -94,6 +102,15 @@ public FileSystem nativeFileSystem(String remotePath) throws UserException { return dfsFileSystem; } + protected RemoteIterator getLocatedFiles(boolean recursive, + FileSystem 
fileSystem, Path locatedPath) throws IOException { + return authenticator.doAs(() -> fileSystem.listFiles(locatedPath, recursive)); + } + + protected FileStatus[] getFileStatuses(String remotePath, FileSystem fileSystem) throws IOException { + return authenticator.doAs(() -> fileSystem.listStatus(new Path(remotePath))); + } + public static Configuration getHdfsConf(boolean fallbackToSimpleAuth) { Configuration hdfsConf = new HdfsConfiguration(); if (fallbackToSimpleAuth) { @@ -265,7 +282,7 @@ public Status exists(String remotePath) { URI pathUri = URI.create(remotePath); Path inputFilePath = new Path(pathUri.getPath()); FileSystem fileSystem = nativeFileSystem(remotePath); - boolean isPathExist = fileSystem.exists(inputFilePath); + boolean isPathExist = authenticator.doAs(() -> fileSystem.exists(inputFilePath)); if (!isPathExist) { return new Status(Status.ErrCode.NOT_FOUND, "remote path does not exist: " + remotePath); } @@ -380,7 +397,7 @@ public Status rename(String srcPath, String destPath) { FileSystem fileSystem = nativeFileSystem(destPath); Path srcfilePath = new Path(srcPathUri.getPath()); Path destfilePath = new Path(destPathUri.getPath()); - boolean isRenameSuccess = fileSystem.rename(srcfilePath, destfilePath); + boolean isRenameSuccess = authenticator.doAs(() -> fileSystem.rename(srcfilePath, destfilePath)); if (!isRenameSuccess) { return new Status(Status.ErrCode.COMMON_ERROR, "failed to rename " + srcPath + " to " + destPath); } @@ -401,7 +418,7 @@ public Status delete(String remotePath) { URI pathUri = URI.create(remotePath); Path inputFilePath = new Path(pathUri.getPath()); FileSystem fileSystem = nativeFileSystem(remotePath); - fileSystem.delete(inputFilePath, true); + authenticator.doAs(() -> fileSystem.delete(inputFilePath, true)); } catch (UserException e) { return new Status(Status.ErrCode.COMMON_ERROR, e.getMessage()); } catch (IOException e) { @@ -427,7 +444,7 @@ public Status globList(String remotePath, List result, boolean fileN URI pathUri 
= URI.create(remotePath); FileSystem fileSystem = nativeFileSystem(remotePath); Path pathPattern = new Path(pathUri.getPath()); - FileStatus[] files = fileSystem.globStatus(pathPattern); + FileStatus[] files = authenticator.doAs(() -> fileSystem.globStatus(pathPattern)); if (files == null) { LOG.info("no files in path " + remotePath); return Status.OK; @@ -454,7 +471,7 @@ public Status globList(String remotePath, List result, boolean fileN public Status makeDir(String remotePath) { try { FileSystem fileSystem = nativeFileSystem(remotePath); - if (!fileSystem.mkdirs(new Path(remotePath))) { + if (!authenticator.doAs(() -> fileSystem.mkdirs(new Path(remotePath)))) { LOG.warn("failed to make dir for " + remotePath); return new Status(Status.ErrCode.COMMON_ERROR, "failed to make dir for " + remotePath); } diff --git a/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy b/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy index 50a20f5a6a95cd..7e7f276236adaa 100644 --- a/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy +++ b/regression-test/suites/external_table_p0/kerberos/test_two_hive_kerberos.groovy @@ -92,7 +92,7 @@ suite("test_two_hive_kerberos", "p0,external,kerberos,external_docker,external_d Assert.fail(); } }) - + sleep(5000L) thread1.start() thread2.start() @@ -100,5 +100,6 @@ suite("test_two_hive_kerberos", "p0,external,kerberos,external_docker,external_d thread2.join() sql """drop catalog ${hms_catalog_name};""" sql """drop catalog other_${hms_catalog_name};""" + // TODO: add tvf case } } From c184c1906b4de7d915bdd8624a5cdc53694d08e1 Mon Sep 17 00:00:00 2001 From: slothever Date: Thu, 18 Jul 2024 10:09:31 +0800 Subject: [PATCH 3/3] fix --- .../doris/datasource/hive/HMSCachedClient.java | 4 +++- .../doris/datasource/hive/HMSExternalCatalog.java | 14 ++++++-------- .../hive/PostgreSQLJdbcHMSCachedClient.java | 6 ------ .../doris/datasource/TestHMSCachedClient.java | 6 
------ 4 files changed, 9 insertions(+), 21 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java index 08be68441ffc03..a5e0eefb3483aa 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java @@ -114,7 +114,9 @@ void updatePartitionStatistics( void dropPartition(String dbName, String tableName, List partitionValues, boolean deleteData); - void setHadoopAuthenticator(HadoopAuthenticator hadoopAuthenticator); + default void setHadoopAuthenticator(HadoopAuthenticator hadoopAuthenticator) { + // Ignored by default + } /** * close the connection, eg, to hms diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java index fc97da41ac2543..a22eacaf1e4fc1 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java @@ -40,7 +40,9 @@ import org.apache.doris.fs.remote.dfs.DFSFileSystem; import org.apache.doris.transaction.TransactionManagerFactory; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Strings; +import lombok.Getter; import org.apache.commons.lang3.math.NumberUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.logging.log4j.LogManager; @@ -68,8 +70,10 @@ public class HMSExternalCatalog extends ExternalCatalog { private static final int FILE_SYSTEM_EXECUTOR_THREAD_NUM = 16; private ThreadPoolExecutor fileSystemExecutor; + @Getter private HadoopAuthenticator authenticator; + @VisibleForTesting public HMSExternalCatalog() { catalogProperty = new CatalogProperty(null, null); } @@ -82,14 +86,8 @@ public HMSExternalCatalog(long 
catalogId, String name, String resource, Map getTableList(String dbName) { } return tablesList; } - - @Override - public void setHadoopAuthenticator(HadoopAuthenticator hadoopAuthenticator) { - - } }