Skip to content

Commit

Permalink
Strip kerberos out of the hdfs module
Browse files Browse the repository at this point in the history
  • Loading branch information
epugh committed Nov 22, 2024
1 parent d8b3d00 commit 9f8f9c5
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 70 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,6 @@
*/
package org.apache.solr.hdfs;

import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;

import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.RemovalCause;
import com.google.common.annotations.VisibleForTesting;
Expand All @@ -40,7 +38,6 @@
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.LockFactory;
Expand Down Expand Up @@ -102,10 +99,6 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory

public static final String LOCALITYMETRICS_ENABLED = "solr.hdfs.locality.metrics.enabled";

public static final String KERBEROS_ENABLED = "solr.hdfs.security.kerberos.enabled";
public static final String KERBEROS_KEYTAB = "solr.hdfs.security.kerberos.keytabfile";
public static final String KERBEROS_PRINCIPAL = "solr.hdfs.security.kerberos.principal";

public static final String HDFS_HOME = "solr.hdfs.home";

public static final String CONFIG_DIRECTORY = "solr.hdfs.confdir";
Expand All @@ -128,7 +121,6 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory
private static BlockCache globalBlockCache;

public static Metrics metrics;
private static Boolean kerberosInit;

// we use this cache for FileSystem instances when we don't have access to a long lived instance
private final com.github.benmanes.caffeine.cache.Cache<String, FileSystem> tmpFsCache =
Expand Down Expand Up @@ -179,13 +171,7 @@ public void init(NamedList<?> args) {
}
cacheMerges = getConfig(CACHE_MERGES, false);
cacheReadOnce = getConfig(CACHE_READONCE, false);
boolean kerberosEnabled = getConfig(KERBEROS_ENABLED, false);
if (log.isInfoEnabled()) {
log.info("Solr Kerberos Authentication {}", (kerberosEnabled ? "enabled" : "disabled"));
}
if (kerberosEnabled) {
initKerberos();
}

if (StrUtils.isNullOrEmpty(
EnvUtils.getProperty(SplitShardCmd.SHARDSPLIT_CHECKDISKSPACE_ENABLED))) {
System.setProperty(SplitShardCmd.SHARDSPLIT_CHECKDISKSPACE_ENABLED, "false");
Expand Down Expand Up @@ -530,49 +516,6 @@ public String getConfDir() {
return confDir;
}

private void initKerberos() {
String keytabFile = getConfig(KERBEROS_KEYTAB, "").trim();
if (keytabFile.length() == 0) {
throw new IllegalArgumentException(
KERBEROS_KEYTAB + " required because " + KERBEROS_ENABLED + " set to true");
}
String principal = getConfig(KERBEROS_PRINCIPAL, "");
if (principal.length() == 0) {
throw new IllegalArgumentException(
KERBEROS_PRINCIPAL + " required because " + KERBEROS_ENABLED + " set to true");
}
synchronized (HdfsDirectoryFactory.class) {
if (kerberosInit == null) {
kerberosInit = Boolean.TRUE;
final Configuration conf = getConf(null);
final String authVal = conf.get(HADOOP_SECURITY_AUTHENTICATION);
final String kerberos = "kerberos";
if (authVal != null && !authVal.equals(kerberos)) {
throw new IllegalArgumentException(
HADOOP_SECURITY_AUTHENTICATION
+ " set to: "
+ authVal
+ ", not kerberos, but attempting to "
+ " connect to HDFS via kerberos");
}
// let's avoid modifying the supplied configuration, just to be conservative
final Configuration ugiConf = new Configuration(getConf(null));
ugiConf.set(HADOOP_SECURITY_AUTHENTICATION, kerberos);
UserGroupInformation.setConfiguration(ugiConf);
log.info(
"Attempting to acquire kerberos ticket with keytab: {}, principal: {} ",
keytabFile,
principal);
try {
UserGroupInformation.loginUserFromKeytab(principal, keytabFile);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
log.info("Got Kerberos ticket");
}
}
}

@Override
public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
MetricsHolder.metrics.initializeMetrics(parentContext, scope);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -276,15 +276,3 @@ Here is a sample `solrconfig.xml` configuration for storing Solr indexes on HDFS
<int name="solr.hdfs.nrtcachingdirectory.maxcachedmb">192</int>
</directoryFactory>
----

If using Kerberos, you will need to add the three Kerberos related properties to the `<directoryFactory>` element in `solrconfig.xml`, such as:

[source,xml]
----
<directoryFactory name="DirectoryFactory" class="solr.HdfsDirectoryFactory">
...
<bool name="solr.hdfs.security.kerberos.enabled">true</bool>
<str name="solr.hdfs.security.kerberos.keytabfile">/etc/krb5.keytab</str>
<str name="solr.hdfs.security.kerberos.principal">solr/admin@KERBEROS.COM</str>
</directoryFactory>
----

0 comments on commit 9f8f9c5

Please sign in to comment.