Skip to content

Commit

Permalink
[apache#3537] improvement(hadoop-catalog): Add e2e user authenticati…
Browse files Browse the repository at this point in the history
…on tests for Hadoop catalog. (apache#3552)

### What changes were proposed in this pull request?

Add e2e user authentication tests for the Hadoop catalog.

### Why are the changes needed?

Fix: apache#3537 

### Does this PR introduce _any_ user-facing change?

N/A.

### How was this patch tested?

Test locally.
  • Loading branch information
yuqi1129 authored and diqiu50 committed Jun 13, 2024
1 parent 7e0f6f4 commit 209fca7
Show file tree
Hide file tree
Showing 3 changed files with 246 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,10 @@
import org.slf4j.LoggerFactory;

public class KerberosClient {
private static final Logger LOG = LoggerFactory.getLogger(KerberosClient.class);

public static final String GRAVITINO_KEYTAB_FORMAT = "keytabs/gravitino-%s-keytab";

private static final Logger LOG = LoggerFactory.getLogger(KerberosClient.class);

private final ScheduledThreadPoolExecutor checkTgtExecutor;
private final Map<String, String> conf;
private final Configuration hadoopConf;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,216 @@
/*
* Copyright 2024 Datastrato Pvt Ltd.
* This software is licensed under the Apache License version 2.
*/

package com.datastrato.gravitino.catalog.hadoop.integration.test;

import static com.datastrato.gravitino.catalog.hadoop.kerberos.AuthenticationConfig.AUTH_TYPE_KEY;
import static com.datastrato.gravitino.catalog.hadoop.kerberos.AuthenticationConfig.ENABLE_AUTH_KEY;
import static com.datastrato.gravitino.catalog.hadoop.kerberos.KerberosConfig.IMPERSONATION_ENABLE_KEY;
import static com.datastrato.gravitino.catalog.hadoop.kerberos.KerberosConfig.KEY_TAB_URI_KEY;
import static com.datastrato.gravitino.catalog.hadoop.kerberos.KerberosConfig.PRINCIPAL_KEY;

import com.datastrato.gravitino.Catalog;
import com.datastrato.gravitino.NameIdentifier;
import com.datastrato.gravitino.SchemaChange;
import com.datastrato.gravitino.client.GravitinoAdminClient;
import com.datastrato.gravitino.client.GravitinoMetalake;
import com.datastrato.gravitino.client.KerberosTokenProvider;
import com.datastrato.gravitino.file.Fileset;
import com.datastrato.gravitino.integration.test.container.ContainerSuite;
import com.datastrato.gravitino.integration.test.container.HiveContainer;
import com.datastrato.gravitino.integration.test.util.AbstractIT;
import com.datastrato.gravitino.integration.test.util.GravitinoITUtils;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sun.security.krb5.KrbException;

@Tag("gravitino-docker-it")
@Tag("gravitino-docker-it")
public class HadoopUserAuthenticationIT extends AbstractIT {
  private static final Logger LOG = LoggerFactory.getLogger(HadoopUserAuthenticationIT.class);

  private static final ContainerSuite containerSuite = ContainerSuite.getInstance();

  // Gravitino SDK client-side configuration keys for Kerberos authentication.
  private static final String SDK_KERBEROS_PRINCIPAL_KEY = "client.kerberos.principal";
  private static final String SDK_KERBEROS_KEYTAB_KEY = "client.kerberos.keytab";

  // Principal/keytab used by the Gravitino SDK client to talk to the Gravitino server.
  private static final String GRAVITINO_CLIENT_PRINCIPAL = "gravitino_client@HADOOPKRB";
  private static final String GRAVITINO_CLIENT_KEYTAB = "/gravitino_client.keytab";

  // Principal/keytab used by the Gravitino server itself (SPNEGO HTTP principal).
  private static final String GRAVITINO_SERVER_PRINCIPAL = "HTTP/localhost@HADOOPKRB";
  private static final String GRAVITINO_SERVER_KEYTAB = "/gravitino_server.keytab";

  // Principal/keytab the Hadoop catalog uses to access the Kerberized HDFS.
  private static final String HADOOP_CLIENT_PRINCIPAL = "cli@HADOOPKRB";
  private static final String HADOOP_CLIENT_KEYTAB = "/client.keytab";

  // Local scratch directory holding the keytabs and krb5.conf copied out of the container.
  private static String TMP_DIR;

  private static String HDFS_URL;

  private static GravitinoAdminClient adminClient;

  private static HiveContainer kerberosHiveContainer;

  private static final String METALAKE_NAME =
      GravitinoITUtils.genRandomName("CatalogHadoop_metalake");
  private static final String CATALOG_NAME =
      GravitinoITUtils.genRandomName("CatalogHadoop_catalog");
  private static final String SCHEMA_NAME = GravitinoITUtils.genRandomName("CatalogHadoop_schema");

  @SuppressWarnings("unused")
  private static final String TABLE_NAME = "test_table";

  /**
   * Boots the Kerberized Hive container, copies the Kerberos artifacts (keytabs, krb5.conf) to a
   * local temp directory, injects the Kerberos settings into the Gravitino server configuration,
   * and finally starts the Gravitino server.
   */
  @BeforeAll
  public static void startIntegrationTest() throws Exception {
    containerSuite.startKerberosHiveContainer();
    kerberosHiveContainer = containerSuite.getKerberosHiveContainer();

    File baseDir = new File(System.getProperty("java.io.tmpdir"));
    File file = Files.createTempDirectory(baseDir.toPath(), "test").toFile();
    file.deleteOnExit();
    TMP_DIR = file.getAbsolutePath();

    HDFS_URL = String.format("hdfs://%s:9000", kerberosHiveContainer.getContainerIpAddress());

    // Prepare Kerberos related config (keytabs, krb5.conf, system properties).
    prepareKerberosConfig();

    // Configure Kerberos authentication for the Gravitino server.
    addKerberosConfig();

    // Start Gravitino server
    AbstractIT.startIntegrationTest();
  }

  /** Resets the UGI and Kerberos-related system properties so later ITs start clean. */
  @AfterAll
  public static void stop() {
    // Release the SDK client created in testUserAuthentication; close failures must not mask
    // the test result, so they are only logged.
    if (adminClient != null) {
      try {
        adminClient.close();
      } catch (Exception e) {
        LOG.warn("Failed to close the Gravitino admin client", e);
      }
    }

    // Reset the UGI
    UserGroupInformation.reset();

    // Clean up the kerberos configuration
    System.clearProperty("java.security.krb5.conf");
    System.clearProperty("sun.security.krb5.debug");
  }

  /**
   * Copies the keytabs and krb5.conf out of the container into {@code TMP_DIR}, rewrites the KDC
   * and admin-server addresses in krb5.conf to point at the container IP, and points the JVM at
   * the rewritten krb5.conf.
   *
   * @throws IOException if a file cannot be copied or rewritten
   */
  private static void prepareKerberosConfig() throws IOException {
    // Keytab of the Gravitino SDK client
    kerberosHiveContainer
        .getContainer()
        .copyFileFromContainer("/gravitino_client.keytab", TMP_DIR + GRAVITINO_CLIENT_KEYTAB);

    // Keytab of the Gravitino server
    kerberosHiveContainer
        .getContainer()
        .copyFileFromContainer("/gravitino_server.keytab", TMP_DIR + GRAVITINO_SERVER_KEYTAB);

    // Keytab of Gravitino server to connect to HDFS
    kerberosHiveContainer
        .getContainer()
        .copyFileFromContainer("/etc/admin.keytab", TMP_DIR + HADOOP_CLIENT_KEYTAB);

    // Place both krb5 files inside TMP_DIR (the missing '/' previously made them siblings of
    // the temp directory, e.g. "/tmp/test123krb5.conf").
    String tmpKrb5Path = TMP_DIR + "/krb5.conf_tmp";
    String krb5Path = TMP_DIR + "/krb5.conf";
    kerberosHiveContainer.getContainer().copyFileFromContainer("/etc/krb5.conf", tmpKrb5Path);

    // Modify the krb5.conf and change the kdc and admin_server to the container IP
    String ip = containerSuite.getKerberosHiveContainer().getContainerIpAddress();
    String content = FileUtils.readFileToString(new File(tmpKrb5Path), StandardCharsets.UTF_8);
    content = content.replace("kdc = localhost:88", "kdc = " + ip + ":88");
    content = content.replace("admin_server = localhost", "admin_server = " + ip + ":749");
    FileUtils.write(new File(krb5Path), content, StandardCharsets.UTF_8);

    LOG.info("Kerberos kdc config:\n{}", content);
    System.setProperty("java.security.krb5.conf", krb5Path);
    System.setProperty("sun.security.krb5.debug", "true");
  }

  /** Injects the Kerberos authenticator settings into the Gravitino server's custom configs. */
  private static void addKerberosConfig() {
    AbstractIT.customConfigs.put("gravitino.authenticator", "kerberos");
    AbstractIT.customConfigs.put(
        "gravitino.authenticator.kerberos.principal", GRAVITINO_SERVER_PRINCIPAL);
    AbstractIT.customConfigs.put(
        "gravitino.authenticator.kerberos.keytab", TMP_DIR + GRAVITINO_SERVER_KEYTAB);
    AbstractIT.customConfigs.put(SDK_KERBEROS_KEYTAB_KEY, TMP_DIR + GRAVITINO_CLIENT_KEYTAB);
    AbstractIT.customConfigs.put(SDK_KERBEROS_PRINCIPAL_KEY, GRAVITINO_CLIENT_PRINCIPAL);
  }

  /**
   * End-to-end authentication scenario: a Kerberos-authenticated SDK client creates a metalake
   * and a Hadoop (fileset) catalog with impersonation enabled, verifies that HDFS permission
   * checks are enforced against the real client user, then exercises schema and fileset CRUD
   * once permissions are granted.
   */
  @Test
  public void testUserAuthentication() {
    KerberosTokenProvider provider =
        KerberosTokenProvider.builder()
            .withClientPrincipal(GRAVITINO_CLIENT_PRINCIPAL)
            .withKeyTabFile(new File(TMP_DIR + GRAVITINO_CLIENT_KEYTAB))
            .build();
    adminClient = GravitinoAdminClient.builder(serverUri).withKerberosAuth(provider).build();

    GravitinoMetalake[] metalakes = adminClient.listMetalakes();
    Assertions.assertEquals(0, metalakes.length);

    GravitinoMetalake gravitinoMetalake =
        adminClient.createMetalake(METALAKE_NAME, null, ImmutableMap.of());

    // Create a catalog
    Map<String, String> properties = Maps.newHashMap();

    properties.put(ENABLE_AUTH_KEY, "true");
    properties.put(AUTH_TYPE_KEY, "kerberos");
    properties.put(IMPERSONATION_ENABLE_KEY, "true");
    properties.put(KEY_TAB_URI_KEY, TMP_DIR + HADOOP_CLIENT_KEYTAB);
    properties.put(PRINCIPAL_KEY, HADOOP_CLIENT_PRINCIPAL);
    properties.put("location", HDFS_URL + "/user/hadoop/");

    kerberosHiveContainer.executeInContainer("hadoop", "fs", "-mkdir", "/user/hadoop");

    Catalog catalog =
        gravitinoMetalake.createCatalog(
            CATALOG_NAME, Catalog.Type.FILESET, "hadoop", "comment", properties);

    // Test create schema
    Exception exception =
        Assertions.assertThrows(
            Exception.class,
            () -> catalog.asSchemas().createSchema(SCHEMA_NAME, "comment", ImmutableMap.of()));
    String exceptionMessage = Throwables.getStackTraceAsString(exception);
    // Make sure real user is 'gravitino_client'
    Assertions.assertTrue(
        exceptionMessage.contains("Permission denied: user=gravitino_client, access=WRITE"));

    // Now try to give the user the permission to create schema again
    kerberosHiveContainer.executeInContainer("hadoop", "fs", "-chmod", "-R", "777", "/user/hadoop");
    Assertions.assertDoesNotThrow(
        () -> catalog.asSchemas().createSchema(SCHEMA_NAME, "comment", ImmutableMap.of()));

    catalog
        .asFilesetCatalog()
        .createFileset(
            NameIdentifier.of(METALAKE_NAME, CATALOG_NAME, SCHEMA_NAME, TABLE_NAME),
            "comment",
            Fileset.Type.MANAGED,
            null,
            ImmutableMap.of());

    catalog
        .asFilesetCatalog()
        .dropFileset(NameIdentifier.of(METALAKE_NAME, CATALOG_NAME, SCHEMA_NAME, TABLE_NAME));

    catalog.asSchemas().alterSchema(SCHEMA_NAME, SchemaChange.setProperty("k1", "value1"));

    catalog.asSchemas().dropSchema(SCHEMA_NAME, true);
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
Expand All @@ -54,11 +55,10 @@ public class HadoopUserImpersonationIT extends AbstractIT {
private static final Logger LOG = LoggerFactory.getLogger(HadoopCatalogIT.class);

public static final String metalakeName =
GravitinoITUtils.genRandomName("CatalogFilesetIT_metalake");
GravitinoITUtils.genRandomName("CatalogHadoopIT_metalake");
public static final String catalogName =
GravitinoITUtils.genRandomName("CatalogFilesetIT_catalog");
public static final String SCHEMA_PREFIX = "CatalogFilesetIT_schema";
public static final String schemaName = GravitinoITUtils.genRandomName(SCHEMA_PREFIX);
GravitinoITUtils.genRandomName("CatalogHadoopIT_catalog");
public static final String schemaName = GravitinoITUtils.genRandomName("CatalogFilesetIT_schema");
private static final String provider = "hadoop";
private static GravitinoMetalake metalake;
private static Catalog catalog;
Expand All @@ -78,6 +78,22 @@ public class HadoopUserImpersonationIT extends AbstractIT {
private static String hdfsUri;
private static UserGroupInformation clientUGI;

/**
 * Forces the JVM-internal Kerberos configuration to be re-read after
 * {@code java.security.krb5.conf} has been changed. Uses reflection because the config class
 * is a JDK-internal type that differs between the IBM JDK and other vendors.
 *
 * @throws RuntimeException wrapping any reflection or refresh failure
 */
private static void refreshKerberosConfig() {
  Class<?> classRef;
  try {
    // IBM JDKs ship their own Kerberos implementation; all others use the Sun/Oracle one.
    if (System.getProperty("java.vendor").contains("IBM")) {
      classRef = Class.forName("com.ibm.security.krb5.internal.Config");
    } else {
      classRef = Class.forName("sun.security.krb5.Config");
    }

    Method refreshMethod = classRef.getMethod("refresh");
    refreshMethod.invoke(null);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}

@BeforeAll
public static void setup() throws Exception {
if (!isEmbedded()) {
Expand All @@ -93,7 +109,6 @@ public static void setup() throws Exception {
kdc.start();

String krb5ConfFile = kdc.getKrb5conf().getAbsolutePath();
System.setProperty("java.security.krb5.conf", krb5ConfFile);

// Reload config when krb5 conf is setup
if (SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_1_8)) {
Expand Down Expand Up @@ -134,6 +149,13 @@ public static void setup() throws Exception {
conf.set("hadoop.proxyuser.hdfs.users", "*");
conf.set(
"hadoop.security.auth_to_local", "RULE:[2:$1@$0](.*@EXAMPLE.COM)s/.*/hadoop/\nDEFAULT");

System.setProperty("java.security.krb5.conf", krb5ConfFile);
refreshKerberosConfig();
KerberosName.resetDefaultRealm();

LOG.info("Kerberos kdc config:\n{}", KerberosName.getDefaultRealm());

UserGroupInformation.setConfiguration(conf);
UserGroupInformation.loginUserFromKeytab(
SERVER_PRINCIPAL.replaceAll("_HOST", HOSTNAME) + "@" + kdc.getRealm(),
Expand Down Expand Up @@ -187,7 +209,9 @@ public static void stop() {
kdcWorkDir.delete();
}

UserGroupInformation.reset();
System.clearProperty("sun.security.krb5.debug");
System.clearProperty("java.security.krb5.conf");
}

@Test
Expand Down

0 comments on commit 209fca7

Please sign in to comment.