Merge branch 'main' into fix-5157
tyoushinya authored Oct 17, 2024
2 parents 9d20be5 + b7f4e34 commit c7470d2
Showing 61 changed files with 324 additions and 325 deletions.
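
The bulk of the diff shown below is a mechanical migration of the integration tests from the static AbstractIT base class to the instance-scoped BaseIT: @BeforeAll/@AfterAll hooks and private helpers drop their static modifier, explicit AbstractIT. qualifiers on inherited members such as client and customConfigs are removed, and base-class lifecycle calls go through super.startIntegrationTest(). The sketch below shows the resulting subclass shape; the class name and the particular configs are illustrative (assembled from the hunks in this commit, not copied from any one file), and the import location for Configs is assumed from the usual Gravitino package layout.

import java.util.Map;

import com.google.common.collect.Maps;
import org.apache.gravitino.Configs;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;

// Hypothetical subclass illustrating the migrated pattern; not a file in this commit.
@Tag("gravitino-docker-test")
public class ExampleAuthorizationIT extends BaseIT {

  @BeforeAll
  public void startIntegrationTest() throws Exception {
    // Register server-side configs before the base class boots the Gravitino server.
    Map<String, String> configs = Maps.newHashMap();
    configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
    registerCustomConfigs(configs); // inherited helper, no AbstractIT. qualifier needed
    super.startIntegrationTest();   // was: AbstractIT.startIntegrationTest()
  }

  @AfterAll
  public void stop() {
    client = null; // inherited client field, previously cleared as AbstractIT.client = null
  }
}
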
@@ -54,7 +54,7 @@
import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.container.RangerContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.gravitino.meta.AuditInfo;
import org.apache.gravitino.meta.RoleEntity;
@@ -71,7 +71,7 @@
import org.slf4j.LoggerFactory;

@Tag("gravitino-docker-test")
public class RangerHiveE2EIT extends AbstractIT {
public class RangerHiveE2EIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(RangerHiveE2EIT.class);

private static RangerAuthorizationPlugin rangerAuthPlugin;
@@ -99,15 +99,15 @@ public class RangerHiveE2EIT extends AbstractIT {
private static String RANGER_ADMIN_URL = null;

@BeforeAll
public static void startIntegrationTest() throws Exception {
public void startIntegrationTest() throws Exception {
// Enable Gravitino Authorization mode
Map<String, String> configs = Maps.newHashMap();
configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
configs.put(Configs.SERVICE_ADMINS.getKey(), RangerITEnv.HADOOP_USER_NAME);
configs.put(Configs.AUTHENTICATORS.getKey(), AuthenticatorType.SIMPLE.name().toLowerCase());
configs.put("SimpleAuthUserName", TEST_USER_NAME);
registerCustomConfigs(configs);
AbstractIT.startIntegrationTest();
super.startIntegrationTest();

RangerITEnv.setup();
RangerITEnv.startHiveRangerContainer();
@@ -180,7 +180,8 @@ private static void generateRangerSparkSecurityXML() throws IOException {
}

@AfterAll
public static void stop() throws IOException {
public void stop() throws IOException {
client = null;
if (client != null) {
Arrays.stream(catalog.asSchemas().listSchemas())
.filter(schema -> !schema.equals("default"))
@@ -204,7 +205,7 @@ public static void stop() throws IOException {
LOG.error("Failed to close CloseableGroup", e);
}

AbstractIT.client = null;
client = null;
}

@Test
@@ -267,7 +268,7 @@ void testAllowUseSchemaPrivilege() throws InterruptedException {
1, rows2.stream().filter(row -> row.getString(0).equals(schemaName)).count());
}

private static void createMetalake() {
private void createMetalake() {
GravitinoMetalake[] gravitinoMetalakes = client.listMetalakes();
Assertions.assertEquals(0, gravitinoMetalakes.length);

@@ -43,7 +43,7 @@
import org.apache.gravitino.file.FilesetChange;
import org.apache.gravitino.integration.test.container.ContainerSuite;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -57,7 +57,7 @@
import org.slf4j.LoggerFactory;

@Tag("gravitino-docker-test")
public class HadoopCatalogIT extends AbstractIT {
public class HadoopCatalogIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(HadoopCatalogIT.class);
private static final ContainerSuite containerSuite = ContainerSuite.getInstance();

@@ -74,7 +74,7 @@ public class HadoopCatalogIT extends AbstractIT {
private static String defaultBaseLocation;

@BeforeAll
public static void setup() throws IOException {
public void setup() throws IOException {
containerSuite.startHiveContainer();
Configuration conf = new Configuration();
conf.set("fs.defaultFS", defaultBaseLocation());
@@ -86,7 +86,7 @@ public static void setup() throws IOException {
}

@AfterAll
public static void stop() throws IOException {
public void stop() throws IOException {
Catalog catalog = metalake.loadCatalog(catalogName);
catalog.asSchemas().dropSchema(schemaName, true);
metalake.dropCatalog(catalogName);
@@ -102,7 +102,7 @@ public static void stop() throws IOException {
}
}

private static void createMetalake() {
private void createMetalake() {
GravitinoMetalake[] gravitinoMetalakes = client.listMetalakes();
Assertions.assertEquals(0, gravitinoMetalakes.length);

@@ -114,14 +114,14 @@ private static void createMetalake() {
metalake = loadMetalake;
}

private static void createCatalog() {
private void createCatalog() {
metalake.createCatalog(
catalogName, Catalog.Type.FILESET, provider, "comment", ImmutableMap.of());

catalog = metalake.loadCatalog(catalogName);
}

private static void createSchema() {
private void createSchema() {
Map<String, String> properties = Maps.newHashMap();
properties.put("key1", "val1");
properties.put("key2", "val2");
@@ -46,7 +46,7 @@
import org.apache.gravitino.file.Fileset;
import org.apache.gravitino.integration.test.container.ContainerSuite;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.jupiter.api.AfterAll;
@@ -60,7 +60,7 @@
import sun.security.krb5.KrbException;

@Tag("gravitino-docker-test")
public class HadoopUserAuthenticationIT extends AbstractIT {
public class HadoopUserAuthenticationIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(HadoopUserAuthenticationIT.class);

private static final ContainerSuite containerSuite = ContainerSuite.getInstance();
@@ -104,7 +104,7 @@ public class HadoopUserAuthenticationIT extends AbstractIT {
private static final String TABLE_NAME = "test_table";

@BeforeAll
public static void startIntegrationTest() throws Exception {
public void startIntegrationTest() throws Exception {
containerSuite.startKerberosHiveContainer();
kerberosHiveContainer = containerSuite.getKerberosHiveContainer();

@@ -122,7 +122,7 @@ public static void startIntegrationTest() throws Exception {
addKerberosConfig();

// Start Gravitino server
AbstractIT.startIntegrationTest();
super.startIntegrationTest();
}

@AfterAll
@@ -222,14 +222,12 @@ private static void createKeyTableForSchemaAndFileset() throws IOException {
.copyFileFromContainer(HADOOP_FILESET_KEYTAB, TMP_DIR + HADOOP_FILESET_KEYTAB);
}

private static void addKerberosConfig() {
AbstractIT.customConfigs.put(Configs.AUTHENTICATORS.getKey(), "kerberos");
AbstractIT.customConfigs.put(
"gravitino.authenticator.kerberos.principal", GRAVITINO_SERVER_PRINCIPAL);
AbstractIT.customConfigs.put(
"gravitino.authenticator.kerberos.keytab", TMP_DIR + GRAVITINO_SERVER_KEYTAB);
AbstractIT.customConfigs.put(SDK_KERBEROS_KEYTAB_KEY, TMP_DIR + GRAVITINO_CLIENT_KEYTAB);
AbstractIT.customConfigs.put(SDK_KERBEROS_PRINCIPAL_KEY, GRAVITINO_CLIENT_PRINCIPAL);
private void addKerberosConfig() {
customConfigs.put(Configs.AUTHENTICATORS.getKey(), "kerberos");
customConfigs.put("gravitino.authenticator.kerberos.principal", GRAVITINO_SERVER_PRINCIPAL);
customConfigs.put("gravitino.authenticator.kerberos.keytab", TMP_DIR + GRAVITINO_SERVER_KEYTAB);
customConfigs.put(SDK_KERBEROS_KEYTAB_KEY, TMP_DIR + GRAVITINO_CLIENT_KEYTAB);
customConfigs.put(SDK_KERBEROS_PRINCIPAL_KEY, GRAVITINO_CLIENT_PRINCIPAL);
}

@Test
@@ -45,7 +45,7 @@
import org.apache.gravitino.exceptions.FilesetAlreadyExistsException;
import org.apache.gravitino.exceptions.IllegalNameIdentifierException;
import org.apache.gravitino.file.Fileset;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.gravitino.integration.test.util.ITUtils;
import org.apache.hadoop.conf.Configuration;
@@ -67,7 +67,7 @@
import org.slf4j.LoggerFactory;

@Tag("gravitino-docker-test")
public class HadoopUserImpersonationIT extends AbstractIT {
public class HadoopUserImpersonationIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(HadoopCatalogIT.class);

public static final String metalakeName =
@@ -111,7 +111,7 @@ private static void refreshKerberosConfig() {
}

@BeforeAll
public static void setup() throws Exception {
public void setup() throws Exception {
if (!isEmbedded()) {
return;
}
@@ -254,7 +254,7 @@ void testListFileSystem() throws Exception {
});
}

private static void createMetalake() {
private void createMetalake() {
GravitinoMetalake[] gravitinoMetalakes = client.listMetalakes();
Assertions.assertEquals(0, gravitinoMetalakes.length);

@@ -70,7 +70,7 @@
import org.apache.gravitino.hive.HiveClientPool;
import org.apache.gravitino.integration.test.container.ContainerSuite;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.gravitino.rel.Column;
import org.apache.gravitino.rel.Table;
@@ -108,13 +108,11 @@
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Tag("gravitino-docker-test")
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class CatalogHiveIT extends AbstractIT {
public class CatalogHiveIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(CatalogHiveIT.class);
public static final String metalakeName =
GravitinoITUtils.genRandomName("CatalogHiveIT_metalake");
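
The @TestInstance(TestInstance.Lifecycle.PER_CLASS) annotation dropped from CatalogHiveIT in the hunk above is what JUnit 5 requires for non-static @BeforeAll/@AfterAll methods. With this migration it is presumably declared once on BaseIT itself, so subclasses no longer repeat it; that is an assumption, since the BaseIT source is not among the hunks shown. Roughly:

import org.junit.jupiter.api.TestInstance;

// Assumed shape of the shared base class (not part of this diff): the per-class
// lifecycle lets subclasses declare instance-level @BeforeAll/@AfterAll methods.
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class BaseIT {
  // shared client field, customConfigs map, registerCustomConfigs(...),
  // and the base @BeforeAll/@AfterAll lifecycle methods live here.
}
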
@@ -253,7 +251,7 @@ public void stop() throws IOException {
LOG.error("Failed to close CloseableGroup", e);
}

AbstractIT.client = null;
client = null;
}

@AfterEach
@@ -43,7 +43,7 @@
import org.apache.gravitino.client.KerberosTokenProvider;
import org.apache.gravitino.integration.test.container.ContainerSuite;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.gravitino.rel.Column;
import org.apache.gravitino.rel.TableChange;
@@ -63,7 +63,7 @@
import org.slf4j.LoggerFactory;

@Tag("gravitino-docker-test")
public class HiveUserAuthenticationIT extends AbstractIT {
public class HiveUserAuthenticationIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(HiveUserAuthenticationIT.class);

private static final ContainerSuite containerSuite = ContainerSuite.getInstance();
@@ -98,7 +98,7 @@ public class HiveUserAuthenticationIT extends AbstractIT {
private static final String HIVE_COL_NAME3 = "col3";

@BeforeAll
public static void startIntegrationTest() throws Exception {
public void startIntegrationTest() throws Exception {
containerSuite.startKerberosHiveContainer();
kerberosHiveContainer = containerSuite.getKerberosHiveContainer();

@@ -119,11 +119,11 @@ public static void startIntegrationTest() throws Exception {
addKerberosConfig();

// Start Gravitino server
AbstractIT.startIntegrationTest();
super.startIntegrationTest();
}

@AfterAll
public static void stop() {
public void stop() {
// Reset the UGI
UserGroupInformation.reset();

@@ -132,7 +132,7 @@ public static void stop() {
System.clearProperty("java.security.krb5.conf");
System.clearProperty("sun.security.krb5.debug");

AbstractIT.client = null;
client = null;
}

private static void prepareKerberosConfig() throws Exception {
@@ -188,14 +188,12 @@ private static void refreshKerberosConfig() {
}
}

private static void addKerberosConfig() {
AbstractIT.customConfigs.put(Configs.AUTHENTICATORS.getKey(), "kerberos");
AbstractIT.customConfigs.put(
"gravitino.authenticator.kerberos.principal", GRAVITINO_SERVER_PRINCIPAL);
AbstractIT.customConfigs.put(
"gravitino.authenticator.kerberos.keytab", TMP_DIR + GRAVITINO_SERVER_KEYTAB);
AbstractIT.customConfigs.put(SDK_KERBEROS_KEYTAB_KEY, TMP_DIR + GRAVITINO_CLIENT_KEYTAB);
AbstractIT.customConfigs.put(SDK_KERBEROS_PRINCIPAL_KEY, GRAVITINO_CLIENT_PRINCIPAL);
private void addKerberosConfig() {
customConfigs.put(Configs.AUTHENTICATORS.getKey(), "kerberos");
customConfigs.put("gravitino.authenticator.kerberos.principal", GRAVITINO_SERVER_PRINCIPAL);
customConfigs.put("gravitino.authenticator.kerberos.keytab", TMP_DIR + GRAVITINO_SERVER_KEYTAB);
customConfigs.put(SDK_KERBEROS_KEYTAB_KEY, TMP_DIR + GRAVITINO_CLIENT_KEYTAB);
customConfigs.put(SDK_KERBEROS_PRINCIPAL_KEY, GRAVITINO_CLIENT_PRINCIPAL);
}

@Test