Skip to content

Commit

Permalink
Merge branch 'main' of github.com:apache/gravitino into issue_5074
Browse files Browse the repository at this point in the history
  • Loading branch information
yuqi1129 committed Oct 16, 2024
2 parents 27a911a + b7f4e34 commit e34dbea
Show file tree
Hide file tree
Showing 61 changed files with 319 additions and 322 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@
import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.container.RangerContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.gravitino.meta.AuditInfo;
import org.apache.gravitino.meta.RoleEntity;
Expand All @@ -71,7 +71,7 @@
import org.slf4j.LoggerFactory;

@Tag("gravitino-docker-test")
public class RangerHiveE2EIT extends AbstractIT {
public class RangerHiveE2EIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(RangerHiveE2EIT.class);

private static RangerAuthorizationPlugin rangerAuthPlugin;
Expand Down Expand Up @@ -99,15 +99,15 @@ public class RangerHiveE2EIT extends AbstractIT {
private static String RANGER_ADMIN_URL = null;

@BeforeAll
public static void startIntegrationTest() throws Exception {
public void startIntegrationTest() throws Exception {
// Enable Gravitino Authorization mode
Map<String, String> configs = Maps.newHashMap();
configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
configs.put(Configs.SERVICE_ADMINS.getKey(), RangerITEnv.HADOOP_USER_NAME);
configs.put(Configs.AUTHENTICATORS.getKey(), AuthenticatorType.SIMPLE.name().toLowerCase());
configs.put("SimpleAuthUserName", TEST_USER_NAME);
registerCustomConfigs(configs);
AbstractIT.startIntegrationTest();
super.startIntegrationTest();

RangerITEnv.setup();
RangerITEnv.startHiveRangerContainer();
Expand Down Expand Up @@ -180,7 +180,8 @@ private static void generateRangerSparkSecurityXML() throws IOException {
}

@AfterAll
public static void stop() throws IOException {
public void stop() throws IOException {
if (client != null) {
Arrays.stream(catalog.asSchemas().listSchemas())
.filter(schema -> !schema.equals("default"))
Expand All @@ -204,7 +205,7 @@ public static void stop() throws IOException {
LOG.error("Failed to close CloseableGroup", e);
}

AbstractIT.client = null;
client = null;
}

@Test
Expand Down Expand Up @@ -267,7 +268,7 @@ void testAllowUseSchemaPrivilege() throws InterruptedException {
1, rows2.stream().filter(row -> row.getString(0).equals(schemaName)).count());
}

private static void createMetalake() {
private void createMetalake() {
GravitinoMetalake[] gravitinoMetalakes = client.listMetalakes();
Assertions.assertEquals(0, gravitinoMetalakes.length);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
import org.apache.gravitino.file.FilesetChange;
import org.apache.gravitino.integration.test.container.ContainerSuite;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
Expand All @@ -53,13 +53,11 @@
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Tag("gravitino-docker-test")
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class HadoopCatalogIT extends AbstractIT {
public class HadoopCatalogIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(HadoopCatalogIT.class);
protected static final ContainerSuite containerSuite = ContainerSuite.getInstance();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@
import org.apache.gravitino.file.Fileset;
import org.apache.gravitino.integration.test.container.ContainerSuite;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.jupiter.api.AfterAll;
Expand All @@ -60,7 +60,7 @@
import sun.security.krb5.KrbException;

@Tag("gravitino-docker-test")
public class HadoopUserAuthenticationIT extends AbstractIT {
public class HadoopUserAuthenticationIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(HadoopUserAuthenticationIT.class);

private static final ContainerSuite containerSuite = ContainerSuite.getInstance();
Expand Down Expand Up @@ -104,7 +104,7 @@ public class HadoopUserAuthenticationIT extends AbstractIT {
private static final String TABLE_NAME = "test_table";

@BeforeAll
public static void startIntegrationTest() throws Exception {
public void startIntegrationTest() throws Exception {
containerSuite.startKerberosHiveContainer();
kerberosHiveContainer = containerSuite.getKerberosHiveContainer();

Expand All @@ -122,7 +122,7 @@ public static void startIntegrationTest() throws Exception {
addKerberosConfig();

// Start Gravitino server
AbstractIT.startIntegrationTest();
super.startIntegrationTest();
}

@AfterAll
Expand Down Expand Up @@ -222,14 +222,12 @@ private static void createKeyTableForSchemaAndFileset() throws IOException {
.copyFileFromContainer(HADOOP_FILESET_KEYTAB, TMP_DIR + HADOOP_FILESET_KEYTAB);
}

private static void addKerberosConfig() {
AbstractIT.customConfigs.put(Configs.AUTHENTICATORS.getKey(), "kerberos");
AbstractIT.customConfigs.put(
"gravitino.authenticator.kerberos.principal", GRAVITINO_SERVER_PRINCIPAL);
AbstractIT.customConfigs.put(
"gravitino.authenticator.kerberos.keytab", TMP_DIR + GRAVITINO_SERVER_KEYTAB);
AbstractIT.customConfigs.put(SDK_KERBEROS_KEYTAB_KEY, TMP_DIR + GRAVITINO_CLIENT_KEYTAB);
AbstractIT.customConfigs.put(SDK_KERBEROS_PRINCIPAL_KEY, GRAVITINO_CLIENT_PRINCIPAL);
private void addKerberosConfig() {
customConfigs.put(Configs.AUTHENTICATORS.getKey(), "kerberos");
customConfigs.put("gravitino.authenticator.kerberos.principal", GRAVITINO_SERVER_PRINCIPAL);
customConfigs.put("gravitino.authenticator.kerberos.keytab", TMP_DIR + GRAVITINO_SERVER_KEYTAB);
customConfigs.put(SDK_KERBEROS_KEYTAB_KEY, TMP_DIR + GRAVITINO_CLIENT_KEYTAB);
customConfigs.put(SDK_KERBEROS_PRINCIPAL_KEY, GRAVITINO_CLIENT_PRINCIPAL);
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@
import org.apache.gravitino.exceptions.FilesetAlreadyExistsException;
import org.apache.gravitino.exceptions.IllegalNameIdentifierException;
import org.apache.gravitino.file.Fileset;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.gravitino.integration.test.util.ITUtils;
import org.apache.hadoop.conf.Configuration;
Expand All @@ -67,7 +67,7 @@
import org.slf4j.LoggerFactory;

@Tag("gravitino-docker-test")
public class HadoopUserImpersonationIT extends AbstractIT {
public class HadoopUserImpersonationIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(HadoopCatalogIT.class);

public static final String metalakeName =
Expand Down Expand Up @@ -111,7 +111,7 @@ private static void refreshKerberosConfig() {
}

@BeforeAll
public static void setup() throws Exception {
public void setup() throws Exception {
if (!isEmbedded()) {
return;
}
Expand Down Expand Up @@ -254,7 +254,7 @@ void testListFileSystem() throws Exception {
});
}

private static void createMetalake() {
private void createMetalake() {
GravitinoMetalake[] gravitinoMetalakes = client.listMetalakes();
Assertions.assertEquals(0, gravitinoMetalakes.length);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@
import org.apache.gravitino.hive.HiveClientPool;
import org.apache.gravitino.integration.test.container.ContainerSuite;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.gravitino.rel.Column;
import org.apache.gravitino.rel.Table;
Expand Down Expand Up @@ -108,13 +108,11 @@
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Tag("gravitino-docker-test")
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class CatalogHiveIT extends AbstractIT {
public class CatalogHiveIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(CatalogHiveIT.class);
public static final String metalakeName =
GravitinoITUtils.genRandomName("CatalogHiveIT_metalake");
Expand Down Expand Up @@ -253,7 +251,7 @@ public void stop() throws IOException {
LOG.error("Failed to close CloseableGroup", e);
}

AbstractIT.client = null;
client = null;
}

@AfterEach
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
import org.apache.gravitino.client.KerberosTokenProvider;
import org.apache.gravitino.integration.test.container.ContainerSuite;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.gravitino.rel.Column;
import org.apache.gravitino.rel.TableChange;
Expand All @@ -63,7 +63,7 @@
import org.slf4j.LoggerFactory;

@Tag("gravitino-docker-test")
public class HiveUserAuthenticationIT extends AbstractIT {
public class HiveUserAuthenticationIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(HiveUserAuthenticationIT.class);

private static final ContainerSuite containerSuite = ContainerSuite.getInstance();
Expand Down Expand Up @@ -98,7 +98,7 @@ public class HiveUserAuthenticationIT extends AbstractIT {
private static final String HIVE_COL_NAME3 = "col3";

@BeforeAll
public static void startIntegrationTest() throws Exception {
public void startIntegrationTest() throws Exception {
containerSuite.startKerberosHiveContainer();
kerberosHiveContainer = containerSuite.getKerberosHiveContainer();

Expand All @@ -119,11 +119,11 @@ public static void startIntegrationTest() throws Exception {
addKerberosConfig();

// Start Gravitino server
AbstractIT.startIntegrationTest();
super.startIntegrationTest();
}

@AfterAll
public static void stop() {
public void stop() {
// Reset the UGI
UserGroupInformation.reset();

Expand All @@ -132,7 +132,7 @@ public static void stop() {
System.clearProperty("java.security.krb5.conf");
System.clearProperty("sun.security.krb5.debug");

AbstractIT.client = null;
client = null;
}

private static void prepareKerberosConfig() throws Exception {
Expand Down Expand Up @@ -188,14 +188,12 @@ private static void refreshKerberosConfig() {
}
}

private static void addKerberosConfig() {
AbstractIT.customConfigs.put(Configs.AUTHENTICATORS.getKey(), "kerberos");
AbstractIT.customConfigs.put(
"gravitino.authenticator.kerberos.principal", GRAVITINO_SERVER_PRINCIPAL);
AbstractIT.customConfigs.put(
"gravitino.authenticator.kerberos.keytab", TMP_DIR + GRAVITINO_SERVER_KEYTAB);
AbstractIT.customConfigs.put(SDK_KERBEROS_KEYTAB_KEY, TMP_DIR + GRAVITINO_CLIENT_KEYTAB);
AbstractIT.customConfigs.put(SDK_KERBEROS_PRINCIPAL_KEY, GRAVITINO_CLIENT_PRINCIPAL);
private void addKerberosConfig() {
customConfigs.put(Configs.AUTHENTICATORS.getKey(), "kerberos");
customConfigs.put("gravitino.authenticator.kerberos.principal", GRAVITINO_SERVER_PRINCIPAL);
customConfigs.put("gravitino.authenticator.kerberos.keytab", TMP_DIR + GRAVITINO_SERVER_KEYTAB);
customConfigs.put(SDK_KERBEROS_KEYTAB_KEY, TMP_DIR + GRAVITINO_CLIENT_KEYTAB);
customConfigs.put(SDK_KERBEROS_PRINCIPAL_KEY, GRAVITINO_CLIENT_PRINCIPAL);
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@
import org.apache.gravitino.hive.HiveClientPool;
import org.apache.gravitino.integration.test.container.ContainerSuite;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.gravitino.rel.Column;
import org.apache.gravitino.rel.Table;
Expand All @@ -64,7 +64,7 @@
import org.junit.jupiter.api.Test;

@Tag("gravitino-docker-test")
public class ProxyCatalogHiveIT extends AbstractIT {
public class ProxyCatalogHiveIT extends BaseIT {

public static final String METALAKE_NAME =
GravitinoITUtils.genRandomName("ProxyCatalogHiveIT_metalake");
Expand All @@ -88,10 +88,10 @@ public class ProxyCatalogHiveIT extends AbstractIT {
private static GravitinoAdminClient anotherClientWithNotExistingName;
private static Catalog anotherCatalog;
private static Catalog anotherCatalogWithUsername;
private static Catalog anotherCatatlogWithNotExistingName;
private static Catalog anotherCatalogWithNotExistingName;

@BeforeAll
public static void startIntegrationTest() throws Exception {
public void startIntegrationTest() throws Exception {
originHadoopUser = System.getenv(HADOOP_USER_NAME);
setEnv(HADOOP_USER_NAME, null);

Expand All @@ -100,7 +100,7 @@ public static void startIntegrationTest() throws Exception {
Map<String, String> configs = Maps.newHashMap();
configs.put(Configs.AUTHENTICATORS.getKey(), AuthenticatorType.SIMPLE.name().toLowerCase());
registerCustomConfigs(configs);
AbstractIT.startIntegrationTest();
super.startIntegrationTest();
containerSuite.startHiveContainer();
HIVE_METASTORE_URIS =
String.format(
Expand Down Expand Up @@ -137,13 +137,13 @@ public static void startIntegrationTest() throws Exception {
}

@AfterAll
public static void stop() {
public void stop() {
setEnv(HADOOP_USER_NAME, originHadoopUser);
anotherClient.close();
anotherClientWithUsername.close();
anotherClientWithNotExistingName.close();

AbstractIT.client = null;
client = null;
}

@Test
Expand Down Expand Up @@ -195,7 +195,7 @@ public void testOperateSchema() throws Exception {
Assertions.assertThrows(
RuntimeException.class,
() ->
anotherCatatlogWithNotExistingName
anotherCatalogWithNotExistingName
.asSchemas()
.createSchema("new_schema", comment, properties));
Assertions.assertTrue(e.getMessage().contains("AccessControlException Permission denied"));
Expand Down Expand Up @@ -256,7 +256,7 @@ public void testOperateTable() throws Exception {
Assertions.assertThrows(
RuntimeException.class,
() -> {
anotherCatatlogWithNotExistingName
anotherCatalogWithNotExistingName
.asTableCatalog()
.createTable(
anotherIdentWithNotExisting,
Expand Down Expand Up @@ -370,7 +370,7 @@ public void testOperatePartition() throws Exception {
Assertions.assertThrows(
RuntimeException.class,
() ->
anotherCatatlogWithNotExistingName
anotherCatalogWithNotExistingName
.asTableCatalog()
.loadTable(nameIdentifier)
.supportPartitions()
Expand All @@ -385,7 +385,7 @@ private Column[] createColumns() {
return new Column[] {col1, col2, col3};
}

private static void createMetalake() {
private void createMetalake() {
GravitinoMetalake[] gravitinoMetalakes = client.listMetalakes();
Assertions.assertEquals(0, gravitinoMetalakes.length);

Expand Down Expand Up @@ -421,7 +421,7 @@ private static void loadCatalogWithAnotherClient() {
anotherCatalogWithUsername =
anotherClientWithUsername.loadMetalake(METALAKE_NAME).loadCatalog(CATALOG_NAME);

anotherCatatlogWithNotExistingName =
anotherCatalogWithNotExistingName =
anotherClientWithNotExistingName.loadMetalake(METALAKE_NAME).loadCatalog(CATALOG_NAME);
}

Expand Down
Loading

0 comments on commit e34dbea

Please sign in to comment.