From bb0092c680c6a78458651d1cc9c7ed26a739d4e8 Mon Sep 17 00:00:00 2001 From: xunliu Date: Tue, 3 Sep 2024 21:32:18 +0800 Subject: [PATCH] [#4759] feat(CI): Use Spark to verify Ranger authorization for Hive --- .../authorization-ranger/build.gradle.kts | 29 +- .../integration/test/RangerHiveE2EIT.java | 307 ++++++++++++------ .../ranger/integration/test/RangerHiveIT.java | 182 +---------- .../ranger/integration/test/RangerITEnv.java | 49 ++- .../src/test/resources/log4j2.properties | 73 +++++ .../ranger-spark-security.xml.template | 45 +++ build.gradle.kts | 7 +- .../gravitino/catalog/hive/TestHiveTable.java | 4 +- .../lakehouse/iceberg/TestIcebergTable.java | 4 +- integration-test-common/build.gradle.kts | 32 +- .../integration/test/util/AbstractIT.java | 7 +- .../gravitino/server/web/JettyServer.java | 9 + spark-connector/v3.3/spark/build.gradle.kts | 5 +- 13 files changed, 455 insertions(+), 298 deletions(-) create mode 100644 authorizations/authorization-ranger/src/test/resources/log4j2.properties create mode 100644 authorizations/authorization-ranger/src/test/resources/ranger-spark-security.xml.template diff --git a/authorizations/authorization-ranger/build.gradle.kts b/authorizations/authorization-ranger/build.gradle.kts index efc20e6c8e1..1e8413fe35d 100644 --- a/authorizations/authorization-ranger/build.gradle.kts +++ b/authorizations/authorization-ranger/build.gradle.kts @@ -24,6 +24,10 @@ plugins { id("idea") } +val scalaVersion: String = project.properties["scalaVersion"] as? String ?: extra["defaultScalaVersion"].toString() +val sparkVersion: String = libs.versions.spark35.get() +val kyuubiVersion: String = libs.versions.kyuubi4spark35.get() + dependencies { implementation(project(":api")) { exclude(group = "*") @@ -62,7 +66,6 @@ dependencies { testImplementation(project(":common")) testImplementation(project(":clients:client-java")) - testImplementation(project(":server")) testImplementation(project(":catalogs:catalog-common")) testImplementation(project(":integration-test-common", "testArtifacts")) testImplementation(libs.junit.jupiter.api) @@ -80,13 +83,24 @@ dependencies { exclude("org.elasticsearch.plugin") exclude("javax.ws.rs") exclude("org.apache.ranger", "ranger-plugin-classloader") + exclude("com.amazonaws", "aws-java-sdk-bundle") + exclude("org.eclipse.jetty") + exclude("org.apache.hadoop") } - testImplementation(libs.hive2.jdbc) { - exclude("org.slf4j") - exclude("org.eclipse.jetty.aggregate") - } + testImplementation(libs.h2db) testImplementation(libs.mysql.driver) testImplementation(libs.postgresql.driver) + testImplementation("org.apache.spark:spark-hive_$scalaVersion:$sparkVersion") + testImplementation("org.apache.spark:spark-sql_$scalaVersion:$sparkVersion") { + exclude("org.apache.avro") + exclude("org.apache.hadoop") + exclude("org.apache.zookeeper") + exclude("io.dropwizard.metrics") + exclude("org.rocksdb") + } + testImplementation("org.apache.kyuubi:kyuubi-spark-authz_$scalaVersion:$kyuubiVersion") { + exclude("com.sun.jersey") + } } tasks { @@ -96,7 +110,7 @@ tasks { } val copyAuthorizationLibs by registering(Copy::class) { - dependsOn("jar", "runtimeJars") + dependsOn("jar", runtimeJars) from("build/libs") { exclude("guava-*.jar") exclude("log4j-*.jar") @@ -120,4 +134,7 @@ tasks.test { } else { dependsOn(tasks.jar) } + doFirst { + environment("HADOOP_USER_NAME", "gravitino") + } } diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java index 89ecbc849ad..0a823e3f4de 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java @@ -19,7 +19,8 @@ package org.apache.gravitino.authorization.ranger.integration.test; import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER; -import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_ADMIN_URL; +import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName; +import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE; import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE; import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD; import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME; @@ -29,17 +30,23 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.util.Arrays; import java.util.Collections; +import java.util.List; import java.util.Map; +import org.apache.commons.io.IOUtils; import org.apache.gravitino.Catalog; import org.apache.gravitino.Configs; import org.apache.gravitino.MetadataObject; -import org.apache.gravitino.NameIdentifier; import org.apache.gravitino.Schema; -import org.apache.gravitino.auth.AuthConstants; +import org.apache.gravitino.auth.AuthenticatorType; import org.apache.gravitino.authorization.Privileges; -import org.apache.gravitino.authorization.Role; import org.apache.gravitino.authorization.SecurableObject; import org.apache.gravitino.authorization.SecurableObjects; import org.apache.gravitino.authorization.ranger.RangerAuthorizationPlugin; @@ -50,18 +57,12 @@ import org.apache.gravitino.integration.test.container.RangerContainer; import org.apache.gravitino.integration.test.util.AbstractIT; import org.apache.gravitino.integration.test.util.GravitinoITUtils; -import org.apache.gravitino.rel.Column; -import org.apache.gravitino.rel.Table; -import org.apache.gravitino.rel.expressions.NamedReference; -import org.apache.gravitino.rel.expressions.distributions.Distribution; -import org.apache.gravitino.rel.expressions.distributions.Distributions; -import org.apache.gravitino.rel.expressions.distributions.Strategy; -import org.apache.gravitino.rel.expressions.sorts.NullOrdering; -import org.apache.gravitino.rel.expressions.sorts.SortDirection; -import org.apache.gravitino.rel.expressions.sorts.SortOrder; -import org.apache.gravitino.rel.expressions.sorts.SortOrders; -import org.apache.gravitino.rel.expressions.transforms.Transforms; -import org.apache.gravitino.rel.types.Types; +import org.apache.gravitino.meta.AuditInfo; +import org.apache.gravitino.meta.RoleEntity; +import org.apache.gravitino.meta.UserEntity; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.SparkSession; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; @@ -76,63 
+77,206 @@ public class RangerHiveE2EIT extends AbstractIT { private static RangerAuthorizationPlugin rangerAuthPlugin; public static final String metalakeName = - GravitinoITUtils.genRandomName("RangerHiveAuthIT_metalake").toLowerCase(); + GravitinoITUtils.genRandomName("RangerHiveE2EIT_metalake").toLowerCase(); public static final String catalogName = - GravitinoITUtils.genRandomName("RangerHiveAuthIT_catalog").toLowerCase(); + GravitinoITUtils.genRandomName("RangerHiveE2EIT_catalog").toLowerCase(); public static final String schemaName = - GravitinoITUtils.genRandomName("RangerHiveAuthIT_schema").toLowerCase(); - public static final String tableName = - GravitinoITUtils.genRandomName("RangerHiveAuthIT_table").toLowerCase(); - - public static final String HIVE_COL_NAME1 = "hive_col_name1"; - public static final String HIVE_COL_NAME2 = "hive_col_name2"; - public static final String HIVE_COL_NAME3 = "hive_col_name3"; + GravitinoITUtils.genRandomName("RangerHiveE2EIT_schema").toLowerCase(); private static GravitinoMetalake metalake; private static Catalog catalog; private static final String provider = "hive"; private static String HIVE_METASTORE_URIS; + private static SparkSession sparkSession = null; + private final AuditInfo auditInfo = + AuditInfo.builder().withCreator("test").withCreateTime(Instant.now()).build(); + private static final String HADOOP_USER_NAME = "HADOOP_USER_NAME"; + private static final String TEST_USER_NAME = "e2e_it_user"; + + private static final String SQL_SHOW_DATABASES = + String.format("SHOW DATABASES like '%s'", schemaName); + + private static String RANGER_ADMIN_URL = null; + @BeforeAll public static void startIntegrationTest() throws Exception { + // Enable Gravitino Authorization mode Map<String, String> configs = Maps.newHashMap(); configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true)); - configs.put(Configs.SERVICE_ADMINS.getKey(), AuthConstants.ANONYMOUS_USER); + configs.put(Configs.SERVICE_ADMINS.getKey(), RangerITEnv.HADOOP_USER_NAME); + configs.put(Configs.AUTHENTICATORS.getKey(), AuthenticatorType.SIMPLE.name().toLowerCase()); + configs.put("SimpleAuthUserName", TEST_USER_NAME); registerCustomConfigs(configs); AbstractIT.startIntegrationTest(); RangerITEnv.setup(); - containerSuite.startHiveContainer(); + RangerITEnv.startHiveRangerContainer(); + + RANGER_ADMIN_URL = + String.format( + "http://%s:%d", + containerSuite.getRangerContainer().getContainerIpAddress(), RANGER_SERVER_PORT); + HIVE_METASTORE_URIS = String.format( "thrift://%s:%d", - containerSuite.getHiveContainer().getContainerIpAddress(), + containerSuite.getHiveRangerContainer().getContainerIpAddress(), HiveContainer.HIVE_METASTORE_PORT); + generateRangerSparkSecurityXML(); + + sparkSession = + SparkSession.builder() + .master("local[1]") + .appName("Hive Catalog integration test") + .config("hive.metastore.uris", HIVE_METASTORE_URIS) + .config( + "spark.sql.warehouse.dir", + String.format( + "hdfs://%s:%d/user/hive/warehouse", + containerSuite.getHiveRangerContainer().getContainerIpAddress(), + HiveContainer.HDFS_DEFAULTFS_PORT)) + .config("spark.sql.storeAssignmentPolicy", "LEGACY") + .config("mapreduce.input.fileinputformat.input.dir.recursive", "true") + .config( + "spark.sql.extensions", + "org.apache.kyuubi.plugin.spark.authz.ranger.RangerSparkExtension") + .enableHiveSupport() + .getOrCreate(); + createMetalake(); createCatalogAndRangerAuthPlugin(); - createSchema(); - createHiveTable(); + } + + private static void generateRangerSparkSecurityXML() throws IOException { String
templatePath = + String.join( + File.separator, + System.getenv("GRAVITINO_ROOT_DIR"), + "authorizations", + "authorization-ranger", + "src", + "test", + "resources", + "ranger-spark-security.xml.template"); + String xmlPath = + String.join( + File.separator, + System.getenv("GRAVITINO_ROOT_DIR"), + "authorizations", + "authorization-ranger", + "build", + "resources", + "test", + "ranger-spark-security.xml"); + + FileInputStream inputStream = new FileInputStream(templatePath); + String templateContext = null; + try { + templateContext = IOUtils.toString(inputStream, StandardCharsets.UTF_8); + + templateContext = templateContext.replace("__REPLACE__RANGER_ADMIN_URL", RANGER_ADMIN_URL); + templateContext = + templateContext.replace( + "__REPLACE__RANGER_HIVE_REPO_NAME", RangerITEnv.RANGER_HIVE_REPO_NAME); + } finally { + inputStream.close(); + } + + FileOutputStream outputStream = new FileOutputStream(xmlPath); + try { + IOUtils.write(templateContext, outputStream, StandardCharsets.UTF_8); + } finally { + outputStream.close(); + } } @AfterAll public static void stop() throws IOException { + if (client != null) { + Arrays.stream(catalog.asSchemas().listSchemas()) + .filter(schema -> !schema.equals("default")) + .forEach( + (schema -> { + catalog.asSchemas().dropSchema(schema, true); + })); + Arrays.stream(metalake.listCatalogs()) + .forEach( + (catalogName -> { + metalake.dropCatalog(catalogName); + })); + client.dropMetalake(metalakeName); + } + if (sparkSession != null) { + sparkSession.close(); + } + try { + closer.close(); + } catch (Exception e) { + LOG.error("Failed to close CloseableGroup", e); + } + AbstractIT.client = null; } @Test - void testCreateRole() { - String roleName = RangerITEnv.currentFunName(); - Map properties = Maps.newHashMap(); - properties.put("k1", "v1"); + void testAllowUseSchemaPrivilege() throws InterruptedException { + // First, create a schema using the Gravitino client + createSchema(); + + // Use Spark to show this database (schema) + Dataset<Row> dataset1 = sparkSession.sql(SQL_SHOW_DATABASES); + dataset1.show(); + List<Row> rows1 = dataset1.collectAsList(); + // The schema should not be shown, because the user does not have the permission + Assertions.assertEquals( + 0, rows1.stream().filter(row -> row.getString(0).equals(schemaName)).count()); - SecurableObject table1 = + // Create a role with CREATE_SCHEMA privilege + SecurableObject securableObject1 = SecurableObjects.parse( - String.format("%s.%s.%s", catalogName, schemaName, tableName), - MetadataObject.Type.TABLE, - Lists.newArrayList(Privileges.SelectTable.allow())); - Role role = metalake.createRole(roleName, properties, Lists.newArrayList(table1)); - RangerITEnv.verifyRoleInRanger(rangerAuthPlugin, role); + String.format("%s.%s", catalogName, schemaName), + MetadataObject.Type.SCHEMA, + Lists.newArrayList(Privileges.CreateSchema.allow())); + RoleEntity role = + RoleEntity.builder() + .withId(1L) + .withName(currentFunName()) + .withAuditInfo(auditInfo) + .withSecurableObjects(Lists.newArrayList(securableObject1)) + .build(); + rangerAuthPlugin.onRoleCreated(role); + + // Grant this role to the Spark execution user `HADOOP_USER_NAME` + String userName1 = System.getenv(HADOOP_USER_NAME); + UserEntity userEntity1 = + UserEntity.builder() + .withId(1L) + .withName(userName1) + .withRoles(Collections.emptyList()) + .withAuditInfo(auditInfo) + .build(); + Assertions.assertTrue( + rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role), userEntity1)); + + // After granting permissions in Ranger, we must wait a period of time
for the Ranger Spark plugin to update + // its policy cache. The sleep time must be greater than the policy update interval + // (ranger.plugin.spark.policy.pollIntervalMs) configured in + // `resources/ranger-spark-security.xml.template`. + Thread.sleep(1000L); + + // Use Spark to show this database (schema) again + Dataset<Row> dataset2 = sparkSession.sql(SQL_SHOW_DATABASES); + dataset2.show(100, 100); + List<Row> rows2 = dataset2.collectAsList(); + rows2.stream() + .filter(row -> row.getString(0).equals(schemaName)) + .findFirst() + .orElseThrow(() -> new IllegalStateException("Database not found: " + schemaName)); + // The schema should be shown, because the user has the permission + Assertions.assertEquals( + 1, rows2.stream().filter(row -> row.getString(0).equals(schemaName)).count()); } private static void createMetalake() { @@ -153,31 +297,34 @@ private static void createCatalogAndRangerAuthPlugin() { "hive", ImmutableMap.of( AuthorizationPropertiesMeta.RANGER_ADMIN_URL, - String.format( - "http://%s:%d", - containerSuite.getRangerContainer().getContainerIpAddress(), - RangerContainer.RANGER_SERVER_PORT), - AuthorizationPropertiesMeta.RANGER_AUTH_TYPE, + RANGER_ADMIN_URL, + RANGER_AUTH_TYPE, RangerContainer.authType, - AuthorizationPropertiesMeta.RANGER_USERNAME, + RANGER_USERNAME, RangerContainer.rangerUserName, - AuthorizationPropertiesMeta.RANGER_PASSWORD, + RANGER_PASSWORD, RangerContainer.rangerPassword, - AuthorizationPropertiesMeta.RANGER_SERVICE_NAME, + RANGER_SERVICE_NAME, RangerITEnv.RANGER_HIVE_REPO_NAME)); - Map<String, String> properties = Maps.newHashMap(); - properties.put(HiveConstants.METASTORE_URIS, HIVE_METASTORE_URIS); - properties.put(AUTHORIZATION_PROVIDER, "ranger"); - properties.put(RANGER_SERVICE_NAME, RangerITEnv.RANGER_HIVE_REPO_NAME); - properties.put( - RANGER_ADMIN_URL, - String.format( - "http://localhost:%s", - containerSuite.getRangerContainer().getMappedPort(RANGER_SERVER_PORT))); - properties.put(RANGER_AUTH_TYPE, RangerContainer.authType); - properties.put(RANGER_USERNAME, RangerContainer.rangerUserName); - properties.put(RANGER_PASSWORD, RangerContainer.rangerPassword); + Map<String, String> properties = + ImmutableMap.of( + HiveConstants.METASTORE_URIS, + HIVE_METASTORE_URIS, + IMPERSONATION_ENABLE, + "true", + AUTHORIZATION_PROVIDER, + "ranger", + RANGER_SERVICE_NAME, + RangerITEnv.RANGER_HIVE_REPO_NAME, + AuthorizationPropertiesMeta.RANGER_ADMIN_URL, + RANGER_ADMIN_URL, + RANGER_AUTH_TYPE, + RangerContainer.authType, + RANGER_USERNAME, + RangerContainer.rangerUserName, + RANGER_PASSWORD, + RangerContainer.rangerPassword); metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, "comment", properties); catalog = metalake.loadCatalog(catalogName); @@ -192,7 +339,7 @@ private static void createSchema() { "location", String.format( "hdfs://%s:%d/user/hive/warehouse/%s.db", - containerSuite.getHiveContainer().getContainerIpAddress(), + containerSuite.getHiveRangerContainer().getContainerIpAddress(), HiveContainer.HDFS_DEFAULTFS_PORT, schemaName.toLowerCase())); String comment = "comment"; @@ -201,42 +348,4 @@ private static void createSchema() { Schema loadSchema = catalog.asSchemas().loadSchema(schemaName); Assertions.assertEquals(schemaName.toLowerCase(), loadSchema.name()); } - - public static void createHiveTable() { - // Create table from Gravitino API - Column[] columns = createColumns(); - NameIdentifier nameIdentifier = NameIdentifier.of(schemaName, tableName); - - Distribution distribution = - Distributions.of(Strategy.EVEN, 10, NamedReference.field(HIVE_COL_NAME1)); - - final
SortOrder[] sortOrders = - new SortOrder[] { - SortOrders.of( - NamedReference.field(HIVE_COL_NAME2), - SortDirection.DESCENDING, - NullOrdering.NULLS_FIRST) - }; - - Map<String, String> properties = ImmutableMap.of("key1", "val1", "key2", "val2"); - Table createdTable = - catalog - .asTableCatalog() - .createTable( - nameIdentifier, - columns, - "table_comment", - properties, - Transforms.EMPTY_TRANSFORM, - distribution, - sortOrders); - LOG.info("Table created: {}", createdTable); - } - - private static Column[] createColumns() { - Column col1 = Column.of(HIVE_COL_NAME1, Types.ByteType.get(), "col_1_comment"); - Column col2 = Column.of(HIVE_COL_NAME2, Types.DateType.get(), "col_2_comment"); - Column col3 = Column.of(HIVE_COL_NAME3, Types.StringType.get(), "col_3_comment"); - return new Column[] {col1, col2, col3}; - } } diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java index 844b89b3008..c2b1d99cc1c 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java @@ -24,11 +24,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; @@ -49,7 +44,6 @@ import org.apache.gravitino.authorization.ranger.reference.RangerDefines; import org.apache.gravitino.connector.AuthorizationPropertiesMeta; import org.apache.gravitino.integration.test.container.ContainerSuite; -import org.apache.gravitino.integration.test.container.HiveContainer; import org.apache.gravitino.integration.test.container.RangerContainer; import org.apache.gravitino.integration.test.util.GravitinoITUtils; import org.apache.gravitino.meta.AuditInfo; @@ -70,35 +64,15 @@ public class RangerHiveIT { private static final Logger LOG = LoggerFactory.getLogger(RangerHiveIT.class); private static final ContainerSuite containerSuite = ContainerSuite.getInstance(); - private static Connection adminConnection; - private static Connection anonymousConnection; - private static final String adminUser = "gravitino"; - private static final String anonymousUser = "anonymous"; private static RangerAuthorizationPlugin rangerAuthPlugin; - private static RangerHelper rangerPolicyHelper; + private static RangerHelper rangerHelper; private final AuditInfo auditInfo = AuditInfo.builder().withCreator("test").withCreateTime(Instant.now()).build(); @BeforeAll public static void setup() { RangerITEnv.setup(); - - containerSuite.startHiveRangerContainer( - new HashMap<>( - ImmutableMap.of( - HiveContainer.HIVE_RUNTIME_VERSION, - HiveContainer.HIVE3, - RangerContainer.DOCKER_ENV_RANGER_SERVER_URL, - String.format( - "http://%s:%d", - containerSuite.getRangerContainer().getContainerIpAddress(), - RangerContainer.RANGER_SERVER_PORT), - RangerContainer.DOCKER_ENV_RANGER_HIVE_REPOSITORY_NAME, - RangerITEnv.RANGER_HIVE_REPO_NAME, - RangerContainer.DOCKER_ENV_RANGER_HDFS_REPOSITORY_NAME, - RangerITEnv.RANGER_HDFS_REPO_NAME, - HiveContainer.HADOOP_USER_NAME, - adminUser))); +
RangerITEnv.startHiveRangerContainer(); rangerAuthPlugin = new RangerAuthorizationPlugin( @@ -117,21 +91,7 @@ public static void setup() { RangerContainer.rangerPassword, AuthorizationPropertiesMeta.RANGER_SERVICE_NAME, RangerITEnv.RANGER_HIVE_REPO_NAME)); - rangerPolicyHelper = new RangerHelper(rangerAuthPlugin, "hive"); - - // Create hive connection - String url = - String.format( - "jdbc:hive2://%s:%d/default", - containerSuite.getHiveRangerContainer().getContainerIpAddress(), - HiveContainer.HIVE_SERVICE_PORT); - try { - Class.forName("org.apache.hive.jdbc.HiveDriver"); - adminConnection = DriverManager.getConnection(url, adminUser, ""); - anonymousConnection = DriverManager.getConnection(url, anonymousUser, ""); - } catch (ClassNotFoundException | SQLException e) { - throw new RuntimeException(e); - } + rangerHelper = new RangerHelper(rangerAuthPlugin, "hive"); } /** @@ -190,7 +150,7 @@ public void testOnRoleDeleted() { role.securableObjects().stream() .forEach( securableObject -> - Assertions.assertNull(rangerPolicyHelper.findManagedPolicy(securableObject))); + Assertions.assertNull(rangerHelper.findManagedPolicy(securableObject))); } @Test @@ -212,7 +172,7 @@ public void testOnRoleDeleted2() { role.securableObjects().stream() .forEach( securableObject -> - Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject))); + Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject))); } @Test @@ -262,14 +222,14 @@ public void testFindManagedPolicy() { String.format("catalog.%s3.tab1", dbName), MetadataObject.Type.TABLE, Lists.newArrayList(Privileges.CreateTable.allow())); - Assertions.assertNull(rangerPolicyHelper.findManagedPolicy(securableObject1)); + Assertions.assertNull(rangerHelper.findManagedPolicy(securableObject1)); // Add a policy for `db3.tab1` createHivePolicy( Lists.newArrayList(String.format("%s3", dbName), "tab1"), GravitinoITUtils.genRandomName(currentFunName())); // findManagedPolicy function use precise search, so return not null - Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject1)); + Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject1)); } static void createHivePolicy(List<String> metaObjects, String roleName) { @@ -467,7 +427,7 @@ public void testRoleChangeCombinedOperation() { role.securableObjects().stream() .forEach( securableObject -> - Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject))); + Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject))); verifyOwnerInRanger(oldMetadataObject, Lists.newArrayList(userName)); } @Test @@ -1075,15 +1035,15 @@ public void testCombinationOperation() { role1.securableObjects().stream() .forEach( securableObject -> - Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject))); + Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject))); role2.securableObjects().stream() .forEach( securableObject -> - Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject))); + Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject))); role3.securableObjects().stream() .forEach( securableObject -> - Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject))); + Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject))); } /** Verify the Gravitino role in Ranger service */ @@ -1180,124 +1140,4 @@ private void verifyOwnerInRanger(MetadataObject metadataObject) { private void verifyOwnerInRanger(MetadataObject
metadataObject, List<String> includeUsers) { verifyOwnerInRanger(metadataObject, includeUsers, null, null, null); } - - /** Currently we only test Ranger Hive, So wo Allow anyone to visit HDFS */ - static void allowAnyoneAccessHDFS() { - String policyName = currentFunName(); - try { - if (null != RangerITEnv.rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HDFS, policyName)) { - return; - } - } catch (RangerServiceException e) { - // If the policy doesn't exist, we will create it - } - - Map<String, RangerPolicy.RangerPolicyResource> policyResourceMap = - ImmutableMap.of(RangerDefines.RESOURCE_PATH, new RangerPolicy.RangerPolicyResource("/*")); - RangerPolicy.RangerPolicyItem policyItem = new RangerPolicy.RangerPolicyItem(); - policyItem.setUsers(Arrays.asList(RangerDefines.CURRENT_USER)); - policyItem.setAccesses( - Arrays.asList( - new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_READ), - new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_WRITE), - new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_EXECUTE))); - RangerITEnv.updateOrCreateRangerPolicy( - RangerDefines.SERVICE_TYPE_HDFS, - RangerITEnv.RANGER_HDFS_REPO_NAME, - policyName, - policyResourceMap, - Collections.singletonList(policyItem)); - } - - /** - * Hive must have this policy Allow anyone can access information schema to show `database`, - * `tables` and `columns` - */ - static void allowAnyoneAccessInformationSchema() { - String policyName = currentFunName(); - try { - if (null != RangerITEnv.rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HIVE, policyName)) { - return; - } - } catch (RangerServiceException e) { - // If the policy doesn't exist, we will create it - } - - Map<String, RangerPolicy.RangerPolicyResource> policyResourceMap = - ImmutableMap.of( - RangerDefines.RESOURCE_DATABASE, - new RangerPolicy.RangerPolicyResource("information_schema"), - RangerDefines.RESOURCE_TABLE, - new RangerPolicy.RangerPolicyResource("*"), - RangerDefines.RESOURCE_COLUMN, - new RangerPolicy.RangerPolicyResource("*")); - RangerPolicy.RangerPolicyItem policyItem = new RangerPolicy.RangerPolicyItem(); - policyItem.setGroups(Arrays.asList(RangerDefines.PUBLIC_GROUP)); - policyItem.setAccesses( - Arrays.asList( - new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_SELECT))); - RangerITEnv.updateOrCreateRangerPolicy( - RangerDefines.SERVICE_TYPE_HIVE, - RangerITEnv.RANGER_HIVE_REPO_NAME, - policyName, - policyResourceMap, - Collections.singletonList(policyItem)); - } - - @Test - public void testCreateDatabase() throws Exception { - String dbName = currentFunName().toLowerCase(); // Hive database name is case-insensitive - - // Only allow admin user to operation database `db1` - // Other users can't see the database `db1` - Map<String, RangerPolicy.RangerPolicyResource> policyResourceMap = - ImmutableMap.of( - RangerDefines.RESOURCE_DATABASE, new RangerPolicy.RangerPolicyResource(dbName)); - RangerPolicy.RangerPolicyItem policyItem = new RangerPolicy.RangerPolicyItem(); - policyItem.setUsers(Arrays.asList(adminUser)); - policyItem.setAccesses( - Arrays.asList(new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_ALL))); - RangerITEnv.updateOrCreateRangerPolicy( - RangerDefines.SERVICE_TYPE_HIVE, - RangerITEnv.RANGER_HIVE_REPO_NAME, - "testAllowShowDatabase", - policyResourceMap, - Collections.singletonList(policyItem)); - - Statement adminStmt = adminConnection.createStatement(); - adminStmt.execute(String.format("CREATE DATABASE %s", dbName)); - String sql = "show databases"; - ResultSet adminRS = adminStmt.executeQuery(sql); - List<String> adminDbs = new ArrayList<>(); - while
(adminRS.next()) { - adminDbs.add(adminRS.getString(1)); - } - Assertions.assertTrue(adminDbs.contains(dbName), "adminDbs : " + adminDbs); - - // Anonymous user can't see the database `db1` - Statement anonymousStmt = anonymousConnection.createStatement(); - ResultSet anonymousRS = anonymousStmt.executeQuery(sql); - List<String> anonymousDbs = new ArrayList<>(); - while (anonymousRS.next()) { - anonymousDbs.add(anonymousRS.getString(1)); - } - Assertions.assertFalse(anonymousDbs.contains(dbName), "anonymous : " + anonymousDbs); - - // Allow anonymous user to see the database `db1` - policyItem.setUsers(Arrays.asList(adminUser, anonymousUser)); - policyItem.setAccesses( - Arrays.asList(new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_ALL))); - RangerITEnv.updateOrCreateRangerPolicy( - RangerDefines.SERVICE_TYPE_HIVE, - RangerITEnv.RANGER_HIVE_REPO_NAME, - "testAllowShowDatabase", - policyResourceMap, - Collections.singletonList(policyItem)); - anonymousRS = anonymousStmt.executeQuery(sql); - anonymousDbs.clear(); - while (anonymousRS.next()) { - anonymousDbs.add(anonymousRS.getString(1)); - } - Assertions.assertTrue(anonymousDbs.contains(dbName)); - } } diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java index 2808a2b796d..cbeb94ffa11 100644 --- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java +++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java @@ -33,6 +33,7 @@ import org.apache.gravitino.authorization.ranger.reference.RangerDefines; import org.apache.gravitino.integration.test.container.ContainerSuite; import org.apache.gravitino.integration.test.container.HiveContainer; +import org.apache.gravitino.integration.test.container.RangerContainer; import org.apache.gravitino.integration.test.container.TrinoContainer; import org.apache.ranger.RangerClient; import org.apache.ranger.RangerServiceException; @@ -53,6 +54,7 @@ public class RangerITEnv { private static final String RANGER_HIVE_TYPE = "hive"; protected static final String RANGER_HDFS_REPO_NAME = "hdfsDev"; private static final String RANGER_HDFS_TYPE = "hdfs"; + protected static final String HADOOP_USER_NAME = "gravitino"; protected static RangerClient rangerClient; private static volatile boolean initRangerService = false; private static final ContainerSuite containerSuite = ContainerSuite.getInstance(); @@ -88,11 +90,50 @@ public static void cleanup() { } } + static void initRangerITEnvForManual(String rangerUrl) { + rangerClient = + new RangerClient( + rangerUrl, + RangerContainer.authType, + RangerContainer.rangerUserName, + RangerContainer.rangerPassword, + null); + if (!initRangerService) { + synchronized (RangerITEnv.class) { + // No IP address is set; this has no impact on testing + createRangerHdfsRepository("", true); + createRangerHiveRepository("", true); + allowAnyoneAccessHDFS(); + allowAnyoneAccessInformationSchema(); + initRangerService = true; + } + } + } + + static void startHiveRangerContainer() { + containerSuite.startHiveRangerContainer( + new HashMap<>( + ImmutableMap.of( + HiveContainer.HIVE_RUNTIME_VERSION, + HiveContainer.HIVE3, + RangerContainer.DOCKER_ENV_RANGER_SERVER_URL, + String.format( + "http://%s:%d",
containerSuite.getRangerContainer().getContainerIpAddress(), + RangerContainer.RANGER_SERVER_PORT), + RangerContainer.DOCKER_ENV_RANGER_HIVE_REPOSITORY_NAME, + RangerITEnv.RANGER_HIVE_REPO_NAME, + RangerContainer.DOCKER_ENV_RANGER_HDFS_REPOSITORY_NAME, + RangerITEnv.RANGER_HDFS_REPO_NAME, + HiveContainer.HADOOP_USER_NAME, + HADOOP_USER_NAME))); + } + /** Currently we only test Ranger Hive, So wo Allow anyone to visit HDFS */ static void allowAnyoneAccessHDFS() { String policyName = currentFunName(); try { - if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HDFS, policyName)) { + if (null != rangerClient.getPolicy(RANGER_HDFS_REPO_NAME, policyName)) { return; } } catch (RangerServiceException e) { @@ -110,7 +151,7 @@ static void allowAnyoneAccessHDFS() { new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_WRITE), new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_EXECUTE))); updateOrCreateRangerPolicy( - RangerDefines.SERVICE_TYPE_HDFS, + RANGER_HDFS_TYPE, RANGER_HDFS_REPO_NAME, policyName, policyResourceMap, @@ -124,7 +165,7 @@ static void allowAnyoneAccessHDFS() { static void allowAnyoneAccessInformationSchema() { String policyName = currentFunName(); try { - if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HIVE, policyName)) { + if (null != rangerClient.getPolicy(RANGER_HIVE_REPO_NAME, policyName)) { return; } } catch (RangerServiceException e) { @@ -146,7 +187,7 @@ static void allowAnyoneAccessInformationSchema() { Arrays.asList( new RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_SELECT))); updateOrCreateRangerPolicy( - RangerDefines.SERVICE_TYPE_HIVE, + RANGER_HIVE_TYPE, RANGER_HIVE_REPO_NAME, policyName, policyResourceMap, diff --git a/authorizations/authorization-ranger/src/test/resources/log4j2.properties b/authorizations/authorization-ranger/src/test/resources/log4j2.properties new file mode 100644 index 00000000000..8bda5f6e859 --- /dev/null +++ b/authorizations/authorization-ranger/src/test/resources/log4j2.properties @@ -0,0 +1,73 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +# Set to debug or trace if log4j initialization is failing +status = info + +# Name of the configuration +name = ConsoleLogConfig + +# Console appender configuration +appender.console.type = Console +appender.console.name = consoleLogger +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p [%t] %c{1}:%L - %m%n + +# Log files location +property.logPath = ${sys:gravitino.log.path:-build/authorization-ranger-integration-test.log} + +# File appender configuration +appender.file.type = File +appender.file.name = fileLogger +appender.file.fileName = ${logPath} +appender.file.layout.type = PatternLayout +appender.file.layout.pattern = %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5p %c - %m%n + +# Root logger level +rootLogger.level = info + +# Root logger referring to console and file appenders +rootLogger.appenderRef.stdout.ref = consoleLogger +rootLogger.appenderRef.file.ref = fileLogger + +# File appender configuration for testcontainers +appender.testcontainersFile.type = File +appender.testcontainersFile.name = testcontainersLogger +appender.testcontainersFile.fileName = build/testcontainers.log +appender.testcontainersFile.layout.type = PatternLayout +appender.testcontainersFile.layout.pattern = %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5p %c - %m%n + +# Logger for testcontainers +logger.testcontainers.name = org.testcontainers +logger.testcontainers.level = debug +logger.testcontainers.additivity = false +logger.testcontainers.appenderRef.file.ref = testcontainersLogger + +logger.tc.name = tc +logger.tc.level = debug +logger.tc.additivity = false +logger.tc.appenderRef.file.ref = testcontainersLogger + +logger.docker.name = com.github.dockerjava +logger.docker.level = warn +logger.docker.additivity = false +logger.docker.appenderRef.file.ref = testcontainersLogger + +logger.http.name = com.github.dockerjava.zerodep.shaded.org.apache.hc.client5.http.wire +logger.http.level = off diff --git a/authorizations/authorization-ranger/src/test/resources/ranger-spark-security.xml.template b/authorizations/authorization-ranger/src/test/resources/ranger-spark-security.xml.template new file mode 100644 index 00000000000..eb7f2b5e811 --- /dev/null +++ b/authorizations/authorization-ranger/src/test/resources/ranger-spark-security.xml.template @@ -0,0 +1,45 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. +--> +<configuration> + <property> + <name>ranger.plugin.spark.policy.rest.url</name> + <value>__REPLACE__RANGER_ADMIN_URL</value> + </property> + + <property> + <name>ranger.plugin.spark.service.name</name> + <value>__REPLACE__RANGER_HIVE_REPO_NAME</value> + </property> + + <property> + <name>ranger.plugin.spark.policy.cache.dir</name> + <value>/tmp/policycache</value> + </property> + + <property> + <name>ranger.plugin.spark.policy.pollIntervalMs</name> + <value>500</value> + </property> + + <property> + <name>ranger.plugin.spark.policy.source.impl</name> + <value>org.apache.ranger.admin.client.RangerAdminRESTClient</value> + </property> +</configuration> \ No newline at end of file diff --git a/build.gradle.kts b/build.gradle.kts index 1737902aff6..0a463a443ab 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -163,10 +163,15 @@ allprojects { // Default use MiniGravitino to run integration tests param.environment("GRAVITINO_ROOT_DIR", project.rootDir.path) param.environment("IT_PROJECT_DIR", project.buildDir.path) - param.environment("HADOOP_USER_NAME", "anonymous") param.environment("HADOOP_HOME", "/tmp") param.environment("PROJECT_VERSION", project.version) + // If the environment variable `HADOOP_USER_NAME` is not customized in a submodule, + // then set it to "anonymous" + if (param.environment["HADOOP_USER_NAME"] == null) { + param.environment("HADOOP_USER_NAME", "anonymous") + } + // Gravitino CI Docker image param.environment("GRAVITINO_CI_HIVE_DOCKER_IMAGE",
"apache/gravitino-ci:hive-0.1.13") param.environment("GRAVITINO_CI_KERBEROS_HIVE_DOCKER_IMAGE", "apache/gravitino-ci:kerberos-hive-0.1.5") diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java index 3c0be1f20ea..4aa5401a251 100644 --- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java +++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java @@ -70,14 +70,14 @@ public class TestHiveTable extends MiniHiveMetastoreService { NameIdentifier.of(META_LAKE_NAME, HIVE_CATALOG_NAME, HIVE_SCHEMA_NAME); @BeforeAll - private static void setup() { + public static void setup() { hiveCatalog = initHiveCatalog(); hiveCatalogOperations = (HiveCatalogOperations) hiveCatalog.ops(); hiveSchema = initHiveSchema(); } @AfterEach - private void resetSchema() { + public void resetSchema() { hiveCatalogOperations.dropSchema(schemaIdent, true); hiveSchema = initHiveSchema(); } diff --git a/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java b/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java index 28ffc707df3..862d382e050 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java +++ b/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java @@ -76,13 +76,13 @@ public class TestIcebergTable { NameIdentifier.of(META_LAKE_NAME, ICEBERG_CATALOG_NAME, ICEBERG_SCHEMA_NAME); @BeforeAll - private static void setup() { + public static void setup() { initIcebergCatalog(); initIcebergSchema(); } @AfterEach - private void resetSchema() { + public void resetSchema() { NameIdentifier[] nameIdentifiers = icebergCatalogOperations.listTables( Namespace.of(ArrayUtils.add(schemaIdent.namespace().levels(), schemaIdent.name()))); diff --git a/integration-test-common/build.gradle.kts b/integration-test-common/build.gradle.kts index a25ad4cff8f..6c018e66822 100644 --- a/integration-test-common/build.gradle.kts +++ b/integration-test-common/build.gradle.kts @@ -16,9 +16,11 @@ * specific language governing permissions and limitations * under the License. 
*/ +import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar plugins { id("java") + alias(libs.plugins.shadow) } repositories { @@ -29,7 +31,9 @@ dependencies { testImplementation(project(":api")) testImplementation(project(":clients:client-java")) testImplementation(project(":common")) - testImplementation(project(":core")) + testImplementation(project(":core")) { + exclude("org.rocksdb") + } testImplementation(project(":server")) testImplementation(project(":server-common")) testImplementation(libs.bundles.jetty) @@ -53,19 +57,31 @@ dependencies { exclude("org.elasticsearch") exclude("org.elasticsearch.client") exclude("org.elasticsearch.plugin") + exclude("com.amazonaws", "aws-java-sdk-bundle") } - testImplementation(platform("org.junit:junit-bom:5.9.1")) testImplementation("org.junit.jupiter:junit-jupiter") + testImplementation(libs.bundles.jetty) + testImplementation(libs.bundles.jersey) } -tasks.test { - useJUnitPlatform() +val testShadowJar by tasks.registering(ShadowJar::class) { + isZip64 = true + configurations = listOf( + project.configurations.runtimeClasspath.get(), + project.configurations.testRuntimeClasspath.get() + ) + archiveClassifier.set("tests-shadow") + relocate("org.eclipse.jetty", "org.apache.gravitino.it.shaded.org.eclipse.jetty") + from(sourceSets["test"].output) } -val testJar by tasks.registering(Jar::class) { - archiveClassifier.set("tests") - from(sourceSets["test"].output) +tasks.jar { + dependsOn(testShadowJar) +} + +tasks.test { + useJUnitPlatform() } configurations { @@ -73,5 +89,5 @@ configurations { } artifacts { - add("testArtifacts", testJar) + add("testArtifacts", testShadowJar) } diff --git a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/AbstractIT.java b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/AbstractIT.java index 6644e1f646c..71d92d6c931 100644 --- a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/AbstractIT.java +++ b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/AbstractIT.java @@ -340,7 +340,12 @@ public static void startIntegrationTest() throws Exception { if (authenticators.contains(AuthenticatorType.OAUTH.name().toLowerCase())) { client = GravitinoAdminClient.builder(serverUri).withOAuth(mockDataProvider).build(); } else if (authenticators.contains(AuthenticatorType.SIMPLE.name().toLowerCase())) { - client = GravitinoAdminClient.builder(serverUri).withSimpleAuth().build(); + String userName = customConfigs.get("SimpleAuthUserName"); + if (userName != null) { + client = GravitinoAdminClient.builder(serverUri).withSimpleAuth(userName).build(); + } else { + client = GravitinoAdminClient.builder(serverUri).withSimpleAuth().build(); + } } else if (authenticators.contains(AuthenticatorType.KERBEROS.name().toLowerCase())) { serverUri = "http://localhost:" + jettyServerConfig.getHttpPort(); client = null; diff --git a/server-common/src/main/java/org/apache/gravitino/server/web/JettyServer.java b/server-common/src/main/java/org/apache/gravitino/server/web/JettyServer.java index e81b4fa9e16..0ccd33359b5 100644 --- a/server-common/src/main/java/org/apache/gravitino/server/web/JettyServer.java +++ b/server-common/src/main/java/org/apache/gravitino/server/web/JettyServer.java @@ -260,6 +260,15 @@ private void initializeWebAppServletContextHandler() { servletContextHandler = new WebAppContext(); boolean isUnitTest = System.getenv("GRAVITINO_TEST") != null; + if (isUnitTest) { + /** + * Set the default 
descriptor to null to avoid using the Jetty class configurations in the + * default web.xml file, because those Jetty classes cannot be found when + * integration-test-common is packaged as a shadow JAR, which causes Gravitino to fail + * to start. This also lays the groundwork for providing standalone miniGravitino + * modules in the future. + */ + ((WebAppContext) servletContextHandler).setDefaultsDescriptor(null); + } // If in development/test mode, you can set `war` file or `web/dist` directory in the // `GRAVITINO_WAR` environment variable. diff --git a/spark-connector/v3.3/spark/build.gradle.kts b/spark-connector/v3.3/spark/build.gradle.kts index e209da55272..fccebaa5b45 100644 --- a/spark-connector/v3.3/spark/build.gradle.kts +++ b/spark-connector/v3.3/spark/build.gradle.kts @@ -60,10 +60,7 @@ dependencies { exclude("org.apache.logging.log4j") exclude("org.slf4j") } - testImplementation(project(":integration-test-common", "testArtifacts")) { - exclude("org.apache.logging.log4j") - exclude("org.slf4j") - } + testImplementation(project(":integration-test-common", "testArtifacts")) testImplementation(project(":server")) { exclude("org.apache.logging.log4j") exclude("org.slf4j")
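
A note on the fixed wait in testAllowUseSchemaPrivilege above: Thread.sleep(1000L) is reliable only because it exceeds the 500 ms ranger.plugin.spark.policy.pollIntervalMs set in ranger-spark-security.xml.template. A minimal sketch of a polling alternative (the helper below is hypothetical and not part of this patch; it assumes the same SparkSession and schema name used by RangerHiveE2EIT):

import java.util.List;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

final class RangerPolicyWait {
  // Hypothetical helper: poll SHOW DATABASES until the Ranger Spark plugin has
  // refreshed its policy cache and the schema becomes visible, instead of
  // sleeping for a fixed interval. The 500 ms step mirrors
  // ranger.plugin.spark.policy.pollIntervalMs in ranger-spark-security.xml.
  static void awaitSchemaVisible(SparkSession spark, String schemaName, long timeoutMs)
      throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMs;
    while (System.currentTimeMillis() < deadline) {
      List<Row> rows =
          spark.sql(String.format("SHOW DATABASES LIKE '%s'", schemaName)).collectAsList();
      if (rows.stream().anyMatch(row -> row.getString(0).equals(schemaName))) {
        return; // the policy has propagated and the schema is visible
      }
      Thread.sleep(500L);
    }
    throw new IllegalStateException("Ranger policy did not propagate for schema: " + schemaName);
  }
}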