[#4953] feat(hudi-catalog): add Hudi catalog IT and enable module (#4965)

### What changes were proposed in this pull request?

Add Hudi catalog ITs and enable the module.

### Why are the changes needed?

Fix: #4953 

### Does this PR introduce _any_ user-facing change?

Yes. The Hudi catalog is now available.

### How was this patch tested?

ITs added
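
For reference, below is a minimal sketch of the kind of flow the new ITs (or a user) can now exercise through the metalake-scoped Java client. The endpoint, metalake and catalog names, and the property keys (`catalog-backend`, `uri`) are illustrative assumptions rather than excerpts from the added tests; consult the lakehouse-hudi catalog documentation for the exact supported properties.

```java
import java.util.Map;

import org.apache.gravitino.Catalog;
import org.apache.gravitino.client.GravitinoClient;

public class HudiCatalogSmokeExample {
  public static void main(String[] args) {
    // Assumed server endpoint and metalake name; in the ITs these come from the test harness.
    GravitinoClient client =
        GravitinoClient.builder("http://localhost:8090")
            .withMetalake("metalake")
            .build();

    // Register a Hudi catalog backed by a Hive Metastore.
    // The provider name "lakehouse-hudi" matches this module; the property keys are assumptions.
    Catalog catalog =
        client.createCatalog(
            "hudi_catalog",
            Catalog.Type.RELATIONAL,
            "lakehouse-hudi",
            "Hudi catalog created for illustration",
            Map.of("catalog-backend", "hms", "uri", "thrift://localhost:9083"));

    // List schemas through the new catalog to check the connection end to end.
    for (String schema : catalog.asSchemas().listSchemas()) {
      System.out.println(schema);
    }
  }
}
```

When building locally, the module's ITs can be skipped via the Gradle property checked in its `build.gradle.kts`, e.g. `./gradlew :catalogs:catalog-lakehouse-hudi:test -PskipITs`.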
mchades authored Oct 12, 2024
1 parent 8de31cd commit cc27756
Showing 20 changed files with 813 additions and 53 deletions.
1 change: 1 addition & 0 deletions build.gradle.kts
@@ -780,6 +780,7 @@ tasks {
":catalogs:catalog-hive:copyLibAndConfig",
":catalogs:catalog-lakehouse-iceberg:copyLibAndConfig",
":catalogs:catalog-lakehouse-paimon:copyLibAndConfig",
"catalogs:catalog-lakehouse-hudi:copyLibAndConfig",
":catalogs:catalog-jdbc-doris:copyLibAndConfig",
":catalogs:catalog-jdbc-mysql:copyLibAndConfig",
":catalogs:catalog-jdbc-postgresql:copyLibAndConfig",
2 changes: 1 addition & 1 deletion catalogs/catalog-hive/build.gradle.kts
@@ -58,7 +58,7 @@ dependencies {
exclude("com.google.code.findbugs", "sr305")
exclude("com.tdunning", "json")
exclude("com.zaxxer", "HikariCP")
exclude("io.dropwizard.metricss")
exclude("io.dropwizard.metrics")
exclude("javax.transaction", "transaction-api")
exclude("org.apache.ant")
exclude("org.apache.avro")
9 changes: 0 additions & 9 deletions catalogs/catalog-kafka/build.gradle.kts
@@ -97,15 +97,6 @@ tasks.getByName("generateMetadataFileForMavenJavaPublication") {
}

tasks.test {
doFirst {
val testMode = project.properties["testMode"] as? String ?: "embedded"
if (testMode == "deploy") {
environment("GRAVITINO_HOME", project.rootDir.path + "/distribution/package")
} else if (testMode == "embedded") {
environment("GRAVITINO_HOME", project.rootDir.path)
}
}

val skipITs = project.hasProperty("skipITs")
if (skipITs) {
// Exclude integration tests
136 changes: 103 additions & 33 deletions catalogs/catalog-lakehouse-hudi/build.gradle.kts
@@ -27,55 +27,38 @@ plugins {
val scalaVersion: String = project.properties["scalaVersion"] as? String ?: extra["defaultScalaVersion"].toString()
val fullSparkVersion: String = libs.versions.spark34.get()
val sparkVersion = fullSparkVersion.split(".").take(2).joinToString(".")
val hudiVersion = libs.versions.hudi.get()

dependencies {
implementation(project(":api")) {
exclude(group = "*")
exclude("*")
}
implementation(project(":common")) {
exclude(group = "*")
exclude("*")
}
implementation(project(":catalogs:hive-metastore-common"))
implementation(project(":core")) {
exclude(group = "*")
exclude("*")
}

implementation(libs.commons.collections3)
implementation(libs.commons.configuration1)
implementation(libs.htrace.core4)
implementation(libs.guava)
implementation(libs.hive2.exec) {
artifact {
classifier = "core"
}
exclude("com.google.code.findbugs", "jsr305")
exclude("com.google.protobuf")
exclude("org.apache.avro")
exclude("org.apache.ant")
exclude("org.apache.calcite")
exclude("org.apache.calcite.avatica")
exclude("org.apache.curator")
exclude("org.apache.derby")
exclude("org.apache.hadoop", "hadoop-yarn-server-resourcemanager")
exclude("org.apache.hive", "hive-llap-tez")
exclude("org.apache.hive", "hive-vector-code-gen")
exclude("org.apache.ivy")
exclude("org.apache.logging.log4j")
exclude("org.apache.zookeeper")
exclude("org.codehaus.groovy", "groovy-all")
exclude("org.datanucleus", "datanucleus-core")
exclude("org.eclipse.jetty.aggregate", "jetty-all")
exclude("org.eclipse.jetty.orbit", "javax.servlet")
exclude("org.openjdk.jol")
exclude("org.pentaho")
exclude("org.slf4j")
implementation(libs.hadoop2.auth) {
exclude("*")
}
implementation(libs.woodstox.core)
implementation(libs.hive2.metastore) {
exclude("ant")
exclude("co.cask.tephra")
exclude("com.fasterxml.jackson.core", "jackson-core")
exclude("com.github.joshelser")
exclude("com.google.code.findbugs", "jsr305")
exclude("com.google.code.findbugs", "sr305")
exclude("com.tdunning", "json")
exclude("com.zaxxer", "HikariCP")
exclude("io.dropwizard.metricss")
exclude("io.dropwizard.metrics")
exclude("javax.transaction", "transaction-api")
exclude("org.apache.ant")
exclude("org.apache.avro")
@@ -95,16 +78,29 @@ dependencies {
implementation(libs.hadoop2.common) {
exclude("*")
}
implementation(libs.hadoop2.mapreduce.client.core) {
exclude("*")
}
implementation(libs.slf4j.api)
implementation(libs.thrift)

compileOnly(libs.lombok)

annotationProcessor(libs.lombok)

testImplementation(project(":catalogs:hive-metastore-common", "testArtifacts"))
testImplementation(project(":clients:client-java")) {
exclude("org.apache.logging.log4j")
}
testImplementation(project(":integration-test-common", "testArtifacts"))
testImplementation(project(":server")) {
exclude("org.apache.logging.log4j")
}
testImplementation(project(":server-common")) {
exclude("org.apache.logging.log4j")
}

testImplementation(libs.bundles.log4j)
testImplementation(libs.bundles.jetty)
testImplementation(libs.bundles.jersey)
testImplementation(libs.commons.collections3)
testImplementation(libs.commons.configuration1)
testImplementation(libs.datanucleus.core)
@@ -115,12 +111,29 @@ dependencies {
testImplementation(libs.hadoop2.auth) {
exclude("*")
}
testImplementation(libs.hadoop2.hdfs)
testImplementation(libs.hadoop2.mapreduce.client.core) {
exclude("*")
}
testImplementation(libs.htrace.core4)
testImplementation(libs.junit.jupiter.api)
testImplementation(libs.woodstox.core)
testImplementation(libs.mysql.driver)
testImplementation(libs.postgresql.driver)
testImplementation(libs.prometheus.dropwizard)
testImplementation("org.apache.spark:spark-hive_$scalaVersion:$fullSparkVersion") {
exclude("org.apache.hadoop")
exclude("io.dropwizard.metrics")
exclude("com.fasterxml.jackson.core")
exclude("com.fasterxml.jackson.module", "jackson-module-scala_2.12")
}
testImplementation("org.apache.spark:spark-sql_$scalaVersion:$fullSparkVersion") {
exclude("org.apache.avro")
exclude("org.apache.hadoop")
exclude("org.apache.zookeeper")
exclude("io.dropwizard.metrics")
exclude("org.rocksdb")
}
testImplementation(libs.testcontainers)
testImplementation("org.apache.spark:spark-hive_$scalaVersion:$fullSparkVersion") {
exclude("org.apache.hadoop")
exclude("io.dropwizard.metrics")
@@ -135,6 +148,63 @@ dependencies {
exclude("org.rocksdb")
}

testRuntimeOnly("org.apache.hudi:hudi-spark$sparkVersion-bundle_$scalaVersion:0.15.0")
testRuntimeOnly("org.apache.hudi:hudi-spark$sparkVersion-bundle_$scalaVersion:$hudiVersion")
testRuntimeOnly(libs.junit.jupiter.engine)
}

tasks {
val runtimeJars by registering(Copy::class) {
from(configurations.runtimeClasspath)
into("build/libs")
}

val copyCatalogLibs by registering(Copy::class) {
dependsOn("jar", "runtimeJars")
from("build/libs") {
exclude("guava-*.jar")
exclude("log4j-*.jar")
exclude("slf4j-*.jar")
}
into("$rootDir/distribution/package/catalogs/lakehouse-hudi/libs")
}

val copyCatalogConfig by registering(Copy::class) {
from("src/main/resources")
into("$rootDir/distribution/package/catalogs/lakehouse-hudi/conf")

include("lakehouse-hudi.conf")
include("hive-site.xml.template")

rename { original ->
if (original.endsWith(".template")) {
original.replace(".template", "")
} else {
original
}
}

exclude { details ->
details.file.isDirectory()
}

fileMode = 0b111101101 // 0755 (rwxr-xr-x) permissions for the copied config files
}

register("copyLibAndConfig", Copy::class) {
dependsOn(copyCatalogLibs, copyCatalogConfig)
}
}

tasks.test {
val skipITs = project.hasProperty("skipITs")
if (skipITs) {
// Exclude integration tests
exclude("**/integration/test/**")
} else {
dependsOn(tasks.jar)
}
}

tasks.getByName("generateMetadataFileForMavenJavaPublication") {
dependsOn("runtimeJars")
}
Original file line number Diff line number Diff line change
@@ -92,7 +92,8 @@ public void testConnection(
Map<String, String> properties)
throws Exception {
try {
hudiCatalogBackendOps.listSchemas(null);
hudiCatalogBackendOps.listSchemas(
Namespace.of(catalogIdent.namespace().level(0), catalogIdent.name()));
} catch (Exception e) {
throw new ConnectionFailedException(
e, "Failed to run listSchemas on Hudi catalog: %s", e.getMessage());
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<configuration>
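  <!-- Illustrative only: site properties for the backing Hive Metastore would go here, e.g.
       <property><name>hive.metastore.uris</name><value>thrift://localhost:9083</value></property> -->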
</configuration>

Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

# This file holds common configurations for the Lakehouse-hudi catalog. The format of the key is
# 'gravitino.bypass.{hudi-inner-config-key}', where 'hudi-inner-config-key' is the
# real key that is passed to the Lakehouse-hudi catalog.
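#
# For example (illustrative only; whether a given key takes effect depends on the catalog backend):
#   gravitino.bypass.hive.metastore.client.socket.timeout = 600
# would pass 'hive.metastore.client.socket.timeout = 600' through to the catalog.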
Original file line number Diff line number Diff line change
@@ -75,7 +75,9 @@ public void testTestConnection() throws Exception {
InMemoryBackendOps inMemoryBackendOps = new InMemoryBackendOps()) {
ops.hudiCatalogBackendOps = inMemoryBackendOps;

Assertions.assertDoesNotThrow(() -> ops.testConnection(null, null, null, null, null));
Assertions.assertDoesNotThrow(
() ->
ops.testConnection(NameIdentifier.of("metalake", "catalog"), null, null, null, null));
}
}
