Skip to content

Commit

Permalink
Merge branch 'main' of github.com:apache/gravitino into issue_4940
Browse files Browse the repository at this point in the history
  • Loading branch information
yuqi1129 committed Oct 11, 2024
2 parents 444d870 + 02c2ef0 commit 9ac9d3c
Show file tree
Hide file tree
Showing 47 changed files with 1,552 additions and 308 deletions.
4 changes: 2 additions & 2 deletions MAINTAINERS.md
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,8 @@ something like that, all PRs should have related issues.
6. After PR is merged, please check the related issue:
- If the issue is not closed, please close it as fixed manually.
- Assign the issue "Assignees" to the PR author.
- Starting from 0.6.0, we will use the "labels" to manage the release versions, so please add
the corresponding labels to the issue. For example, if the issue is fixed in 0.6.0, please
- Starting from 0.6.0-incubating, we will use the "labels" to manage the release versions, so please add
the corresponding labels to the issue. For example, if the issue is fixed in 0.6.0-incubating, please
add the label "0.6.0-incubating". If the issue is fixed both in 0.6.0-incubating and 0.5.1, please add both labels.

## Policy on backporting bug fixes
Expand Down
99 changes: 99 additions & 0 deletions catalogs/catalog-lakehouse-hudi/build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -24,18 +24,117 @@ plugins {
id("idea")
}

// Scala binary version: taken from the -PscalaVersion build property, falling back to the
// project-wide default declared in extra properties.
val scalaVersion: String = project.properties["scalaVersion"] as? String ?: extra["defaultScalaVersion"].toString()
val fullSparkVersion: String = libs.versions.spark34.get()
// "major.minor" part of the Spark version (e.g. "3.4"); used to select the matching
// Hudi Spark bundle artifact below.
val sparkVersion = fullSparkVersion.split(".").take(2).joinToString(".")

dependencies {
  implementation(project(":api")) {
    exclude(group = "*")
  }
  implementation(project(":common")) {
    exclude(group = "*")
  }
  implementation(project(":catalogs:hive-metastore-common"))
  implementation(project(":core")) {
    exclude(group = "*")
  }

  implementation(libs.guava)
  implementation(libs.hive2.exec) {
    artifact {
      classifier = "core"
    }
    exclude("com.google.code.findbugs", "jsr305")
    exclude("com.google.protobuf")
    exclude("org.apache.avro")
    exclude("org.apache.ant")
    exclude("org.apache.calcite")
    exclude("org.apache.calcite.avatica")
    exclude("org.apache.curator")
    exclude("org.apache.derby")
    exclude("org.apache.hadoop", "hadoop-yarn-server-resourcemanager")
    exclude("org.apache.hive", "hive-llap-tez")
    exclude("org.apache.hive", "hive-vector-code-gen")
    exclude("org.apache.ivy")
    exclude("org.apache.logging.log4j")
    exclude("org.apache.zookeeper")
    exclude("org.codehaus.groovy", "groovy-all")
    exclude("org.datanucleus", "datanucleus-core")
    exclude("org.eclipse.jetty.aggregate", "jetty-all")
    exclude("org.eclipse.jetty.orbit", "javax.servlet")
    exclude("org.openjdk.jol")
    exclude("org.pentaho")
    exclude("org.slf4j")
  }
  implementation(libs.hive2.metastore) {
    exclude("ant")
    exclude("co.cask.tephra")
    exclude("com.github.joshelser")
    exclude("com.google.code.findbugs", "jsr305")
    // Fixed: the previous extra exclude("com.google.code.findbugs", "sr305") was a typo
    // duplicating the jsr305 exclude above and matched nothing.
    exclude("com.tdunning", "json")
    exclude("com.zaxxer", "HikariCP")
    // Fixed typo: was "io.dropwizard.metricss" (no such group), so the dropwizard metrics
    // transitive dependency was never actually excluded. The test configurations below
    // exclude "io.dropwizard.metrics", which confirms the intended group id.
    exclude("io.dropwizard.metrics")
    exclude("javax.transaction", "transaction-api")
    exclude("org.apache.ant")
    exclude("org.apache.avro")
    exclude("org.apache.curator")
    exclude("org.apache.derby")
    exclude("org.apache.hadoop", "hadoop-yarn-server-resourcemanager")
    exclude("org.apache.hbase")
    exclude("org.apache.logging.log4j")
    exclude("org.apache.parquet", "parquet-hadoop-bundle")
    exclude("org.apache.zookeeper")
    exclude("org.datanucleus")
    exclude("org.eclipse.jetty.aggregate", "jetty-all")
    exclude("org.eclipse.jetty.orbit", "javax.servlet")
    exclude("org.openjdk.jol")
    exclude("org.slf4j")
  }
  implementation(libs.hadoop2.common) {
    exclude("*")
  }
  implementation(libs.slf4j.api)
  implementation(libs.thrift)

  compileOnly(libs.lombok)

  annotationProcessor(libs.lombok)

  // Test-scoped HMS artifacts shared with the hive-metastore-common module.
  testImplementation(project(":catalogs:hive-metastore-common", "testArtifacts"))

  testImplementation(libs.bundles.log4j)
  testImplementation(libs.commons.collections3)
  testImplementation(libs.commons.configuration1)
  testImplementation(libs.datanucleus.core)
  testImplementation(libs.datanucleus.api.jdo)
  testImplementation(libs.datanucleus.rdbms)
  testImplementation(libs.datanucleus.jdo)
  testImplementation(libs.derby)
  testImplementation(libs.hadoop2.auth) {
    exclude("*")
  }
  testImplementation(libs.hadoop2.mapreduce.client.core) {
    exclude("*")
  }
  testImplementation(libs.htrace.core4)
  testImplementation(libs.junit.jupiter.api)
  testImplementation(libs.woodstox.core)
  testImplementation("org.apache.spark:spark-hive_$scalaVersion:$fullSparkVersion") {
    exclude("org.apache.hadoop")
    exclude("io.dropwizard.metrics")
    exclude("com.fasterxml.jackson.core")
    exclude("com.fasterxml.jackson.module", "jackson-module-scala_2.12")
  }
  testImplementation("org.apache.spark:spark-sql_$scalaVersion:$fullSparkVersion") {
    exclude("org.apache.avro")
    exclude("org.apache.hadoop")
    exclude("org.apache.zookeeper")
    exclude("io.dropwizard.metrics")
    exclude("org.rocksdb")
  }

  // Hudi Spark bundle matching the major.minor Spark version and Scala version above.
  testRuntimeOnly("org.apache.hudi:hudi-spark$sparkVersion-bundle_$scalaVersion:0.15.0")
  testRuntimeOnly(libs.junit.jupiter.engine)
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
*/
package org.apache.gravitino.catalog.lakehouse.hudi;

import com.google.common.annotations.VisibleForTesting;
import java.util.Map;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.NameIdentifier;
Expand Down Expand Up @@ -54,7 +55,7 @@ public class HudiCatalogOperations implements CatalogOperations, SupportsSchemas

private static final Logger LOG = LoggerFactory.getLogger(HudiCatalogOperations.class);

private HudiCatalogBackendOps hudiCatalogBackendOps;
@VisibleForTesting HudiCatalogBackendOps hudiCatalogBackendOps;

/**
* Load the Hudi Catalog Backend and initialize the Hudi Catalog Operations.
Expand All @@ -69,7 +70,7 @@ public void initialize(
Map<String, String> config, CatalogInfo info, HasPropertyMetadata propertiesMetadata)
throws RuntimeException {
HudiCatalogBackend hudiCatalogBackend = CatalogUtils.loadHudiCatalogBackend(config);
hudiCatalogBackendOps = hudiCatalogBackend.catalogOps();
hudiCatalogBackendOps = hudiCatalogBackend.backendOps();
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,43 @@
*/
package org.apache.gravitino.catalog.lakehouse.hudi;

import java.util.Collections;
import static org.apache.gravitino.connector.PropertyEntry.enumImmutablePropertyEntry;
import static org.apache.gravitino.connector.PropertyEntry.stringRequiredPropertyEntry;

import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.gravitino.catalog.lakehouse.hudi.backend.BackendType;
import org.apache.gravitino.connector.BaseCatalogPropertiesMetadata;
import org.apache.gravitino.connector.PropertyEntry;
import org.apache.gravitino.hive.ClientPropertiesMetadata;

/**
 * Catalog-level property metadata for the Hudi catalog.
 *
 * <p>Declares the required backend selector and backend URI entries, and additionally accepts all
 * client-level entries contributed by {@link ClientPropertiesMetadata}.
 */
public class HudiCatalogPropertiesMetadata extends BaseCatalogPropertiesMetadata {
  /** Required property selecting the backend implementation; values come from {@link BackendType}. */
  public static final String CATALOG_BACKEND = "catalog-backend";

  /** Required property holding the URI used to reach the catalog backend. */
  public static final String URI = "uri";

  private static final ClientPropertiesMetadata CLIENT_PROPERTIES_METADATA =
      new ClientPropertiesMetadata();

  // Immutable registry of all catalog property entries, built once at class load.
  private static final Map<String, PropertyEntry<?>> PROPERTIES_METADATA =
      ImmutableMap.<String, PropertyEntry<?>>builder()
          .put(
              CATALOG_BACKEND,
              enumImmutablePropertyEntry(
                  CATALOG_BACKEND,
                  "Hudi catalog type choose properties",
                  true /* required */,
                  BackendType.class,
                  null /* defaultValue */,
                  false /* hidden */,
                  false /* reserved */))
          .put(
              URI,
              stringRequiredPropertyEntry(
                  URI, "Hudi catalog uri config", false /* immutable */, false /* hidden */))
          .putAll(CLIENT_PROPERTIES_METADATA.propertyEntries())
          .build();

  /**
   * Returns the Hudi-catalog-specific property entries.
   *
   * <p>Fixed: the scraped diff left both the removed {@code return Collections.emptyMap();} and
   * the added return in place, which is an unreachable statement; only the post-commit return is
   * kept.
   */
  @Override
  protected Map<String, PropertyEntry<?>> specificPropertyEntries() {
    return PROPERTIES_METADATA;
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.gravitino.catalog.lakehouse.hudi;

import lombok.EqualsAndHashCode;
import org.apache.gravitino.connector.BaseColumn;

/** A class representing a column in a Hudi table. */
@EqualsAndHashCode(callSuper = true)
public class HudiColumn extends BaseColumn {

  // Instances are created exclusively through the builder.
  private HudiColumn() {}

  /**
   * Creates a new instance of {@link Builder}.
   *
   * @return The new instance.
   */
  public static Builder builder() {
    return new Builder();
  }

  /** A builder class for constructing HudiColumn instances. */
  public static class Builder extends BaseColumnBuilder<Builder, HudiColumn> {

    /** Creates a new instance of {@link Builder}. */
    private Builder() {}

    /**
     * Internal method to build a HudiColumn instance using the provided values.
     *
     * @return A new HudiColumn instance with the configured values.
     */
    @Override
    protected HudiColumn internalBuild() {
      HudiColumn column = new HudiColumn();

      column.name = name;
      column.comment = comment;
      column.dataType = dataType;
      column.nullable = nullable;
      // An unset default value is normalized to the sentinel rather than stored as null.
      column.defaultValue = (defaultValue == null) ? DEFAULT_VALUE_NOT_SET : defaultValue;

      return column;
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -81,5 +81,10 @@ protected HudiSchema<T> internalBuild() {
* @return the HudiSchema
*/
protected abstract HudiSchema<T> buildFromSchema(T schema);

/**
 * Builds the {@link HudiSchema} instance by delegating to {@code internalBuild()}.
 *
 * @return the built HudiSchema instance
 */
@Override
public HudiSchema<T> build() {
return internalBuild();
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,27 @@
*/
package org.apache.gravitino.catalog.lakehouse.hudi;

import java.util.Collections;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.gravitino.connector.BasePropertiesMetadata;
import org.apache.gravitino.connector.PropertyEntry;

/** Schema-level (database-level) property metadata for the Hudi catalog. */
public class HudiSchemaPropertiesMetadata extends BasePropertiesMetadata {
  /** Optional property naming the storage directory for the Hudi dataset. */
  public static final String LOCATION = "location";

  // Immutable registry of all schema property entries, built once at class load.
  private static final Map<String, PropertyEntry<?>> PROPERTIES_METADATA =
      ImmutableMap.<String, PropertyEntry<?>>builder()
          .put(
              LOCATION,
              PropertyEntry.stringOptionalPropertyEntry(
                  LOCATION,
                  "The directory for Hudi dataset storage",
                  false /* immutable */,
                  null /* default value */,
                  false /* hidden */))
          .build();

  /**
   * Returns the Hudi-schema-specific property entries.
   *
   * <p>Fixed: the scraped diff left both the removed {@code return Collections.emptyMap();} and
   * the added return in place, which is an unreachable statement; only the post-commit return is
   * kept.
   */
  @Override
  protected Map<String, PropertyEntry<?>> specificPropertyEntries() {
    return PROPERTIES_METADATA;
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -68,5 +68,10 @@ protected HudiTable<T> internalBuild() {
* @return the HudiTable
*/
protected abstract HudiTable<T> buildFromTable(T backendTable);

/**
 * Builds the {@link HudiTable} instance by delegating to {@code internalBuild()}.
 *
 * @return the built HudiTable instance
 */
@Override
public HudiTable<T> build() {
return internalBuild();
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,46 @@
*/
package org.apache.gravitino.catalog.lakehouse.hudi;

import java.util.Collections;
import static org.apache.gravitino.connector.PropertyEntry.stringImmutablePropertyEntry;
import static org.apache.gravitino.connector.PropertyEntry.stringReservedPropertyEntry;

import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.gravitino.connector.BasePropertiesMetadata;
import org.apache.gravitino.connector.PropertyEntry;

/** Table-level property metadata for the Hudi catalog. */
public class HudiTablePropertiesMetadata extends BasePropertiesMetadata {
  /** Reserved, hidden property carrying the table comment. */
  public static final String COMMENT = "comment";
  /** Immutable property naming the table's storage location. */
  public static final String LOCATION = "location";
  /** Reserved property exposing the input format, which distinguishes the Hudi table type. */
  public static final String INPUT_FORMAT = "input-format";
  /** Reserved property exposing the output format. */
  public static final String OUTPUT_FORMAT = "output-format";

  // Immutable registry of all table property entries, built once at class load.
  private static final Map<String, PropertyEntry<?>> PROPERTIES_METADATA =
      ImmutableMap.<String, PropertyEntry<?>>builder()
          .put(COMMENT, stringReservedPropertyEntry(COMMENT, "table comment", true /* hidden */))
          .put(
              LOCATION,
              stringImmutablePropertyEntry(
                  LOCATION,
                  "The location for Hudi table",
                  false /* required */,
                  null /* default value */,
                  false /* hidden */,
                  false /* reserved */))
          .put(
              INPUT_FORMAT,
              stringReservedPropertyEntry(
                  INPUT_FORMAT,
                  "Hudi table input format used to distinguish the table type",
                  false /* hidden */))
          .put(
              OUTPUT_FORMAT,
              stringReservedPropertyEntry(
                  OUTPUT_FORMAT, "Hudi table output format", false /* hidden */))
          .build();

  /**
   * Returns the Hudi-table-specific property entries.
   *
   * <p>Fixed: the scraped diff left both the removed {@code return Collections.emptyMap();} and
   * the added return in place, which is an unreachable statement; only the post-commit return is
   * kept.
   */
  @Override
  protected Map<String, PropertyEntry<?>> specificPropertyEntries() {
    return PROPERTIES_METADATA;
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,20 +26,20 @@ public abstract class HudiCatalogBackend {

// NOTE(review): the scraped diff interleaved both the pre-rename ("catalogOps") and the
// post-rename ("backendOps") lines, leaving duplicate field/constructor/accessor definitions.
// Only the post-commit ("backendOps") version is kept below.

/** The type of this backend, fixed at construction time. */
private final BackendType backendType;

/** Operations object used to interact with the underlying backend. */
private final HudiCatalogBackendOps backendOps;

/**
 * Initializes this backend with the catalog configuration.
 *
 * @param properties the catalog configuration properties
 */
public abstract void initialize(Map<String, String> properties);

/**
 * Creates a backend of the given type wrapping the given operations object.
 *
 * @param backendType the backend type
 * @param backendOps the backend operations implementation
 */
protected HudiCatalogBackend(BackendType backendType, HudiCatalogBackendOps backendOps) {
  this.backendType = backendType;
  this.backendOps = backendOps;
}

/** @return the type of this backend */
public BackendType type() {
  return backendType;
}

/** @return the operations object for this backend */
public HudiCatalogBackendOps backendOps() {
  return backendOps;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@ private HudiHMSBackend(BackendType backendType, HudiCatalogBackendOps catalogOps

/**
 * Initializes the HMS backend by delegating to the backend operations object.
 *
 * <p>Fixed: the scraped diff left both the removed {@code catalogOps().initialize(properties);}
 * call and the added one in place, which would initialize twice (and {@code catalogOps()} no
 * longer exists after the rename to {@code backendOps()}); only the post-commit call is kept.
 *
 * @param properties the catalog configuration properties passed through to the backend ops
 */
@Override
public void initialize(Map<String, String> properties) {
  backendOps().initialize(properties);
}
}
Loading

0 comments on commit 9ac9d3c

Please sign in to comment.