Commit

Merge branch 'main' into CLI
justinmclean committed Oct 17, 2024
2 parents 9808022 + 93cdbc2 commit 423219c
Showing 188 changed files with 7,218 additions and 893 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -53,3 +53,7 @@ include clients/client-python/.gitignore
**/metastore_db
**/spark-warehouse
derby.log

web/node_modules
web/dist
web/.next
67 changes: 67 additions & 0 deletions api/src/main/java/org/apache/gravitino/credential/Credential.java
@@ -0,0 +1,67 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.gravitino.credential;

import com.google.common.collect.ImmutableMap;
import java.util.Map;

/** Interface representing a credential with type, expiration time, and additional information. */
public interface Credential {
/** Property key for the credential type. */
String CREDENTIAL_TYPE = "credential-type";
/** Property key for the credential expiration time, in milliseconds since the epoch. */
String EXPIRE_TIME_IN_MS = "expire-time-in-ms";

/**
* Returns the type of the credential. It should be the same as the credential type of the
* credential provider.
*
* @return the credential type as a String.
*/
String credentialType();

/**
* Returns the expiration time of the credential in milliseconds since the epoch; 0 means the
* credential never expires.
*
* @return the expiration time as a long.
*/
long expireTimeInMs();

/**
* Returns credential information.
*
* @return a map of credential information.
*/
Map<String, String> credentialInfo();

/**
* Converts the credential to properties so it can be transferred through the API.
*
* @return a map containing credential properties.
*/
default Map<String, String> toProperties() {
return new ImmutableMap.Builder<String, String>()
.putAll(credentialInfo())
.put(CREDENTIAL_TYPE, credentialType())
.put(EXPIRE_TIME_IN_MS, String.valueOf(expireTimeInMs()))
.build();
}
}
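
To make the interface concrete, here is a minimal sketch (not part of this commit) of a hypothetical implementation; the class name, type string, and info keys are invented for illustration. Its inherited toProperties() would return the info entries plus the credential-type and expire-time-in-ms keys.

import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.gravitino.credential.Credential;

/** Illustrative credential carrying a short-lived token; all names and values are hypothetical. */
public class ExampleTokenCredential implements Credential {

  @Override
  public String credentialType() {
    // Invented type string; a real provider would define its own constant.
    return "example-token";
  }

  @Override
  public long expireTimeInMs() {
    // Expires one hour from now; 0 would mean the credential never expires.
    return System.currentTimeMillis() + 60 * 60 * 1000L;
  }

  @Override
  public Map<String, String> credentialInfo() {
    // Invented info key; the default toProperties() copies these entries as-is.
    return ImmutableMap.of("token", "abc123");
  }
}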
@@ -0,0 +1,48 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.gravitino.exceptions;

import com.google.errorprone.annotations.FormatMethod;
import com.google.errorprone.annotations.FormatString;

/** Exception thrown when a user is forbidden to perform an action. */
public class ForbiddenException extends GravitinoRuntimeException {
/**
* Constructs a new exception with the specified detail message.
*
* @param message the detail message.
* @param args the arguments to the message.
*/
@FormatMethod
public ForbiddenException(@FormatString String message, Object... args) {
super(message, args);
}

/**
* Constructs a new exception with the specified detail message and cause.
*
* @param cause the cause.
* @param message the detail message.
* @param args the arguments to the message.
*/
@FormatMethod
public ForbiddenException(Throwable cause, @FormatString String message, Object... args) {
super(cause, message, args);
}
}
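
A hedged usage sketch (not from this commit): because the constructors are annotated with @FormatMethod, the message is a printf-style format string and the trailing arguments are substituted into it. The method and variable names below are placeholders.

// Hypothetical call site for ForbiddenException.
static void checkCanDropCatalog(String userName, String catalogName, boolean isOwner) {
  if (!isOwner) {
    throw new ForbiddenException("User %s is not allowed to drop catalog %s", userName, catalogName);
  }
}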
@@ -54,7 +54,7 @@
import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
import org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.container.RangerContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.BaseIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.gravitino.meta.AuditInfo;
import org.apache.gravitino.meta.RoleEntity;
@@ -71,7 +71,7 @@
import org.slf4j.LoggerFactory;

@Tag("gravitino-docker-test")
public class RangerHiveE2EIT extends AbstractIT {
public class RangerHiveE2EIT extends BaseIT {
private static final Logger LOG = LoggerFactory.getLogger(RangerHiveE2EIT.class);

private static RangerAuthorizationPlugin rangerAuthPlugin;
@@ -99,15 +99,15 @@ public class RangerHiveE2EIT extends AbstractIT {
private static String RANGER_ADMIN_URL = null;

@BeforeAll
public static void startIntegrationTest() throws Exception {
public void startIntegrationTest() throws Exception {
// Enable Gravitino Authorization mode
Map<String, String> configs = Maps.newHashMap();
configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
configs.put(Configs.SERVICE_ADMINS.getKey(), RangerITEnv.HADOOP_USER_NAME);
configs.put(Configs.AUTHENTICATORS.getKey(), AuthenticatorType.SIMPLE.name().toLowerCase());
configs.put("SimpleAuthUserName", TEST_USER_NAME);
registerCustomConfigs(configs);
AbstractIT.startIntegrationTest();
super.startIntegrationTest();

RangerITEnv.setup();
RangerITEnv.startHiveRangerContainer();
@@ -180,7 +180,8 @@ private static void generateRangerSparkSecurityXML() throws IOException {
}

@AfterAll
public static void stop() throws IOException {
public void stop() throws IOException {
client = null;
if (client != null) {
Arrays.stream(catalog.asSchemas().listSchemas())
.filter(schema -> !schema.equals("default"))
@@ -204,7 +205,7 @@ public static void stop() throws IOException {
LOG.error("Failed to close CloseableGroup", e);
}

AbstractIT.client = null;
client = null;
}

@Test
@@ -267,7 +268,7 @@ void testAllowUseSchemaPrivilege() throws InterruptedException {
1, rows2.stream().filter(row -> row.getString(0).equals(schemaName)).count());
}

private static void createMetalake() {
private void createMetalake() {
GravitinoMetalake[] gravitinoMetalakes = client.listMetalakes();
Assertions.assertEquals(0, gravitinoMetalakes.length);

5 changes: 3 additions & 2 deletions build.gradle.kts
@@ -745,7 +745,7 @@ tasks {
if (!it.name.startsWith("catalog") &&
!it.name.startsWith("authorization") &&
!it.name.startsWith("client") && !it.name.startsWith("filesystem") && !it.name.startsWith("spark") && !it.name.startsWith("iceberg") && it.name != "trino-connector" &&
it.name != "integration-test" && it.name != "hive-metastore-common" && !it.name.startsWith("flink")
it.name != "integration-test" && it.name != "hive-metastore-common" && !it.name.startsWith("flink") && it.name != "gcp-bundle"
) {
from(it.configurations.runtimeClasspath)
into("distribution/package/libs")
@@ -765,7 +765,7 @@ tasks {
!it.name.startsWith("integration-test") &&
!it.name.startsWith("flink") &&
!it.name.startsWith("trino-connector") &&
it.name != "hive-metastore-common"
it.name != "hive-metastore-common" && it.name != "gcp-bundle"
) {
dependsOn("${it.name}:build")
from("${it.name}/build/libs")
@@ -781,6 +781,7 @@ tasks {
":catalogs:catalog-hive:copyLibAndConfig",
":catalogs:catalog-lakehouse-iceberg:copyLibAndConfig",
":catalogs:catalog-lakehouse-paimon:copyLibAndConfig",
"catalogs:catalog-lakehouse-hudi:copyLibAndConfig",
":catalogs:catalog-jdbc-doris:copyLibAndConfig",
":catalogs:catalog-jdbc-mysql:copyLibAndConfig",
":catalogs:catalog-jdbc-postgresql:copyLibAndConfig",
22 changes: 22 additions & 0 deletions bundles/build.gradle.kts
@@ -0,0 +1,22 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

tasks.all {
enabled = false
}
46 changes: 46 additions & 0 deletions bundles/gcp-bundle/build.gradle.kts
@@ -0,0 +1,46 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar

plugins {
`maven-publish`
id("java")
alias(libs.plugins.shadow)
}

dependencies {
compileOnly(project(":catalogs:catalog-hadoop"))
compileOnly(libs.hadoop3.common)
implementation(libs.hadoop3.gcs)
}

tasks.withType(ShadowJar::class.java) {
isZip64 = true
configurations = listOf(project.configurations.runtimeClasspath.get())
archiveClassifier.set("")
}

tasks.jar {
dependsOn(tasks.named("shadowJar"))
archiveClassifier.set("empty")
}

tasks.compileJava {
dependsOn(":catalogs:catalog-hadoop:runtimeJars")
}
@@ -0,0 +1,50 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.gravitino.gcs.fs;

import com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem;
import java.io.IOException;
import java.util.Map;
import org.apache.gravitino.catalog.hadoop.fs.FileSystemProvider;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GCSFileSystemProvider implements FileSystemProvider {
@Override
public FileSystem getFileSystem(Path path, Map<String, String> config) throws IOException {
Configuration configuration = new Configuration();
config.forEach(
(k, v) -> {
configuration.set(k.replace("gravitino.bypass.", ""), v);
});

return GoogleHadoopFileSystem.newInstance(path.toUri(), configuration);
}

@Override
public String scheme() {
return "gs";
}

@Override
public String name() {
return "gcs";
}
}
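
A hedged usage sketch (not from this commit) of how the provider might be called: keys prefixed with "gravitino.bypass." are copied into the Hadoop Configuration with the prefix stripped, and the path's URI selects the GCS file system. The bucket, project id, and property key below are placeholders, and actually running this would require valid GCP credentials.

import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.util.Map;
import org.apache.gravitino.catalog.hadoop.fs.FileSystemProvider;
import org.apache.gravitino.gcs.fs.GCSFileSystemProvider;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GcsProviderExample {
  public static void main(String[] args) throws IOException {
    FileSystemProvider provider = new GCSFileSystemProvider();
    // "gravitino.bypass.fs.gs.project.id" is passed to Hadoop as "fs.gs.project.id".
    Map<String, String> config = ImmutableMap.of("gravitino.bypass.fs.gs.project.id", "my-project");
    FileSystem fs = provider.getFileSystem(new Path("gs://my-bucket/warehouse"), config);
    System.out.println(fs.getUri());
  }
}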
@@ -0,0 +1,20 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

org.apache.gravitino.gcs.fs.GCSFileSystemProvider
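
The line above looks like a standard META-INF/services registration, so the provider can be discovered at runtime through the JDK ServiceLoader. A minimal sketch (not from this commit), assuming the bundle jar is on the classpath:

import java.util.ServiceLoader;
import org.apache.gravitino.catalog.hadoop.fs.FileSystemProvider;

public class ListFileSystemProviders {
  public static void main(String[] args) {
    // Iterates every FileSystemProvider registered via META-INF/services.
    for (FileSystemProvider provider : ServiceLoader.load(FileSystemProvider.class)) {
      System.out.println(provider.name() + " handles scheme " + provider.scheme());
    }
  }
}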
4 changes: 4 additions & 0 deletions catalogs/catalog-hadoop/build.gradle.kts
@@ -36,6 +36,8 @@ dependencies {
exclude(group = "*")
}

compileOnly(libs.guava)

implementation(libs.hadoop3.common) {
exclude("com.sun.jersey")
exclude("javax.servlet", "servlet-api")
@@ -71,6 +73,7 @@ dependencies {
testImplementation(project(":integration-test-common", "testArtifacts"))
testImplementation(project(":server"))
testImplementation(project(":server-common"))
testImplementation(project(":bundles:gcp-bundle"))

testImplementation(libs.minikdc)
testImplementation(libs.hadoop3.minicluster)
@@ -84,6 +87,7 @@ dependencies {
testImplementation(libs.junit.jupiter.params)
testImplementation(libs.testcontainers)
testImplementation(libs.testcontainers.mysql)
testImplementation(libs.hadoop3.gcs)

testRuntimeOnly(libs.junit.jupiter.engine)
}
