diff --git a/.github/workflows/backend-integration-test.yml b/.github/workflows/backend-integration-test.yml index 3cd93760e91..68664e85d2b 100644 --- a/.github/workflows/backend-integration-test.yml +++ b/.github/workflows/backend-integration-test.yml @@ -95,7 +95,7 @@ jobs: - name: Backend Integration Test id: integrationTest run: | - ./gradlew test --rerun-tasks -PskipTests -PtestMode=${{ matrix.test-mode }} -PjdkVersion=${{ matrix.java-version }} -PskipWebITs -P${{ matrix.backend }} + ./gradlew test --rerun-tasks -PskipTests -PtestMode=${{ matrix.test-mode }} -PjdkVersion=${{ matrix.java-version }} -PskipWebITs -P${{ matrix.backend }} -PskipPyClientITs - name: Upload integrate tests reports uses: actions/upload-artifact@v3 diff --git a/.github/workflows/python-integration-test.yml b/.github/workflows/python-integration-test.yml index 95e18493fa6..2d9a6f9dd49 100644 --- a/.github/workflows/python-integration-test.yml +++ b/.github/workflows/python-integration-test.yml @@ -57,18 +57,18 @@ jobs: - name: Set up QEMU uses: docker/setup-qemu-action@v2 - - name: Free up disk space - run: | - dev/ci/util_free_space.sh - - name: Python Client Integration Test id: integrationTest run: | - #./gradlew compileDistribution -x test -PjdkVersion=${{ matrix.java-version }} - #for pythonVersion in "3.8" "3.9" "3.10" "3.11" - #do - # ./gradlew -PjdkVersion=${{ matrix.java-version }} -PpythonVersion=${pythonVersion} :client:client-python:integrationTest - #done + ./gradlew compileDistribution -x test -PjdkVersion=${{ matrix.java-version }} + + for pythonVersion in "3.8" "3.9" "3.10" "3.11" + do + echo "Use Python version ${pythonVersion} to test the Python client." 
+ ./gradlew -PjdkVersion=${{ matrix.java-version }} -PpythonVersion=${pythonVersion} :client:client-python:test + # Clean Gravitino database to clean test data + rm -rf ./distribution/package/data + done - name: Upload integrate tests reports uses: actions/upload-artifact@v3 diff --git a/api/src/main/java/com/datastrato/gravitino/rel/indexes/Indexes.java b/api/src/main/java/com/datastrato/gravitino/rel/indexes/Indexes.java index 242ab20a90e..531c0290d05 100644 --- a/api/src/main/java/com/datastrato/gravitino/rel/indexes/Indexes.java +++ b/api/src/main/java/com/datastrato/gravitino/rel/indexes/Indexes.java @@ -72,7 +72,7 @@ public static final class IndexImpl implements Index { * @param name The name of the index * @param fieldNames The field names under the table contained in the index. */ - public IndexImpl(IndexType indexType, String name, String[][] fieldNames) { + private IndexImpl(IndexType indexType, String name, String[][] fieldNames) { this.indexType = indexType; this.name = name; this.fieldNames = fieldNames; diff --git a/build.gradle.kts b/build.gradle.kts index b8a79d08acc..6004c3a60df 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -282,100 +282,41 @@ subprojects { tasks.withType().configureEach { options.errorprone.isEnabled.set(true) - options.errorprone.disableAllChecks.set(true) options.errorprone.disableWarningsInGeneratedCode.set(true) - options.errorprone.enable( - "AnnotateFormatMethod", - "AlwaysThrows", - "ArrayEquals", - "ArrayToString", - "ArraysAsListPrimitiveArray", - "ArrayFillIncompatibleType", - "BadImport", - "BoxedPrimitiveEquality", - "ChainingConstructorIgnoresParameter", - "CheckNotNullMultipleTimes", - "ClassCanBeStatic", - "CollectionIncompatibleType", - "CollectionToArraySafeParameter", - "ComparingThisWithNull", - "ComparisonOutOfRange", - "CompatibleWithAnnotationMisuse", - "CompileTimeConstant", - "ConditionalExpressionNumericPromotion", - "DangerousLiteralNull", - "DeadException", - "DeadThread", - "DefaultCharset", 
- "DoNotCall", - "DoNotMock", - "DuplicateMapKeys", - "EqualsGetClass", - "EqualsNaN", - "EqualsNull", - "EqualsReference", - "EqualsWrongThing", - "ForOverride", - "FormatString", - "FormatStringAnnotation", - "GetClassOnAnnotation", - "GetClassOnClass", - "HashtableContains", - "IdentityBinaryExpression", - "IdentityHashMapBoxing", - "Immutable", - "ImmutableEnumChecker", - "Incomparable", - "IncompatibleArgumentType", - "IndexOfChar", - "InfiniteRecursion", - "InlineFormatString", - "InvalidJavaTimeConstant", - "InvalidPatternSyntax", - "IsInstanceIncompatibleType", - "JavaUtilDate", - "JUnit4ClassAnnotationNonStatic", - "JUnit4SetUpNotRun", - "JUnit4TearDownNotRun", - "JUnit4TestNotRun", - "JUnitAssertSameCheck", - "LockOnBoxedPrimitive", - "LoopConditionChecker", - "LossyPrimitiveCompare", - "MathRoundIntLong", - "MissingSuperCall", - "ModifyingCollectionWithItself", - "MutablePublicArray", - "NonCanonicalStaticImport", - "NonFinalCompileTimeConstant", - "NonRuntimeAnnotation", - "NullTernary", - "OptionalEquality", - "PackageInfo", - "ParametersButNotParameterized", - "RandomCast", - "RandomModInteger", - "ReferenceEquality", - "SelfAssignment", - "SelfComparison", - "SelfEquals", - "SizeGreaterThanOrEqualsZero", - "StaticGuardedByInstance", - "StreamToString", - "StringBuilderInitWithChar", - "SubstringOfZero", - "ThrowNull", - "TruthSelfEquals", - "TryFailThrowable", - "TypeParameterQualifier", - "UnnecessaryCheckNotNull", - "UnnecessaryTypeArgument", - "UnusedAnonymousClass", - "UnusedCollectionModifiedInPlace", - "UnusedVariable", - "UseCorrectAssertInTests", - "VarTypeName", - "XorPower" + options.errorprone.disable( + "AlmostJavadoc", + "CanonicalDuration", + "CheckReturnValue", + "ComparableType", + "ConstantOverflow", + "DoubleBraceInitialization", + "EqualsUnsafeCast", + "EmptyBlockTag", + "FutureReturnValueIgnored", + "InconsistentCapitalization", + "InconsistentHashCode", + "JavaTimeDefaultTimeZone", + "JdkObsolete", + "LockNotBeforeTry", + 
"MissingSummary", + "MissingOverride", + "MutableConstantField", + "NonOverridingEquals", + "ObjectEqualsForPrimitives", + "OperatorPrecedence", + "ReturnValueIgnored", + "SameNameButDifferent", + "StaticAssignmentInConstructor", + "StringSplitter", + "ThreadPriorityCheck", + "ThrowIfUncheckedKnownChecked", + "TypeParameterUnusedInFormals", + "UnicodeEscape", + "UnnecessaryParentheses", + "UnsafeReflectiveConstructionCast", + "UnusedMethod", + "VariableNameSameAsType", + "WaitNotInLoop" ) } } @@ -407,6 +348,7 @@ subprojects { plugins.apply(NodePlugin::class) configure { version.set("20.9.0") + pnpmVersion.set("9.x") nodeProjectDir.set(file("$rootDir/.node")) download.set(true) } diff --git a/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogCapability.java b/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogCapability.java index d98f6e12d7b..8e1eb188b6b 100644 --- a/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogCapability.java +++ b/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogCapability.java @@ -25,4 +25,18 @@ public CapabilityResult columnDefaultValue() { "The DEFAULT constraint for column is only supported since Hive 3.0, " + "but the current Gravitino Hive catalog only supports Hive 2.x."); } + + @Override + public CapabilityResult caseSensitiveOnName(Scope scope) { + switch (scope) { + case SCHEMA: + case TABLE: + case COLUMN: + // Hive is case insensitive, see + // https://cwiki.apache.org/confluence/display/Hive/User+FAQ#UserFAQ-AreHiveSQLidentifiers(e.g.tablenames,columnnames,etc)casesensitive? 
+ return CapabilityResult.unsupported("Hive is case insensitive."); + default: + return CapabilityResult.SUPPORTED; + } + } } diff --git a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java index c660d8185d3..46c3a105643 100644 --- a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java +++ b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java @@ -584,7 +584,7 @@ public void testHiveTableProperties() throws TException, InterruptedException { Assertions.assertEquals(TEXT_INPUT_FORMAT_CLASS, actualTable2.getSd().getInputFormat()); Assertions.assertEquals(IGNORE_KEY_OUTPUT_FORMAT_CLASS, actualTable2.getSd().getOutputFormat()); Assertions.assertEquals(EXTERNAL_TABLE.name(), actualTable2.getTableType()); - Assertions.assertEquals(table2, actualTable2.getSd().getSerdeInfo().getName()); + Assertions.assertEquals(table2.toLowerCase(), actualTable2.getSd().getSerdeInfo().getName()); Assertions.assertEquals(TABLE_COMMENT, actualTable2.getParameters().get(COMMENT)); Assertions.assertEquals( ((Boolean) tablePropertiesMetadata.getDefaultValue(EXTERNAL)).toString().toUpperCase(), @@ -1224,7 +1224,7 @@ private void assertDefaultTableProperties( Assertions.assertEquals( ((TableType) tablePropertiesMetadata.getDefaultValue(TABLE_TYPE)).name(), actualTable.getTableType()); - Assertions.assertEquals(tableName, actualTable.getSd().getSerdeInfo().getName()); + Assertions.assertEquals(tableName.toLowerCase(), actualTable.getSd().getSerdeInfo().getName()); Assertions.assertEquals( ((Boolean) tablePropertiesMetadata.getDefaultValue(EXTERNAL)).toString().toUpperCase(), actualTable.getParameters().get(EXTERNAL)); diff --git 
a/catalogs/catalog-jdbc-doris/src/main/java/com/datastrato/gravitino/catalog/doris/operation/DorisTableOperations.java b/catalogs/catalog-jdbc-doris/src/main/java/com/datastrato/gravitino/catalog/doris/operation/DorisTableOperations.java index 058e1c2f9fd..afa168c33e5 100644 --- a/catalogs/catalog-jdbc-doris/src/main/java/com/datastrato/gravitino/catalog/doris/operation/DorisTableOperations.java +++ b/catalogs/catalog-jdbc-doris/src/main/java/com/datastrato/gravitino/catalog/doris/operation/DorisTableOperations.java @@ -255,7 +255,7 @@ protected void correctJdbcTableFields( } // Doris Cannot get comment from JDBC 8.x, so we need to get comment from sql - String comment = ""; + StringBuilder comment = new StringBuilder(); String sql = "SELECT TABLE_COMMENT FROM information_schema.TABLES WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?"; try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) { @@ -264,10 +264,10 @@ protected void correctJdbcTableFields( try (ResultSet resultSet = preparedStatement.executeQuery()) { while (resultSet.next()) { - comment += resultSet.getString("TABLE_COMMENT"); + comment.append(resultSet.getString("TABLE_COMMENT")); } } - tableBuilder.withComment(comment); + tableBuilder.withComment(comment.toString()); } catch (SQLException e) { throw exceptionMapper.toGravitinoException(e); } diff --git a/catalogs/catalog-jdbc-mysql/src/main/java/com/datastrato/gravitino/catalog/mysql/operation/MysqlTableOperations.java b/catalogs/catalog-jdbc-mysql/src/main/java/com/datastrato/gravitino/catalog/mysql/operation/MysqlTableOperations.java index 1538b31c551..0446713c77b 100644 --- a/catalogs/catalog-jdbc-mysql/src/main/java/com/datastrato/gravitino/catalog/mysql/operation/MysqlTableOperations.java +++ b/catalogs/catalog-jdbc-mysql/src/main/java/com/datastrato/gravitino/catalog/mysql/operation/MysqlTableOperations.java @@ -85,7 +85,7 @@ protected String generateCreateTableSql( } Preconditions.checkArgument( - distribution == 
Distributions.NONE, "MySQL does not support distribution"); + Distributions.NONE.equals(distribution), "MySQL does not support distribution"); validateIncrementCol(columns, indexes); StringBuilder sqlBuilder = new StringBuilder(); diff --git a/catalogs/catalog-jdbc-postgresql/src/main/java/com/datastrato/gravitino/catalog/postgresql/operation/PostgreSqlTableOperations.java b/catalogs/catalog-jdbc-postgresql/src/main/java/com/datastrato/gravitino/catalog/postgresql/operation/PostgreSqlTableOperations.java index 44b09f588e3..e7d8c058fde 100644 --- a/catalogs/catalog-jdbc-postgresql/src/main/java/com/datastrato/gravitino/catalog/postgresql/operation/PostgreSqlTableOperations.java +++ b/catalogs/catalog-jdbc-postgresql/src/main/java/com/datastrato/gravitino/catalog/postgresql/operation/PostgreSqlTableOperations.java @@ -84,7 +84,7 @@ protected String generateCreateTableSql( "Currently we do not support Partitioning in PostgreSQL"); } Preconditions.checkArgument( - distribution == Distributions.NONE, "PostgreSQL does not support distribution"); + Distributions.NONE.equals(distribution), "PostgreSQL does not support distribution"); StringBuilder sqlBuilder = new StringBuilder(); sqlBuilder diff --git a/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalogOperations.java b/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalogOperations.java index 938c73e5813..3c5c423aed3 100644 --- a/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalogOperations.java +++ b/catalogs/catalog-kafka/src/main/java/com/datastrato/gravitino/catalog/kafka/KafkaCatalogOperations.java @@ -216,7 +216,7 @@ public Topic createTopic( LOG.info( "Created topic {}[id: {}] with {} partitions and replication factor {}", ident, - topicId.toString(), + topicId, createTopicsResult.numPartitions(ident.name()).get(), createTopicsResult.replicationFactor(ident.name()).get()); diff --git 
a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java index b3fc85e5883..3c28fbd6527 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java +++ b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java @@ -128,7 +128,7 @@ public NameIdentifier[] listSchemas(Namespace namespace) throws NoSuchCatalogExc icebergTableOps.listNamespace(IcebergTableOpsHelper.getIcebergNamespace()).namespaces(); return namespaces.stream() - .map(icebergNamespace -> NameIdentifier.of(icebergNamespace.levels())) + .map(icebergNamespace -> NameIdentifier.of(namespace, icebergNamespace.toString())) .toArray(NameIdentifier[]::new); } catch (NoSuchNamespaceException e) { throw new NoSuchSchemaException( diff --git a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergSchema.java b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergSchema.java index 2017e899f38..a510c92598c 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergSchema.java +++ b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergSchema.java @@ -65,6 +65,18 @@ public void testCreateIcebergSchema() { Assertions.assertTrue(exception.getMessage().contains("already exists")); } + @Test + public void testListSchema() { + IcebergCatalog icebergCatalog = initIcebergCatalog("testListIcebergSchema"); + NameIdentifier ident = NameIdentifier.of("metalake", icebergCatalog.name(), "test"); + 
icebergCatalog.asSchemas().createSchema(ident, COMMENT_VALUE, Maps.newHashMap()); + + NameIdentifier[] schemas = icebergCatalog.asSchemas().listSchemas(ident.namespace()); + Assertions.assertEquals(1, schemas.length); + Assertions.assertEquals(ident.name(), schemas[0].name()); + Assertions.assertEquals(ident.namespace(), schemas[0].namespace()); + } + @Test public void testAlterSchema() { IcebergCatalog icebergCatalog = initIcebergCatalog("testAlterSchema"); diff --git a/clients/client-java/src/main/java/com/datastrato/gravitino/client/ErrorHandlers.java b/clients/client-java/src/main/java/com/datastrato/gravitino/client/ErrorHandlers.java index a18d3febcdb..d140b0c2a70 100644 --- a/clients/client-java/src/main/java/com/datastrato/gravitino/client/ErrorHandlers.java +++ b/clients/client-java/src/main/java/com/datastrato/gravitino/client/ErrorHandlers.java @@ -10,22 +10,28 @@ import com.datastrato.gravitino.exceptions.BadRequestException; import com.datastrato.gravitino.exceptions.CatalogAlreadyExistsException; import com.datastrato.gravitino.exceptions.FilesetAlreadyExistsException; +import com.datastrato.gravitino.exceptions.GroupAlreadyExistsException; import com.datastrato.gravitino.exceptions.MetalakeAlreadyExistsException; import com.datastrato.gravitino.exceptions.NoSuchCatalogException; import com.datastrato.gravitino.exceptions.NoSuchFilesetException; +import com.datastrato.gravitino.exceptions.NoSuchGroupException; import com.datastrato.gravitino.exceptions.NoSuchMetalakeException; import com.datastrato.gravitino.exceptions.NoSuchPartitionException; +import com.datastrato.gravitino.exceptions.NoSuchRoleException; import com.datastrato.gravitino.exceptions.NoSuchSchemaException; import com.datastrato.gravitino.exceptions.NoSuchTableException; import com.datastrato.gravitino.exceptions.NoSuchTopicException; +import com.datastrato.gravitino.exceptions.NoSuchUserException; import com.datastrato.gravitino.exceptions.NonEmptySchemaException; import 
com.datastrato.gravitino.exceptions.NotFoundException; import com.datastrato.gravitino.exceptions.PartitionAlreadyExistsException; import com.datastrato.gravitino.exceptions.RESTException; +import com.datastrato.gravitino.exceptions.RoleAlreadyExistsException; import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; import com.datastrato.gravitino.exceptions.TableAlreadyExistsException; import com.datastrato.gravitino.exceptions.TopicAlreadyExistsException; import com.datastrato.gravitino.exceptions.UnauthorizedException; +import com.datastrato.gravitino.exceptions.UserAlreadyExistsException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Joiner; import java.util.List; @@ -123,6 +129,33 @@ public static Consumer topicErrorHandler() { return TopicErrorHandler.INSTANCE; } + /** + * Creates an error handler specific to User operations. + * + * @return A Consumer representing the User error handler. + */ + public static Consumer userErrorHandler() { + return UserErrorHandler.INSTANCE; + } + + /** + * Creates an error handler specific to Group operations. + * + * @return A Consumer representing the Group error handler. + */ + public static Consumer groupErrorHandler() { + return GroupErrorHandler.INSTANCE; + } + + /** + * Creates an error handler specific to Role operations. + * + * @return A Consumer representing the Role error handler. + */ + public static Consumer roleErrorHandler() { + return RoleErrorHandler.INSTANCE; + } + private ErrorHandlers() {} /** @@ -459,6 +492,111 @@ public void accept(ErrorResponse errorResponse) { } } + /** Error handler specific to User operations. 
*/ + @SuppressWarnings("FormatStringAnnotation") + private static class UserErrorHandler extends RestErrorHandler { + + private static final UserErrorHandler INSTANCE = new UserErrorHandler(); + + @Override + public void accept(ErrorResponse errorResponse) { + String errorMessage = formatErrorMessage(errorResponse); + + switch (errorResponse.getCode()) { + case ErrorConstants.ILLEGAL_ARGUMENTS_CODE: + throw new IllegalArgumentException(errorMessage); + + case ErrorConstants.NOT_FOUND_CODE: + if (errorResponse.getType().equals(NoSuchMetalakeException.class.getSimpleName())) { + throw new NoSuchMetalakeException(errorMessage); + } else if (errorResponse.getType().equals(NoSuchUserException.class.getSimpleName())) { + throw new NoSuchUserException(errorMessage); + } else { + throw new NotFoundException(errorMessage); + } + + case ErrorConstants.ALREADY_EXISTS_CODE: + throw new UserAlreadyExistsException(errorMessage); + + case ErrorConstants.INTERNAL_ERROR_CODE: + throw new RuntimeException(errorMessage); + + default: + super.accept(errorResponse); + } + } + } + + /** Error handler specific to Group operations. 
*/ + @SuppressWarnings("FormatStringAnnotation") + private static class GroupErrorHandler extends RestErrorHandler { + + private static final GroupErrorHandler INSTANCE = new GroupErrorHandler(); + + @Override + public void accept(ErrorResponse errorResponse) { + String errorMessage = formatErrorMessage(errorResponse); + + switch (errorResponse.getCode()) { + case ErrorConstants.ILLEGAL_ARGUMENTS_CODE: + throw new IllegalArgumentException(errorMessage); + + case ErrorConstants.NOT_FOUND_CODE: + if (errorResponse.getType().equals(NoSuchMetalakeException.class.getSimpleName())) { + throw new NoSuchMetalakeException(errorMessage); + } else if (errorResponse.getType().equals(NoSuchGroupException.class.getSimpleName())) { + throw new NoSuchGroupException(errorMessage); + } else { + throw new NotFoundException(errorMessage); + } + + case ErrorConstants.ALREADY_EXISTS_CODE: + throw new GroupAlreadyExistsException(errorMessage); + + case ErrorConstants.INTERNAL_ERROR_CODE: + throw new RuntimeException(errorMessage); + + default: + super.accept(errorResponse); + } + } + } + + /** Error handler specific to Role operations. 
*/ + @SuppressWarnings("FormatStringAnnotation") + private static class RoleErrorHandler extends RestErrorHandler { + + private static final RoleErrorHandler INSTANCE = new RoleErrorHandler(); + + @Override + public void accept(ErrorResponse errorResponse) { + String errorMessage = formatErrorMessage(errorResponse); + + switch (errorResponse.getCode()) { + case ErrorConstants.ILLEGAL_ARGUMENTS_CODE: + throw new IllegalArgumentException(errorMessage); + + case ErrorConstants.NOT_FOUND_CODE: + if (errorResponse.getType().equals(NoSuchMetalakeException.class.getSimpleName())) { + throw new NoSuchMetalakeException(errorMessage); + } else if (errorResponse.getType().equals(NoSuchRoleException.class.getSimpleName())) { + throw new NoSuchRoleException(errorMessage); + } else { + throw new NotFoundException(errorMessage); + } + + case ErrorConstants.ALREADY_EXISTS_CODE: + throw new RoleAlreadyExistsException(errorMessage); + + case ErrorConstants.INTERNAL_ERROR_CODE: + throw new RuntimeException(errorMessage); + + default: + super.accept(errorResponse); + } + } + } + /** Generic error handler for REST requests. 
*/ private static class RestErrorHandler extends ErrorHandler { private static final ErrorHandler INSTANCE = new RestErrorHandler(); diff --git a/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoAdminClient.java b/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoAdminClient.java index 797991c7dde..07675037f2f 100644 --- a/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoAdminClient.java +++ b/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoAdminClient.java @@ -8,19 +8,39 @@ import com.datastrato.gravitino.MetalakeChange; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.SupportsMetalakes; +import com.datastrato.gravitino.authorization.Group; +import com.datastrato.gravitino.authorization.Privilege; +import com.datastrato.gravitino.authorization.Role; +import com.datastrato.gravitino.authorization.SecurableObject; +import com.datastrato.gravitino.authorization.User; +import com.datastrato.gravitino.dto.requests.GroupAddRequest; import com.datastrato.gravitino.dto.requests.MetalakeCreateRequest; import com.datastrato.gravitino.dto.requests.MetalakeUpdateRequest; import com.datastrato.gravitino.dto.requests.MetalakeUpdatesRequest; +import com.datastrato.gravitino.dto.requests.RoleCreateRequest; +import com.datastrato.gravitino.dto.requests.UserAddRequest; +import com.datastrato.gravitino.dto.responses.DeleteResponse; import com.datastrato.gravitino.dto.responses.DropResponse; +import com.datastrato.gravitino.dto.responses.GroupResponse; import com.datastrato.gravitino.dto.responses.MetalakeListResponse; import com.datastrato.gravitino.dto.responses.MetalakeResponse; +import com.datastrato.gravitino.dto.responses.RemoveResponse; +import com.datastrato.gravitino.dto.responses.RoleResponse; +import com.datastrato.gravitino.dto.responses.UserResponse; +import com.datastrato.gravitino.exceptions.GroupAlreadyExistsException; import 
com.datastrato.gravitino.exceptions.MetalakeAlreadyExistsException; +import com.datastrato.gravitino.exceptions.NoSuchGroupException; import com.datastrato.gravitino.exceptions.NoSuchMetalakeException; +import com.datastrato.gravitino.exceptions.NoSuchRoleException; +import com.datastrato.gravitino.exceptions.NoSuchUserException; +import com.datastrato.gravitino.exceptions.RoleAlreadyExistsException; +import com.datastrato.gravitino.exceptions.UserAlreadyExistsException; import com.google.common.base.Preconditions; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,6 +54,10 @@ public class GravitinoAdminClient extends GravitinoClientBase implements SupportsMetalakes { private static final Logger LOG = LoggerFactory.getLogger(GravitinoAdminClient.class); + private static final String API_METALAKES_USERS_PATH = "api/metalakes/%s/users/%s"; + private static final String API_METALAKES_GROUPS_PATH = "api/metalakes/%s/groups/%s"; + private static final String API_METALAKES_ROLES_PATH = "api/metalakes/%s/roles/%s"; + private static final String API_ADMIN_PATH = "api/admins/%s"; /** * Constructs a new GravitinoClient with the given URI, authenticator and AuthDataProvider. @@ -162,6 +186,282 @@ public boolean dropMetalake(NameIdentifier ident) { } } + /** + * Adds a new User. + * + * @param metalake The Metalake of the User. + * @param user The name of the User. + * @return The added User instance. + * @throws UserAlreadyExistsException If a User with the same name already exists. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. + * @throws RuntimeException If adding the User encounters storage issues. 
+ */ + public User addUser(String metalake, String user) + throws UserAlreadyExistsException, NoSuchMetalakeException { + UserAddRequest req = new UserAddRequest(user); + req.validate(); + + UserResponse resp = + restClient.post( + String.format(API_METALAKES_USERS_PATH, metalake, ""), + req, + UserResponse.class, + Collections.emptyMap(), + ErrorHandlers.userErrorHandler()); + resp.validate(); + + return resp.getUser(); + } + + /** + * Removes a User. + * + * @param metalake The Metalake of the User. + * @param user The name of the User. + * @return `true` if the User was successfully removed, `false` only when there's no such user, + * otherwise it will throw an exception. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. + * @throws RuntimeException If removing the User encounters storage issues. + */ + public boolean removeUser(String metalake, String user) throws NoSuchMetalakeException { + RemoveResponse resp = + restClient.delete( + String.format(API_METALAKES_USERS_PATH, metalake, user), + RemoveResponse.class, + Collections.emptyMap(), + ErrorHandlers.userErrorHandler()); + resp.validate(); + + return resp.removed(); + } + + /** + * Gets a User. + * + * @param metalake The Metalake of the User. + * @param user The name of the User. + * @return The getting User instance. + * @throws NoSuchUserException If the User with the given name does not exist. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. + * @throws RuntimeException If getting the User encounters storage issues. + */ + public User getUser(String metalake, String user) + throws NoSuchUserException, NoSuchMetalakeException { + UserResponse resp = + restClient.get( + String.format(API_METALAKES_USERS_PATH, metalake, user), + UserResponse.class, + Collections.emptyMap(), + ErrorHandlers.userErrorHandler()); + resp.validate(); + + return resp.getUser(); + } + + /** + * Adds a new Group. 
+ * + * @param metalake The Metalake of the Group. + * @param group The name of the Group. + * @return The added Group instance. + * @throws GroupAlreadyExistsException If a Group with the same name already exists. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. + * @throws RuntimeException If adding the Group encounters storage issues. + */ + public Group addGroup(String metalake, String group) + throws GroupAlreadyExistsException, NoSuchMetalakeException { + GroupAddRequest req = new GroupAddRequest(group); + req.validate(); + + GroupResponse resp = + restClient.post( + String.format(API_METALAKES_GROUPS_PATH, metalake, ""), + req, + GroupResponse.class, + Collections.emptyMap(), + ErrorHandlers.groupErrorHandler()); + resp.validate(); + + return resp.getGroup(); + } + + /** + * Removes a Group. + * + * @param metalake The Metalake of the Group. + * @param group The name of the Group. + * @return `true` if the Group was successfully removed, `false` only when there's no such group, + * otherwise it will throw an exception. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. + * @throws RuntimeException If removing the Group encounters storage issues. + */ + public boolean removeGroup(String metalake, String group) throws NoSuchMetalakeException { + RemoveResponse resp = + restClient.delete( + String.format(API_METALAKES_GROUPS_PATH, metalake, group), + RemoveResponse.class, + Collections.emptyMap(), + ErrorHandlers.groupErrorHandler()); + resp.validate(); + + return resp.removed(); + } + + /** + * Gets a Group. + * + * @param metalake The Metalake of the Group. + * @param group The name of the Group. + * @return The getting Group instance. + * @throws NoSuchGroupException If the Group with the given name does not exist. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. + * @throws RuntimeException If getting the Group encounters storage issues. 
+ */ + public Group getGroup(String metalake, String group) + throws NoSuchGroupException, NoSuchMetalakeException { + GroupResponse resp = + restClient.get( + String.format(API_METALAKES_GROUPS_PATH, metalake, group), + GroupResponse.class, + Collections.emptyMap(), + ErrorHandlers.groupErrorHandler()); + resp.validate(); + + return resp.getGroup(); + } + + /** + * Adds a new metalake admin. + * + * @param user The name of the User. + * @return The added User instance. + * @throws UserAlreadyExistsException If a metalake admin with the same name already exists. + * @throws RuntimeException If adding the User encounters storage issues. + */ + public User addMetalakeAdmin(String user) throws UserAlreadyExistsException { + UserAddRequest req = new UserAddRequest(user); + req.validate(); + + UserResponse resp = + restClient.post( + String.format(API_ADMIN_PATH, ""), + req, + UserResponse.class, + Collections.emptyMap(), + ErrorHandlers.userErrorHandler()); + resp.validate(); + + return resp.getUser(); + } + + /** + * Removes a metalake admin. + * + * @param user The name of the User. + * @return `true` if the User was successfully removed, `false` only when there's no such metalake + * admin, otherwise it will throw an exception. + * @throws RuntimeException If removing the User encounters storage issues. + */ + public boolean removeMetalakeAdmin(String user) { + RemoveResponse resp = + restClient.delete( + String.format(API_ADMIN_PATH, user), + RemoveResponse.class, + Collections.emptyMap(), + ErrorHandlers.userErrorHandler()); + resp.validate(); + + return resp.removed(); + } + + /** + * Gets a Role. + * + * @param metalake The Metalake of the Role. + * @param role The name of the Role. + * @return The getting Role instance. + * @throws NoSuchRoleException If the Role with the given name does not exist. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. + * @throws RuntimeException If getting the Role encounters storage issues. 
+ */ + public Role getRole(String metalake, String role) + throws NoSuchRoleException, NoSuchMetalakeException { + RoleResponse resp = + restClient.get( + String.format(API_METALAKES_ROLES_PATH, metalake, role), + RoleResponse.class, + Collections.emptyMap(), + ErrorHandlers.roleErrorHandler()); + resp.validate(); + + return resp.getRole(); + } + + /** + * Deletes a Role. + * + * @param metalake The Metalake of the Role. + * @param role The name of the Role. + * @return `true` if the Role was successfully deleted, `false` only when there's no such role, + * otherwise it will throw an exception. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. + * @throws RuntimeException If deleting the Role encounters storage issues. + */ + public boolean deleteRole(String metalake, String role) throws NoSuchMetalakeException { + DeleteResponse resp = + restClient.delete( + String.format(API_METALAKES_ROLES_PATH, metalake, role), + DeleteResponse.class, + Collections.emptyMap(), + ErrorHandlers.roleErrorHandler()); + resp.validate(); + + return resp.deleted(); + } + + /** + * Creates a new Role. + * + * @param metalake The Metalake of the Role. + * @param role The name of the Role. + * @param properties The properties of the Role. + * @param securableObject The securable object of the Role. + * @param privileges The privileges of the Role. + * @return The created Role instance. + * @throws RoleAlreadyExistsException If a Role with the same name already exists. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. + * @throws RuntimeException If creating the Role encounters storage issues. 
+ */ + public Role createRole( + String metalake, + String role, + Map properties, + SecurableObject securableObject, + List privileges) + throws RoleAlreadyExistsException, NoSuchMetalakeException { + RoleCreateRequest req = + new RoleCreateRequest( + role, + properties, + privileges.stream() + .map(Privilege::name) + .map(Objects::toString) + .collect(Collectors.toList()), + securableObject.toString()); + req.validate(); + + RoleResponse resp = + restClient.post( + String.format(API_METALAKES_ROLES_PATH, metalake, ""), + req, + RoleResponse.class, + Collections.emptyMap(), + ErrorHandlers.roleErrorHandler()); + resp.validate(); + + return resp.getRole(); + } + /** * Creates a new builder for constructing a GravitinoClient. * diff --git a/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoClientBase.java b/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoClientBase.java index 6b83af03623..04490659816 100644 --- a/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoClientBase.java +++ b/clients/client-java/src/main/java/com/datastrato/gravitino/client/GravitinoClientBase.java @@ -15,6 +15,7 @@ import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; +import com.google.errorprone.annotations.InlineMe; import java.io.Closeable; import java.net.URI; import java.net.URISyntaxException; @@ -128,7 +129,8 @@ public GravitinoMetalake loadMetalake(NameIdentifier ident) throws NoSuchMetalak * @return A GravitinoVersion instance representing the version of the Gravitino API. 
*/ @Deprecated - public GravitinoVersion getVersion() { + @InlineMe(replacement = "this.serverVersion()") + public final GravitinoVersion getVersion() { return serverVersion(); } diff --git a/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestGravitinoVersion.java b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestGravitinoVersion.java index d9ca564359f..c94b613af67 100644 --- a/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestGravitinoVersion.java +++ b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestGravitinoVersion.java @@ -58,7 +58,7 @@ void testVersionCompare() { // test less than version1 = new GravitinoVersion("2.5.3", "2023-01-01", "1234567"); version2 = new GravitinoVersion("2.5.4", "2023-01-01", "1234567"); - assertTrue(version1.compareTo(version2) < 1); + assertTrue(version1.compareTo(version2) < 0); // test greater than version1 = new GravitinoVersion("2.5.3", "2023-01-01", "1234567"); diff --git a/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestMetalakeAdmin.java b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestMetalakeAdmin.java new file mode 100644 index 00000000000..66096b7f7b5 --- /dev/null +++ b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestMetalakeAdmin.java @@ -0,0 +1,96 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ +package com.datastrato.gravitino.client; + +import static javax.servlet.http.HttpServletResponse.SC_CONFLICT; +import static javax.servlet.http.HttpServletResponse.SC_OK; +import static org.apache.hc.core5.http.HttpStatus.SC_SERVER_ERROR; + +import com.datastrato.gravitino.authorization.User; +import com.datastrato.gravitino.dto.AuditDTO; +import com.datastrato.gravitino.dto.authorization.UserDTO; +import com.datastrato.gravitino.dto.requests.UserAddRequest; +import com.datastrato.gravitino.dto.responses.ErrorResponse; +import com.datastrato.gravitino.dto.responses.RemoveResponse; +import com.datastrato.gravitino.dto.responses.UserResponse; +import com.datastrato.gravitino.exceptions.UserAlreadyExistsException; +import java.time.Instant; +import org.apache.hc.core5.http.Method; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +public class TestMetalakeAdmin extends TestBase { + + private static final String API_ADMINS_PATH = "api/admins/%s"; + + @BeforeAll + public static void setUp() throws Exception { + TestBase.setUp(); + } + + @Test + public void testAddMetalakeAdmin() throws Exception { + String username = "user"; + String userPath = withSlash(String.format(API_ADMINS_PATH, "")); + UserAddRequest request = new UserAddRequest(username); + + UserDTO mockUser = mockUserDTO(username); + UserResponse userResponse = new UserResponse(mockUser); + buildMockResource(Method.POST, userPath, request, userResponse, SC_OK); + + User addedUser = client.addMetalakeAdmin(username); + Assertions.assertNotNull(addedUser); + assertUser(addedUser, mockUser); + + // test UserAlreadyExistsException + ErrorResponse errResp1 = + ErrorResponse.alreadyExists( + UserAlreadyExistsException.class.getSimpleName(), "user already exists"); + buildMockResource(Method.POST, userPath, request, errResp1, SC_CONFLICT); + Exception ex = + Assertions.assertThrows( + UserAlreadyExistsException.class, () -> 
client.addMetalakeAdmin(username)); + Assertions.assertEquals("user already exists", ex.getMessage()); + + // test RuntimeException + ErrorResponse errResp3 = ErrorResponse.internalError("internal error"); + buildMockResource(Method.POST, userPath, request, errResp3, SC_SERVER_ERROR); + Assertions.assertThrows( + RuntimeException.class, () -> client.addMetalakeAdmin(username), "internal error"); + } + + @Test + public void testRemoveMetalakeAdmin() throws Exception { + String username = "user"; + String rolePath = withSlash(String.format(API_ADMINS_PATH, username)); + + RemoveResponse removeResponse = new RemoveResponse(true); + buildMockResource(Method.DELETE, rolePath, null, removeResponse, SC_OK); + + Assertions.assertTrue(client.removeMetalakeAdmin(username)); + + removeResponse = new RemoveResponse(false); + buildMockResource(Method.DELETE, rolePath, null, removeResponse, SC_OK); + Assertions.assertFalse(client.removeMetalakeAdmin(username)); + + // test RuntimeException + ErrorResponse errResp = ErrorResponse.internalError("internal error"); + buildMockResource(Method.DELETE, rolePath, null, errResp, SC_SERVER_ERROR); + Assertions.assertThrows(RuntimeException.class, () -> client.removeMetalakeAdmin(username)); + } + + private UserDTO mockUserDTO(String name) { + return UserDTO.builder() + .withName(name) + .withAudit(AuditDTO.builder().withCreator("creator").withCreateTime(Instant.now()).build()) + .build(); + } + + private void assertUser(User expected, User actual) { + Assertions.assertEquals(expected.name(), actual.name()); + Assertions.assertEquals(expected.roles(), actual.roles()); + } +} diff --git a/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestRole.java b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestRole.java new file mode 100644 index 00000000000..560bb52e498 --- /dev/null +++ b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestRole.java @@ -0,0 +1,187 @@ +/* + * Copyright 2024 
Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ +package com.datastrato.gravitino.client; + +import static javax.servlet.http.HttpServletResponse.SC_CONFLICT; +import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND; +import static javax.servlet.http.HttpServletResponse.SC_OK; +import static org.apache.hc.core5.http.HttpStatus.SC_SERVER_ERROR; + +import com.datastrato.gravitino.authorization.Privileges; +import com.datastrato.gravitino.authorization.Role; +import com.datastrato.gravitino.authorization.SecurableObjects; +import com.datastrato.gravitino.dto.AuditDTO; +import com.datastrato.gravitino.dto.authorization.RoleDTO; +import com.datastrato.gravitino.dto.requests.RoleCreateRequest; +import com.datastrato.gravitino.dto.responses.DeleteResponse; +import com.datastrato.gravitino.dto.responses.ErrorResponse; +import com.datastrato.gravitino.dto.responses.RoleResponse; +import com.datastrato.gravitino.exceptions.NoSuchMetalakeException; +import com.datastrato.gravitino.exceptions.NoSuchRoleException; +import com.datastrato.gravitino.exceptions.RoleAlreadyExistsException; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import java.time.Instant; +import org.apache.hc.core5.http.Method; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +public class TestRole extends TestBase { + + private static final String API_METALAKES_ROLES_PATH = "api/metalakes/%s/roles/%s"; + protected static final String metalakeName = "testMetalake"; + + @BeforeAll + public static void setUp() throws Exception { + TestBase.setUp(); + } + + @Test + public void testCreateRoles() throws Exception { + String roleName = "role"; + String rolePath = withSlash(String.format(API_METALAKES_ROLES_PATH, metalakeName, "")); + RoleCreateRequest request = + new RoleCreateRequest( + roleName, ImmutableMap.of("k1", "v1"), 
Lists.newArrayList("LOAD_CATALOG"), "catalog"); + + RoleDTO mockRole = mockRoleDTO(roleName); + RoleResponse roleResponse = new RoleResponse(mockRole); + buildMockResource(Method.POST, rolePath, request, roleResponse, SC_OK); + + Role createdRole = + client.createRole( + metalakeName, + roleName, + ImmutableMap.of("k1", "v1"), + SecurableObjects.ofCatalog("catalog"), + Lists.newArrayList(Privileges.LoadCatalog.get())); + Assertions.assertNotNull(createdRole); + assertRole(createdRole, mockRole); + + // test RoleAlreadyExistsException + ErrorResponse errResp1 = + ErrorResponse.alreadyExists( + RoleAlreadyExistsException.class.getSimpleName(), "role already exists"); + buildMockResource(Method.POST, rolePath, request, errResp1, SC_CONFLICT); + Exception ex = + Assertions.assertThrows( + RoleAlreadyExistsException.class, + () -> + client.createRole( + metalakeName, + roleName, + ImmutableMap.of("k1", "v1"), + SecurableObjects.ofCatalog("catalog"), + Lists.newArrayList(Privileges.LoadCatalog.get()))); + Assertions.assertEquals("role already exists", ex.getMessage()); + + // test NoSuchMetalakeException + ErrorResponse errResp2 = + ErrorResponse.notFound(NoSuchMetalakeException.class.getSimpleName(), "metalake not found"); + buildMockResource(Method.POST, rolePath, request, errResp2, SC_NOT_FOUND); + ex = + Assertions.assertThrows( + NoSuchMetalakeException.class, + () -> + client.createRole( + metalakeName, + roleName, + ImmutableMap.of("k1", "v1"), + SecurableObjects.ofCatalog("catalog"), + Lists.newArrayList(Privileges.LoadCatalog.get()))); + Assertions.assertEquals("metalake not found", ex.getMessage()); + + // test RuntimeException + ErrorResponse errResp3 = ErrorResponse.internalError("internal error"); + buildMockResource(Method.POST, rolePath, request, errResp3, SC_SERVER_ERROR); + Assertions.assertThrows( + RuntimeException.class, + () -> + client.createRole( + metalakeName, + roleName, + ImmutableMap.of("k1", "v1"), + SecurableObjects.ofCatalog("catalog"), + 
Lists.newArrayList(Privileges.LoadCatalog.get())), + "internal error"); + } + + @Test + public void testGetRoles() throws Exception { + String roleName = "role"; + String rolePath = withSlash(String.format(API_METALAKES_ROLES_PATH, metalakeName, roleName)); + + RoleDTO mockRole = mockRoleDTO(roleName); + RoleResponse roleResponse = new RoleResponse(mockRole); + buildMockResource(Method.GET, rolePath, null, roleResponse, SC_OK); + + Role loadedRole = client.getRole(metalakeName, roleName); + Assertions.assertNotNull(loadedRole); + assertRole(mockRole, loadedRole); + + // test NoSuchRoleException + ErrorResponse errResp1 = + ErrorResponse.notFound(NoSuchRoleException.class.getSimpleName(), "role not found"); + buildMockResource(Method.GET, rolePath, null, errResp1, SC_NOT_FOUND); + Exception ex = + Assertions.assertThrows( + NoSuchRoleException.class, () -> client.getRole(metalakeName, roleName)); + Assertions.assertEquals("role not found", ex.getMessage()); + + // test NoSuchMetalakeException + ErrorResponse errResp2 = + ErrorResponse.notFound(NoSuchMetalakeException.class.getSimpleName(), "metalake not found"); + buildMockResource(Method.GET, rolePath, null, errResp2, SC_NOT_FOUND); + ex = + Assertions.assertThrows( + NoSuchMetalakeException.class, () -> client.getRole(metalakeName, roleName)); + Assertions.assertEquals("metalake not found", ex.getMessage()); + + // test RuntimeException + ErrorResponse errResp3 = ErrorResponse.internalError("internal error"); + buildMockResource(Method.GET, rolePath, null, errResp3, SC_SERVER_ERROR); + Assertions.assertThrows( + RuntimeException.class, () -> client.getRole(metalakeName, roleName), "internal error"); + } + + @Test + public void testDeleteRoles() throws Exception { + String roleName = "role"; + String rolePath = withSlash(String.format(API_METALAKES_ROLES_PATH, metalakeName, roleName)); + + DeleteResponse deleteResponse = new DeleteResponse(true); + buildMockResource(Method.DELETE, rolePath, null, deleteResponse, 
SC_OK); + + Assertions.assertTrue(client.deleteRole(metalakeName, roleName)); + + deleteResponse = new DeleteResponse(false); + buildMockResource(Method.DELETE, rolePath, null, deleteResponse, SC_OK); + Assertions.assertFalse(client.deleteRole(metalakeName, roleName)); + + // test RuntimeException + ErrorResponse errResp = ErrorResponse.internalError("internal error"); + buildMockResource(Method.DELETE, rolePath, null, errResp, SC_SERVER_ERROR); + Assertions.assertThrows( + RuntimeException.class, () -> client.deleteRole(metalakeName, roleName)); + } + + private RoleDTO mockRoleDTO(String name) { + return RoleDTO.builder() + .withName(name) + .withProperties(ImmutableMap.of("k1", "v1")) + .withSecurableObject(SecurableObjects.of("catalog")) + .withPrivileges(Lists.newArrayList(Privileges.LoadCatalog.get())) + .withAudit(AuditDTO.builder().withCreator("creator").withCreateTime(Instant.now()).build()) + .build(); + } + + private void assertRole(Role expected, Role actual) { + Assertions.assertEquals(expected.name(), actual.name()); + Assertions.assertEquals(expected.privileges(), actual.privileges()); + Assertions.assertEquals( + expected.securableObject().toString(), actual.securableObject().toString()); + } +} diff --git a/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestUserGroup.java b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestUserGroup.java new file mode 100644 index 00000000000..beb37fdf81e --- /dev/null +++ b/clients/client-java/src/test/java/com/datastrato/gravitino/client/TestUserGroup.java @@ -0,0 +1,266 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ +package com.datastrato.gravitino.client; + +import static javax.servlet.http.HttpServletResponse.SC_CONFLICT; +import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND; +import static javax.servlet.http.HttpServletResponse.SC_OK; +import static org.apache.hc.core5.http.HttpStatus.SC_SERVER_ERROR; + +import com.datastrato.gravitino.authorization.Group; +import com.datastrato.gravitino.authorization.User; +import com.datastrato.gravitino.dto.AuditDTO; +import com.datastrato.gravitino.dto.authorization.GroupDTO; +import com.datastrato.gravitino.dto.authorization.UserDTO; +import com.datastrato.gravitino.dto.requests.GroupAddRequest; +import com.datastrato.gravitino.dto.requests.UserAddRequest; +import com.datastrato.gravitino.dto.responses.ErrorResponse; +import com.datastrato.gravitino.dto.responses.GroupResponse; +import com.datastrato.gravitino.dto.responses.RemoveResponse; +import com.datastrato.gravitino.dto.responses.UserResponse; +import com.datastrato.gravitino.exceptions.GroupAlreadyExistsException; +import com.datastrato.gravitino.exceptions.NoSuchGroupException; +import com.datastrato.gravitino.exceptions.NoSuchMetalakeException; +import com.datastrato.gravitino.exceptions.NoSuchUserException; +import com.datastrato.gravitino.exceptions.UserAlreadyExistsException; +import java.time.Instant; +import org.apache.hc.core5.http.Method; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +public class TestUserGroup extends TestBase { + + private static final String API_METALAKES_USERS_PATH = "api/metalakes/%s/users/%s"; + private static final String API_METALAKES_GROUPS_PATH = "api/metalakes/%s/groups/%s"; + protected static final String metalakeName = "testMetalake"; + + @BeforeAll + public static void setUp() throws Exception { + TestBase.setUp(); + } + + @Test + public void testAddUsers() throws Exception { + String username = "user"; + String userPath = 
withSlash(String.format(API_METALAKES_USERS_PATH, metalakeName, "")); + UserAddRequest request = new UserAddRequest(username); + + UserDTO mockUser = mockUserDTO(username); + UserResponse userResponse = new UserResponse(mockUser); + buildMockResource(Method.POST, userPath, request, userResponse, SC_OK); + + User addedUser = client.addUser(metalakeName, username); + Assertions.assertNotNull(addedUser); + assertUser(addedUser, mockUser); + + // test UserAlreadyExistsException + ErrorResponse errResp1 = + ErrorResponse.alreadyExists( + UserAlreadyExistsException.class.getSimpleName(), "user already exists"); + buildMockResource(Method.POST, userPath, request, errResp1, SC_CONFLICT); + Exception ex = + Assertions.assertThrows( + UserAlreadyExistsException.class, () -> client.addUser(metalakeName, username)); + Assertions.assertEquals("user already exists", ex.getMessage()); + + // test NoSuchMetalakeException + ErrorResponse errResp2 = + ErrorResponse.notFound(NoSuchMetalakeException.class.getSimpleName(), "metalake not found"); + buildMockResource(Method.POST, userPath, request, errResp2, SC_NOT_FOUND); + ex = + Assertions.assertThrows( + NoSuchMetalakeException.class, () -> client.addUser(metalakeName, username)); + Assertions.assertEquals("metalake not found", ex.getMessage()); + + // test RuntimeException + ErrorResponse errResp3 = ErrorResponse.internalError("internal error"); + buildMockResource(Method.POST, userPath, request, errResp3, SC_SERVER_ERROR); + Assertions.assertThrows( + RuntimeException.class, () -> client.addUser(metalakeName, username), "internal error"); + } + + @Test + public void testGetUsers() throws Exception { + String username = "user"; + String userPath = withSlash(String.format(API_METALAKES_USERS_PATH, metalakeName, username)); + + UserDTO mockUser = mockUserDTO(username); + UserResponse userResponse = new UserResponse(mockUser); + buildMockResource(Method.GET, userPath, null, userResponse, SC_OK); + + User loadedUser = 
client.getUser(metalakeName, username); + Assertions.assertNotNull(loadedUser); + assertUser(mockUser, loadedUser); + + // test NoSuchUserException + ErrorResponse errResp1 = + ErrorResponse.notFound(NoSuchUserException.class.getSimpleName(), "user not found"); + buildMockResource(Method.GET, userPath, null, errResp1, SC_NOT_FOUND); + Exception ex = + Assertions.assertThrows( + NoSuchUserException.class, () -> client.getUser(metalakeName, username)); + Assertions.assertEquals("user not found", ex.getMessage()); + + // test NoSuchMetalakeException + ErrorResponse errResp2 = + ErrorResponse.notFound(NoSuchMetalakeException.class.getSimpleName(), "metalake not found"); + buildMockResource(Method.GET, userPath, null, errResp2, SC_NOT_FOUND); + ex = + Assertions.assertThrows( + NoSuchMetalakeException.class, () -> client.getUser(metalakeName, username)); + Assertions.assertEquals("metalake not found", ex.getMessage()); + + // test RuntimeException + ErrorResponse errResp3 = ErrorResponse.internalError("internal error"); + buildMockResource(Method.GET, userPath, null, errResp3, SC_SERVER_ERROR); + Assertions.assertThrows( + RuntimeException.class, () -> client.getUser(metalakeName, username), "internal error"); + } + + @Test + public void testRemoveUsers() throws Exception { + String username = "user"; + String userPath = withSlash(String.format(API_METALAKES_USERS_PATH, metalakeName, username)); + + RemoveResponse removeResponse = new RemoveResponse(true); + buildMockResource(Method.DELETE, userPath, null, removeResponse, SC_OK); + + Assertions.assertTrue(client.removeUser(metalakeName, username)); + + removeResponse = new RemoveResponse(false); + buildMockResource(Method.DELETE, userPath, null, removeResponse, SC_OK); + Assertions.assertFalse(client.removeUser(metalakeName, username)); + + // test RuntimeException + ErrorResponse errResp = ErrorResponse.internalError("internal error"); + buildMockResource(Method.DELETE, userPath, null, errResp, SC_SERVER_ERROR); + 
Assertions.assertThrows( + RuntimeException.class, () -> client.removeUser(metalakeName, username)); + } + + @Test + public void testAddGroups() throws Exception { + String groupName = "group"; + String groupPath = withSlash(String.format(API_METALAKES_GROUPS_PATH, metalakeName, "")); + GroupAddRequest request = new GroupAddRequest(groupName); + + GroupDTO mockGroup = mockGroupDTO(groupName); + GroupResponse groupResponse = new GroupResponse(mockGroup); + buildMockResource(Method.POST, groupPath, request, groupResponse, SC_OK); + + Group addedGroup = client.addGroup(metalakeName, groupName); + Assertions.assertNotNull(addedGroup); + assertGroup(addedGroup, mockGroup); + + // test GroupAlreadyExistsException + ErrorResponse errResp1 = + ErrorResponse.alreadyExists( + GroupAlreadyExistsException.class.getSimpleName(), "group already exists"); + buildMockResource(Method.POST, groupPath, request, errResp1, SC_CONFLICT); + Exception ex = + Assertions.assertThrows( + GroupAlreadyExistsException.class, () -> client.addGroup(metalakeName, groupName)); + Assertions.assertEquals("group already exists", ex.getMessage()); + + // test NoSuchMetalakeException + ErrorResponse errResp2 = + ErrorResponse.notFound(NoSuchMetalakeException.class.getSimpleName(), "metalake not found"); + buildMockResource(Method.POST, groupPath, request, errResp2, SC_NOT_FOUND); + ex = + Assertions.assertThrows( + NoSuchMetalakeException.class, () -> client.addGroup(metalakeName, groupName)); + Assertions.assertEquals("metalake not found", ex.getMessage()); + + // test RuntimeException + ErrorResponse errResp3 = ErrorResponse.internalError("internal error"); + buildMockResource(Method.POST, groupPath, request, errResp3, SC_SERVER_ERROR); + Assertions.assertThrows( + RuntimeException.class, () -> client.addGroup(metalakeName, groupName), "internal error"); + } + + @Test + public void testGetGroups() throws Exception { + String groupName = "group"; + String groupPath = 
withSlash(String.format(API_METALAKES_GROUPS_PATH, metalakeName, groupName)); + + GroupDTO mockGroup = mockGroupDTO(groupName); + GroupResponse groupResponse = new GroupResponse(mockGroup); + buildMockResource(Method.GET, groupPath, null, groupResponse, SC_OK); + + Group loadedGroup = client.getGroup(metalakeName, groupName); + Assertions.assertNotNull(loadedGroup); + assertGroup(mockGroup, loadedGroup); + + // test NoSuchGroupException + ErrorResponse errResp1 = + ErrorResponse.notFound(NoSuchGroupException.class.getSimpleName(), "group not found"); + buildMockResource(Method.GET, groupPath, null, errResp1, SC_NOT_FOUND); + Exception ex = + Assertions.assertThrows( + NoSuchGroupException.class, () -> client.getGroup(metalakeName, groupName)); + Assertions.assertEquals("group not found", ex.getMessage()); + + // test NoSuchMetalakeException + ErrorResponse errResp2 = + ErrorResponse.notFound(NoSuchMetalakeException.class.getSimpleName(), "metalake not found"); + buildMockResource(Method.GET, groupPath, null, errResp2, SC_NOT_FOUND); + ex = + Assertions.assertThrows( + NoSuchMetalakeException.class, () -> client.getGroup(metalakeName, groupName)); + Assertions.assertEquals("metalake not found", ex.getMessage()); + + // test RuntimeException + ErrorResponse errResp3 = ErrorResponse.internalError("internal error"); + buildMockResource(Method.GET, groupPath, null, errResp3, SC_SERVER_ERROR); + Assertions.assertThrows( + RuntimeException.class, () -> client.getGroup(metalakeName, groupName), "internal error"); + } + + @Test + public void testRemoveGroups() throws Exception { + String groupName = "user"; + String groupPath = withSlash(String.format(API_METALAKES_GROUPS_PATH, metalakeName, groupName)); + + RemoveResponse removeResponse = new RemoveResponse(true); + buildMockResource(Method.DELETE, groupPath, null, removeResponse, SC_OK); + + Assertions.assertTrue(client.removeGroup(metalakeName, groupName)); + + removeResponse = new RemoveResponse(false); + 
buildMockResource(Method.DELETE, groupPath, null, removeResponse, SC_OK); + Assertions.assertFalse(client.removeGroup(metalakeName, groupName)); + + // test RuntimeException + ErrorResponse errResp = ErrorResponse.internalError("internal error"); + buildMockResource(Method.DELETE, groupPath, null, errResp, SC_SERVER_ERROR); + Assertions.assertThrows( + RuntimeException.class, () -> client.removeGroup(metalakeName, groupName)); + } + + private UserDTO mockUserDTO(String name) { + return UserDTO.builder() + .withName(name) + .withAudit(AuditDTO.builder().withCreator("creator").withCreateTime(Instant.now()).build()) + .build(); + } + + private GroupDTO mockGroupDTO(String name) { + return GroupDTO.builder() + .withName(name) + .withAudit(AuditDTO.builder().withCreator("creator").withCreateTime(Instant.now()).build()) + .build(); + } + + private void assertUser(User expected, User actual) { + Assertions.assertEquals(expected.name(), actual.name()); + Assertions.assertEquals(expected.roles(), actual.roles()); + } + + private void assertGroup(Group expected, Group actual) { + Assertions.assertEquals(expected.name(), actual.name()); + Assertions.assertEquals(expected.roles(), actual.roles()); + } +} diff --git a/clients/client-python/build.gradle.kts b/clients/client-python/build.gradle.kts index 21b89367a1f..03bf90f687a 100644 --- a/clients/client-python/build.gradle.kts +++ b/clients/client-python/build.gradle.kts @@ -26,9 +26,9 @@ fun gravitinoServer(operation: String) { val exitCode = process.waitFor() if (exitCode == 0) { val currentContext = process.inputStream.bufferedReader().readText() - println("Current docker context is: $currentContext") + println("Gravitino server status: $currentContext") } else { - println("checkOrbStackStatus Command execution failed with exit code $exitCode") + println("Gravitino server execution failed with exit code $exitCode") } } @@ -39,26 +39,25 @@ tasks { } val test by registering(VenvTask::class) { - dependsOn(pipInstall) - 
venvExec = "python" - args = listOf("-m", "unittest") - workingDir = projectDir.resolve(".") - } - - val integrationTest by registering(VenvTask::class) { - doFirst() { - gravitinoServer("start") - } + val skipPyClientITs = project.hasProperty("skipPyClientITs") + if (!skipPyClientITs) { + doFirst { + gravitinoServer("start") + } - dependsOn(pipInstall) - venvExec = "python" - args = listOf("-m", "unittest") - workingDir = projectDir.resolve(".") - environment = mapOf("PROJECT_VERSION" to project.version, - "GRADLE_START_GRAVITINO" to "True") + dependsOn(pipInstall) + venvExec = "python" + args = listOf("-m", "unittest") + workingDir = projectDir.resolve(".") + environment = mapOf( + "PROJECT_VERSION" to project.version, + "GRAVITINO_HOME" to project.rootDir.path + "/distribution/package", + "START_EXTERNAL_GRAVITINO" to "true" + ) - doLast { - gravitinoServer("stop") + doLast { + gravitinoServer("stop") + } } } diff --git a/clients/client-python/tests/integration/integration_test_env.py b/clients/client-python/tests/integration/integration_test_env.py index 73cbaed5e01..e02206bf07c 100644 --- a/clients/client-python/tests/integration/integration_test_env.py +++ b/clients/client-python/tests/integration/integration_test_env.py @@ -20,11 +20,11 @@ def get_gravitino_server_version(): response.close() return True except requests.exceptions.RequestException as e: - logger.warning("Failed to access the server: {}", e) + logger.warning("Failed to access the Gravitino server") return False -def check_gravitino_server_status(): +def check_gravitino_server_status() -> bool: gravitino_server_running = False for i in range(5): logger.info("Monitoring Gravitino server status. 
Attempt %s", i + 1) @@ -53,15 +53,21 @@ class IntegrationTestEnv(unittest.TestCase): def setUpClass(cls): _init_logging() - if os.environ.get('GRADLE_START_GRAVITINO') is not None: - logger.info('Manual start gravitino server [%s].', check_gravitino_server_status()) + if os.environ.get('START_EXTERNAL_GRAVITINO') is not None: + """Maybe Gravitino server already startup by Gradle test command or developer manual startup.""" + if not check_gravitino_server_status(): + logger.error("ERROR: Can't find online Gravitino server!") return - current_path = os.getcwd() - cls.gravitino_startup_script = os.path.join(current_path, '../../../distribution/package/bin/gravitino.sh') + GravitinoHome = os.environ.get('GRAVITINO_HOME') + if GravitinoHome is None: + logger.error('Gravitino Python client integration test must configure `GRAVITINO_HOME`') + quit(0) + + cls.gravitino_startup_script = os.path.join(GravitinoHome, 'bin/gravitino.sh') if not os.path.exists(cls.gravitino_startup_script): logger.error("Can't find Gravitino startup script: %s, " - "Please execute `./gradlew compileDistribution -x test` in the gravitino project root " + "Please execute `./gradlew compileDistribution -x test` in the Gravitino project root " "directory.", cls.gravitino_startup_script) quit(0) @@ -78,11 +84,9 @@ def setUpClass(cls): logger.error("ERROR: Can't start Gravitino server!") quit(0) - cls.clean_test_date() - @classmethod def tearDownClass(cls): - if os.environ.get('GRADLE_START_GRAVITINO') is not None: + if os.environ.get('START_EXTERNAL_GRAVITINO') is not None: return logger.info("Stop integration test environment...") diff --git a/clients/client-python/tests/integration/test_fileset_catalog.py b/clients/client-python/tests/integration/test_fileset_catalog.py index 20d20970392..240547928f5 100644 --- a/clients/client-python/tests/integration/test_fileset_catalog.py +++ b/clients/client-python/tests/integration/test_fileset_catalog.py @@ -3,6 +3,7 @@ This software is licensed under the 
Apache License version 2. """ import logging +from random import random, randint from gravitino.api.catalog import Catalog from gravitino.api.fileset import Fileset @@ -20,11 +21,11 @@ class TestFilesetCatalog(IntegrationTestEnv): catalog: Catalog = None metalake: GravitinoMetalake = None - metalake_name: str = "testMetalake" - catalog_name: str = "testCatalog" - schema_name: str = "testSchema" - fileset_name: str = "testFileset1" - fileset_alter_name: str = "testFilesetAlter" + metalake_name: str = "testMetalake" + str(randint(1, 100)) + catalog_name: str = "testCatalog" + str(randint(1, 100)) + schema_name: str = "testSchema" + str(randint(1, 100)) + fileset_name: str = "testFileset1" + str(randint(1, 100)) + fileset_alter_name: str = "testFilesetAlter" + str(randint(1, 100)) provider: str = "hadoop" metalake_ident: NameIdentifier = NameIdentifier.of(metalake_name) diff --git a/common/build.gradle.kts b/common/build.gradle.kts index 5a019da140a..9b7cecbbefc 100644 --- a/common/build.gradle.kts +++ b/common/build.gradle.kts @@ -56,7 +56,7 @@ fun getGitCommitId(): String { } val propertiesFile = "src/main/resources/project.properties" -val writeProjectPropertiesFile = tasks.register("writeProjectPropertiesFile") { +fun writeProjectPropertiesFile() { val propertiesFile = file(propertiesFile) if (propertiesFile.exists()) { propertiesFile.delete() @@ -85,13 +85,18 @@ val writeProjectPropertiesFile = tasks.register("writeProjectPropertiesFile") { tasks { jar { - dependsOn(writeProjectPropertiesFile) doFirst() { + writeProjectPropertiesFile() if (!file(propertiesFile).exists()) { throw GradleException("$propertiesFile file not generated!") } } + + from("src/main/resources") { + include("project.properties").duplicatesStrategy = DuplicatesStrategy.EXCLUDE + } } + clean { delete("$propertiesFile") } diff --git a/common/src/main/java/com/datastrato/gravitino/dto/responses/DeleteResponse.java b/common/src/main/java/com/datastrato/gravitino/dto/responses/DeleteResponse.java 
new file mode 100644 index 00000000000..a524520f256 --- /dev/null +++ b/common/src/main/java/com/datastrato/gravitino/dto/responses/DeleteResponse.java @@ -0,0 +1,43 @@ +/* + * Copyright 2023 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ +package com.datastrato.gravitino.dto.responses; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.EqualsAndHashCode; +import lombok.ToString; + +/** Represents a response for a delete operation. */ +@ToString +@EqualsAndHashCode(callSuper = true) +public class DeleteResponse extends BaseResponse { + + @JsonProperty("deleted") + private final boolean deleted; + + /** + * Constructor for DeleteResponse. + * + * @param deleted Whether the delete operation was successful. + */ + public DeleteResponse(boolean deleted) { + super(0); + this.deleted = deleted; + } + + /** Default constructor for DeleteResponse (used by Jackson deserializer). */ + public DeleteResponse() { + super(); + this.deleted = false; + } + + /** + * Returns whether the delete operation was successful. + * + * @return True if the delete operation was successful, otherwise false. 
+ */ + public boolean deleted() { + return deleted; + } +} diff --git a/core/build.gradle.kts b/core/build.gradle.kts index 983bbb59c02..a07b29ae5b7 100644 --- a/core/build.gradle.kts +++ b/core/build.gradle.kts @@ -28,7 +28,9 @@ dependencies { implementation(libs.rocksdbjni) annotationProcessor(libs.lombok) + compileOnly(libs.lombok) + compileOnly(libs.servlet) // fix error-prone compile error testAnnotationProcessor(libs.lombok) testCompileOnly(libs.lombok) diff --git a/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java b/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java index cbc11ca53e0..9f801b054a3 100644 --- a/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java +++ b/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java @@ -11,15 +11,19 @@ import com.datastrato.gravitino.catalog.CatalogManager; import com.datastrato.gravitino.catalog.FilesetDispatcher; import com.datastrato.gravitino.catalog.FilesetEventDispatcher; +import com.datastrato.gravitino.catalog.FilesetNormalizeDispatcher; import com.datastrato.gravitino.catalog.FilesetOperationDispatcher; import com.datastrato.gravitino.catalog.SchemaDispatcher; import com.datastrato.gravitino.catalog.SchemaEventDispatcher; +import com.datastrato.gravitino.catalog.SchemaNormalizeDispatcher; import com.datastrato.gravitino.catalog.SchemaOperationDispatcher; import com.datastrato.gravitino.catalog.TableDispatcher; import com.datastrato.gravitino.catalog.TableEventDispatcher; +import com.datastrato.gravitino.catalog.TableNormalizeDispatcher; import com.datastrato.gravitino.catalog.TableOperationDispatcher; import com.datastrato.gravitino.catalog.TopicDispatcher; import com.datastrato.gravitino.catalog.TopicEventDispatcher; +import com.datastrato.gravitino.catalog.TopicNormalizeDispatcher; import com.datastrato.gravitino.catalog.TopicOperationDispatcher; import com.datastrato.gravitino.listener.EventBus; import com.datastrato.gravitino.listener.EventListenerManager; @@ -155,16 
+159,27 @@ public void initialize(Config config) { SchemaOperationDispatcher schemaOperationDispatcher = new SchemaOperationDispatcher(catalogManager, entityStore, idGenerator); - this.schemaDispatcher = new SchemaEventDispatcher(eventBus, schemaOperationDispatcher); + SchemaNormalizeDispatcher schemaNormalizeDispatcher = + new SchemaNormalizeDispatcher(schemaOperationDispatcher); + this.schemaDispatcher = new SchemaEventDispatcher(eventBus, schemaNormalizeDispatcher); + TableOperationDispatcher tableOperationDispatcher = new TableOperationDispatcher(catalogManager, entityStore, idGenerator); - this.tableDispatcher = new TableEventDispatcher(eventBus, tableOperationDispatcher); + TableNormalizeDispatcher tableNormalizeDispatcher = + new TableNormalizeDispatcher(tableOperationDispatcher); + this.tableDispatcher = new TableEventDispatcher(eventBus, tableNormalizeDispatcher); + FilesetOperationDispatcher filesetOperationDispatcher = new FilesetOperationDispatcher(catalogManager, entityStore, idGenerator); - this.filesetDispatcher = new FilesetEventDispatcher(eventBus, filesetOperationDispatcher); + FilesetNormalizeDispatcher filesetNormalizeDispatcher = + new FilesetNormalizeDispatcher(filesetOperationDispatcher); + this.filesetDispatcher = new FilesetEventDispatcher(eventBus, filesetNormalizeDispatcher); + TopicOperationDispatcher topicOperationDispatcher = new TopicOperationDispatcher(catalogManager, entityStore, idGenerator); - this.topicDispatcher = new TopicEventDispatcher(eventBus, topicOperationDispatcher); + TopicNormalizeDispatcher topicNormalizeDispatcher = + new TopicNormalizeDispatcher(topicOperationDispatcher); + this.topicDispatcher = new TopicEventDispatcher(eventBus, topicNormalizeDispatcher); // Create and initialize access control related modules boolean enableAuthorization = config.get(Configs.ENABLE_AUTHORIZATION); diff --git a/core/src/main/java/com/datastrato/gravitino/authorization/AccessControlManager.java 
b/core/src/main/java/com/datastrato/gravitino/authorization/AccessControlManager.java index 131153f4512..35c3ea99f47 100644 --- a/core/src/main/java/com/datastrato/gravitino/authorization/AccessControlManager.java +++ b/core/src/main/java/com/datastrato/gravitino/authorization/AccessControlManager.java @@ -8,6 +8,7 @@ import com.datastrato.gravitino.EntityStore; import com.datastrato.gravitino.exceptions.GroupAlreadyExistsException; import com.datastrato.gravitino.exceptions.NoSuchGroupException; +import com.datastrato.gravitino.exceptions.NoSuchMetalakeException; import com.datastrato.gravitino.exceptions.NoSuchRoleException; import com.datastrato.gravitino.exceptions.NoSuchUserException; import com.datastrato.gravitino.exceptions.RoleAlreadyExistsException; @@ -46,13 +47,15 @@ public AccessControlManager(EntityStore store, IdGenerator idGenerator, Config c * Adds a new User. * * @param metalake The Metalake of the User. - * @param name The name of the User. + * @param user The name of the User. * @return The added User instance. - * @throws UserAlreadyExistsException If a User with the same identifier already exists. + * @throws UserAlreadyExistsException If a User with the same name already exists. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. * @throws RuntimeException If adding the User encounters storage issues. */ - public User addUser(String metalake, String name) throws UserAlreadyExistsException { - return doWithNonAdminLock(() -> userGroupManager.addUser(metalake, name)); + public User addUser(String metalake, String user) + throws UserAlreadyExistsException, NoSuchMetalakeException { + return doWithNonAdminLock(() -> userGroupManager.addUser(metalake, user)); } /** @@ -60,10 +63,12 @@ public User addUser(String metalake, String name) throws UserAlreadyExistsExcept * * @param metalake The Metalake of the User. * @param user The name of the User. 
- * @return `true` if the User was successfully removed, `false` otherwise. + * @return `true` if the User was successfully removed, `false` only when there's no such user, + * otherwise it will throw an exception. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. * @throws RuntimeException If removing the User encounters storage issues. */ - public boolean removeUser(String metalake, String user) { + public boolean removeUser(String metalake, String user) throws NoSuchMetalakeException { return doWithNonAdminLock(() -> userGroupManager.removeUser(metalake, user)); } @@ -73,10 +78,12 @@ public boolean removeUser(String metalake, String user) { * @param metalake The Metalake of the User. * @param user The name of the User. * @return The getting User instance. - * @throws NoSuchUserException If the User with the given identifier does not exist. + * @throws NoSuchUserException If the User with the given name does not exist. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. * @throws RuntimeException If getting the User encounters storage issues. */ - public User getUser(String metalake, String user) throws NoSuchUserException { + public User getUser(String metalake, String user) + throws NoSuchUserException, NoSuchMetalakeException { return doWithNonAdminLock(() -> userGroupManager.getUser(metalake, user)); } @@ -86,10 +93,12 @@ public User getUser(String metalake, String user) throws NoSuchUserException { * @param metalake The Metalake of the Group. * @param group The name of the Group. * @return The Added Group instance. - * @throws GroupAlreadyExistsException If a Group with the same identifier already exists. + * @throws GroupAlreadyExistsException If a Group with the same name already exists. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. * @throws RuntimeException If adding the Group encounters storage issues. 
*/ - public Group addGroup(String metalake, String group) throws GroupAlreadyExistsException { + public Group addGroup(String metalake, String group) + throws GroupAlreadyExistsException, NoSuchMetalakeException { return doWithNonAdminLock(() -> userGroupManager.addGroup(metalake, group)); } @@ -98,10 +107,12 @@ public Group addGroup(String metalake, String group) throws GroupAlreadyExistsEx * * @param metalake The Metalake of the Group. * @param group THe name of the Group. - * @return `true` if the Group was successfully removed, `false` otherwise. + * @return `true` if the Group was successfully removed, `false` only when there's no such group, + * otherwise it will throw an exception. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. * @throws RuntimeException If removing the Group encounters storage issues. */ - public boolean removeGroup(String metalake, String group) { + public boolean removeGroup(String metalake, String group) throws NoSuchMetalakeException { return doWithNonAdminLock(() -> userGroupManager.removeGroup(metalake, group)); } @@ -111,10 +122,12 @@ public boolean removeGroup(String metalake, String group) { * @param metalake The Metalake of the Group. * @param group The name of the Group. * @return The getting Group instance. - * @throws NoSuchGroupException If the Group with the given identifier does not exist. + * @throws NoSuchGroupException If the Group with the given name does not exist. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. * @throws RuntimeException If getting the Group encounters storage issues. 
*/ - public Group getGroup(String metalake, String group) throws NoSuchGroupException { + public Group getGroup(String metalake, String group) + throws NoSuchGroupException, NoSuchMetalakeException { return doWithNonAdminLock(() -> userGroupManager.getGroup(metalake, group)); } @@ -187,10 +200,10 @@ public boolean revokeRoleFromUser(String metalake, String role, String user) { * * @param user The name of the User. * @return The added User instance. - * @throws UserAlreadyExistsException If a User with the same identifier already exists. + * @throws UserAlreadyExistsException If a metalake admin with the same name already exists. * @throws RuntimeException If adding the User encounters storage issues. */ - public User addMetalakeAdmin(String user) { + public User addMetalakeAdmin(String user) throws UserAlreadyExistsException { return doWithAdminLock(() -> adminManager.addMetalakeAdmin(user)); } @@ -198,7 +211,8 @@ public User addMetalakeAdmin(String user) { * Removes a metalake admin. * * @param user The name of the User. - * @return `true` if the User was successfully removed, `false` otherwise. + * @return `true` if the User was successfully removed, `false` only when there's no such metalake + * admin, otherwise it will throw an exception. * @throws RuntimeException If removing the User encounters storage issues. */ public boolean removeMetalakeAdmin(String user) { @@ -234,7 +248,8 @@ public boolean isMetalakeAdmin(String user) { * @param securableObject The securable object of the Role. * @param privileges The privileges of the Role. * @return The created Role instance. - * @throws RoleAlreadyExistsException If a Role with the same identifier already exists. + * @throws RoleAlreadyExistsException If a Role with the same name already exists. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. * @throws RuntimeException If creating the Role encounters storage issues. 
*/ public Role createRole( @@ -243,34 +258,38 @@ public Role createRole( Map properties, SecurableObject securableObject, List privileges) - throws RoleAlreadyExistsException { + throws RoleAlreadyExistsException, NoSuchMetalakeException { return doWithNonAdminLock( () -> roleManager.createRole(metalake, role, properties, securableObject, privileges)); } /** - * Loads a Role. + * Gets a Role. * * @param metalake The Metalake of the Role. * @param role The name of the Role. - * @return The loading Role instance. - * @throws NoSuchRoleException If the Role with the given identifier does not exist. - * @throws RuntimeException If loading the Role encounters storage issues. + * @return The getting Role instance. + * @throws NoSuchRoleException If the Role with the given name does not exist. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. + * @throws RuntimeException If getting the Role encounters storage issues. */ - public Role loadRole(String metalake, String role) throws NoSuchRoleException { - return doWithNonAdminLock(() -> roleManager.loadRole(metalake, role)); + public Role getRole(String metalake, String role) + throws NoSuchRoleException, NoSuchMetalakeException { + return doWithNonAdminLock(() -> roleManager.getRole(metalake, role)); } /** - * Drops a Role. + * Deletes a Role. * * @param metalake The Metalake of the Role. * @param role The name of the Role. - * @return `true` if the Role was successfully dropped, `false` otherwise. - * @throws RuntimeException If dropping the User encounters storage issues. + * @return `true` if the Role was successfully deleted, `false` only when there's no such role, + * otherwise it will throw an exception. + * @throws NoSuchMetalakeException If the Metalake with the given name does not exist. + * @throws RuntimeException If deleting the Role encounters storage issues. 
*/ - public boolean dropRole(String metalake, String role) { - return doWithNonAdminLock(() -> roleManager.dropRole(metalake, role)); + public boolean deleteRole(String metalake, String role) throws NoSuchMetalakeException { + return doWithNonAdminLock(() -> roleManager.deleteRole(metalake, role)); } @VisibleForTesting diff --git a/core/src/main/java/com/datastrato/gravitino/authorization/PermissionManager.java b/core/src/main/java/com/datastrato/gravitino/authorization/PermissionManager.java index 4453e68d255..a77eb191336 100644 --- a/core/src/main/java/com/datastrato/gravitino/authorization/PermissionManager.java +++ b/core/src/main/java/com/datastrato/gravitino/authorization/PermissionManager.java @@ -44,7 +44,7 @@ class PermissionManager { boolean grantRoleToUser(String metalake, String role, String user) { try { - RoleEntity roleEntity = roleManager.loadRole(metalake, role); + RoleEntity roleEntity = roleManager.getRole(metalake, role); store.update( AuthorizationUtils.ofUser(metalake, user), @@ -98,7 +98,7 @@ boolean grantRoleToUser(String metalake, String role, String user) { boolean grantRoleToGroup(String metalake, String role, String group) { try { - RoleEntity roleEntity = roleManager.loadRole(metalake, role); + RoleEntity roleEntity = roleManager.getRole(metalake, role); store.update( AuthorizationUtils.ofGroup(metalake, group), @@ -152,7 +152,7 @@ boolean grantRoleToGroup(String metalake, String role, String group) { boolean revokeRoleFromGroup(String metalake, String role, String group) { try { - RoleEntity roleEntity = roleManager.loadRole(metalake, role); + RoleEntity roleEntity = roleManager.getRole(metalake, role); AtomicBoolean removed = new AtomicBoolean(true); @@ -212,7 +212,7 @@ boolean revokeRoleFromGroup(String metalake, String role, String group) { boolean revokeRoleFromUser(String metalake, String role, String user) { try { - RoleEntity roleEntity = roleManager.loadRole(metalake, role); + RoleEntity roleEntity = 
roleManager.getRole(metalake, role); AtomicBoolean removed = new AtomicBoolean(true); store.update( diff --git a/core/src/main/java/com/datastrato/gravitino/authorization/RoleManager.java b/core/src/main/java/com/datastrato/gravitino/authorization/RoleManager.java index ce396599967..71d72c36e41 100644 --- a/core/src/main/java/com/datastrato/gravitino/authorization/RoleManager.java +++ b/core/src/main/java/com/datastrato/gravitino/authorization/RoleManager.java @@ -108,7 +108,7 @@ RoleEntity createRole( } } - RoleEntity loadRole(String metalake, String role) throws NoSuchRoleException { + RoleEntity getRole(String metalake, String role) throws NoSuchRoleException { try { AuthorizationUtils.checkMetalakeExists(metalake); return getRoleEntity(AuthorizationUtils.ofRole(metalake, role)); @@ -118,7 +118,7 @@ RoleEntity loadRole(String metalake, String role) throws NoSuchRoleException { } } - boolean dropRole(String metalake, String role) { + boolean deleteRole(String metalake, String role) { try { AuthorizationUtils.checkMetalakeExists(metalake); NameIdentifier ident = AuthorizationUtils.ofRole(metalake, role); diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/CapabilityHelpers.java b/core/src/main/java/com/datastrato/gravitino/catalog/CapabilityHelpers.java index d08587987ba..2ed2aca6e41 100644 --- a/core/src/main/java/com/datastrato/gravitino/catalog/CapabilityHelpers.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/CapabilityHelpers.java @@ -5,11 +5,25 @@ package com.datastrato.gravitino.catalog; import static com.datastrato.gravitino.rel.Column.DEFAULT_VALUE_NOT_SET; +import static com.datastrato.gravitino.rel.expressions.transforms.Transforms.NAME_OF_IDENTITY; +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; import com.datastrato.gravitino.connector.capability.Capability; +import com.datastrato.gravitino.file.FilesetChange; import com.datastrato.gravitino.rel.Column; import 
com.datastrato.gravitino.rel.TableChange; import com.datastrato.gravitino.rel.expressions.Expression; +import com.datastrato.gravitino.rel.expressions.FunctionExpression; +import com.datastrato.gravitino.rel.expressions.NamedReference; +import com.datastrato.gravitino.rel.expressions.distributions.Distribution; +import com.datastrato.gravitino.rel.expressions.distributions.Distributions; +import com.datastrato.gravitino.rel.expressions.sorts.SortOrder; +import com.datastrato.gravitino.rel.expressions.sorts.SortOrders; +import com.datastrato.gravitino.rel.expressions.transforms.Transform; +import com.datastrato.gravitino.rel.expressions.transforms.Transforms; +import com.datastrato.gravitino.rel.indexes.Index; +import com.datastrato.gravitino.rel.indexes.Indexes; import com.google.common.base.Preconditions; import java.util.Arrays; @@ -25,22 +39,244 @@ public static TableChange[] applyCapabilities(Capability capabilities, TableChan return Arrays.stream(changes) .map( change -> { - if (change instanceof TableChange.AddColumn) { - return applyCapabilities((TableChange.AddColumn) change, capabilities); + if (change instanceof TableChange.ColumnChange) { + return applyCapabilities((TableChange.ColumnChange) change, capabilities); - } else if (change instanceof TableChange.UpdateColumnNullability) { - return applyCapabilities( - (TableChange.UpdateColumnNullability) change, capabilities); - - } else if (change instanceof TableChange.UpdateColumnDefaultValue) { - return applyCapabilities( - ((TableChange.UpdateColumnDefaultValue) change), capabilities); + } else if (change instanceof TableChange.RenameTable) { + return applyCapabilities((TableChange.RenameTable) change, capabilities); } return change; }) .toArray(TableChange[]::new); } + public static FilesetChange[] applyCapabilities( + Capability capabilities, FilesetChange... 
changes) { + return Arrays.stream(changes) + .map( + change -> { + if (change instanceof FilesetChange.RenameFileset) { + return applyCapabilities((FilesetChange.RenameFileset) change, capabilities); + } + return change; + }) + .toArray(FilesetChange[]::new); + } + + public static NameIdentifier[] applyCapabilities( + NameIdentifier[] idents, Capability.Scope scope, Capability capabilities) { + return Arrays.stream(idents) + .map(ident -> applyCapabilities(ident, scope, capabilities)) + .toArray(NameIdentifier[]::new); + } + + public static NameIdentifier applyCapabilities( + NameIdentifier ident, Capability.Scope scope, Capability capabilities) { + Namespace namespace = ident.namespace(); + namespace = applyCapabilities(namespace, scope, capabilities); + + String name = applyCapabilitiesOnName(scope, ident.name(), capabilities); + return NameIdentifier.of(namespace, name); + } + + public static Transform[] applyCapabilities(Transform[] transforms, Capability capabilities) { + return Arrays.stream(transforms) + .map(t -> applyCapabilities(t, capabilities)) + .toArray(Transform[]::new); + } + + public static Distribution applyCapabilities(Distribution distribution, Capability capabilities) { + Expression[] expressions = applyCapabilities(distribution.expressions(), capabilities); + return Distributions.of(distribution.strategy(), distribution.number(), expressions); + } + + public static SortOrder[] applyCapabilities(SortOrder[] sortOrders, Capability capabilities) { + return Arrays.stream(sortOrders) + .map(s -> applyCapabilities(s, capabilities)) + .toArray(SortOrder[]::new); + } + + public static Index[] applyCapabilities(Index[] indexes, Capability capabilities) { + return Arrays.stream(indexes) + .map(i -> applyCapabilities(i, capabilities)) + .toArray(Index[]::new); + } + + public static Namespace applyCapabilities( + Namespace namespace, Capability.Scope identScope, Capability capabilities) { + String metalake = namespace.level(0); + String catalog = 
namespace.level(1); + if (identScope == Capability.Scope.TABLE + || identScope == Capability.Scope.FILESET + || identScope == Capability.Scope.TOPIC) { + String schema = namespace.level(namespace.length() - 1); + schema = applyCapabilitiesOnName(Capability.Scope.SCHEMA, schema, capabilities); + return Namespace.of(metalake, catalog, schema); + } + return namespace; + } + + private static Index applyCapabilities(Index index, Capability capabilities) { + return Indexes.of( + index.type(), index.name(), applyCapabilities(index.fieldNames(), capabilities)); + } + + private static String[][] applyCapabilities(String[][] fieldNames, Capability capabilities) { + String[][] standardizeFieldNames = new String[fieldNames.length][]; + for (int i = 0; i < standardizeFieldNames.length; i++) { + standardizeFieldNames[i] = applyCapabilities(fieldNames[i], capabilities); + } + return standardizeFieldNames; + } + + private static String[] applyCapabilities(String[] fieldName, Capability capabilities) { + String[] sensitiveOnColumnName = applyCaseSensitiveOnColumnName(fieldName, capabilities); + applyNameSpecification(Capability.Scope.COLUMN, sensitiveOnColumnName[0], capabilities); + return sensitiveOnColumnName; + } + + private static Transform applyCapabilities(Transform transform, Capability capabilities) { + if (transform instanceof Transform.SingleFieldTransform) { + String[] standardizeFieldName = + applyCapabilities(((Transform.SingleFieldTransform) transform).fieldName(), capabilities); + switch (transform.name()) { + case NAME_OF_IDENTITY: + return Transforms.identity(standardizeFieldName); + case Transforms.NAME_OF_YEAR: + return Transforms.year(standardizeFieldName); + case Transforms.NAME_OF_MONTH: + return Transforms.month(standardizeFieldName); + case Transforms.NAME_OF_DAY: + return Transforms.day(standardizeFieldName); + case Transforms.NAME_OF_HOUR: + return Transforms.hour(standardizeFieldName); + default: + throw new IllegalArgumentException("Unsupported 
transform: " + transform.name()); + } + + } else if (transform instanceof Transforms.BucketTransform) { + Transforms.BucketTransform bucketTransform = (Transforms.BucketTransform) transform; + return Transforms.bucket( + bucketTransform.numBuckets(), + applyCapabilities(bucketTransform.fieldNames(), capabilities)); + + } else if (transform instanceof Transforms.TruncateTransform) { + Transforms.TruncateTransform truncateTransform = (Transforms.TruncateTransform) transform; + return Transforms.truncate( + truncateTransform.width(), + applyCapabilities(truncateTransform.fieldName(), capabilities)); + + } else if (transform instanceof Transforms.ListTransform) { + return Transforms.list( + applyCapabilities(((Transforms.ListTransform) transform).fieldNames(), capabilities)); + + } else if (transform instanceof Transforms.RangeTransform) { + return Transforms.range( + applyCapabilities(((Transforms.RangeTransform) transform).fieldName(), capabilities)); + + } else if (transform instanceof Transforms.ApplyTransform) { + return Transforms.apply( + transform.name(), applyCapabilities(transform.arguments(), capabilities)); + + } else { + throw new IllegalArgumentException("Unsupported transform: " + transform.name()); + } + } + + private static SortOrder applyCapabilities(SortOrder sortOrder, Capability capabilities) { + Expression expression = applyCapabilities(sortOrder.expression(), capabilities); + return SortOrders.of(expression, sortOrder.direction(), sortOrder.nullOrdering()); + } + + private static Expression[] applyCapabilities(Expression[] expressions, Capability capabilities) { + return Arrays.stream(expressions) + .map(e -> applyCapabilities(e, capabilities)) + .toArray(Expression[]::new); + } + + private static Expression applyCapabilities(Expression expression, Capability capabilities) { + if (expression instanceof NamedReference.FieldReference) { + NamedReference.FieldReference ref = (NamedReference.FieldReference) expression; + String[] fieldName = 
applyCapabilities(ref.fieldName(), capabilities); + return NamedReference.field(fieldName); + + } else if (expression instanceof FunctionExpression) { + FunctionExpression functionExpression = (FunctionExpression) expression; + return FunctionExpression.of( + functionExpression.functionName(), + applyCapabilities(functionExpression.arguments(), capabilities)); + } + return expression; + } + + private static FilesetChange applyCapabilities( + FilesetChange.RenameFileset renameFileset, Capability capabilities) { + String newName = + applyCaseSensitiveOnName( + Capability.Scope.FILESET, renameFileset.getNewName(), capabilities); + applyNameSpecification(Capability.Scope.FILESET, newName, capabilities); + return FilesetChange.rename(newName); + } + + private static TableChange applyCapabilities( + TableChange.RenameTable renameTable, Capability capabilities) { + String newName = + applyCaseSensitiveOnName(Capability.Scope.TABLE, renameTable.getNewName(), capabilities); + applyNameSpecification(Capability.Scope.TABLE, newName, capabilities); + return TableChange.rename(newName); + } + + private static TableChange applyCapabilities( + TableChange.ColumnChange change, Capability capabilities) { + String[] fieldName = applyCaseSensitiveOnColumnName(change.fieldName(), capabilities); + applyNameSpecification(Capability.Scope.COLUMN, fieldName[0], capabilities); + + if (change instanceof TableChange.AddColumn) { + return applyCapabilities((TableChange.AddColumn) change, capabilities); + + } else if (change instanceof TableChange.UpdateColumnNullability) { + return applyCapabilities((TableChange.UpdateColumnNullability) change, capabilities); + + } else if (change instanceof TableChange.UpdateColumnDefaultValue) { + return applyCapabilities(((TableChange.UpdateColumnDefaultValue) change), capabilities); + + } else if (change instanceof TableChange.RenameColumn) { + return applyCapabilities((TableChange.RenameColumn) change, capabilities); + + } else if (change instanceof 
TableChange.DeleteColumn) { + return TableChange.deleteColumn(fieldName, ((TableChange.DeleteColumn) change).getIfExists()); + + } else if (change instanceof TableChange.UpdateColumnAutoIncrement) { + return TableChange.updateColumnAutoIncrement( + fieldName, ((TableChange.UpdateColumnAutoIncrement) change).isAutoIncrement()); + + } else if (change instanceof TableChange.UpdateColumnComment) { + return TableChange.updateColumnComment( + fieldName, ((TableChange.UpdateColumnComment) change).getNewComment()); + + } else if (change instanceof TableChange.UpdateColumnPosition) { + TableChange.UpdateColumnPosition updateColumnPosition = + (TableChange.UpdateColumnPosition) change; + if (updateColumnPosition.getPosition() instanceof TableChange.After) { + TableChange.After afterPosition = (TableChange.After) updateColumnPosition.getPosition(); + String afterFieldName = + applyCaseSensitiveOnName( + Capability.Scope.COLUMN, afterPosition.getColumn(), capabilities); + applyNameSpecification(Capability.Scope.COLUMN, afterFieldName, capabilities); + return TableChange.updateColumnPosition( + fieldName, TableChange.ColumnPosition.after(afterFieldName)); + } + return TableChange.updateColumnPosition(fieldName, updateColumnPosition.getPosition()); + + } else if (change instanceof TableChange.UpdateColumnType) { + return TableChange.updateColumnType( + fieldName, ((TableChange.UpdateColumnType) change).getNewDataType()); + + } else { + throw new IllegalArgumentException("Unsupported column change: " + change); + } + } + private static TableChange applyCapabilities( TableChange.AddColumn addColumn, Capability capabilities) { Column appliedColumn = @@ -54,8 +290,11 @@ private static TableChange applyCapabilities( addColumn.getDefaultValue()), capabilities); + String[] standardizeFieldName = + Arrays.copyOf(addColumn.fieldName(), addColumn.fieldName().length); + standardizeFieldName[0] = appliedColumn.name(); return TableChange.addColumn( - 
applyCaseSensitiveOnColumnName(addColumn.fieldName(), capabilities), + standardizeFieldName, appliedColumn.dataType(), appliedColumn.comment(), addColumn.getPosition(), @@ -89,13 +328,22 @@ private static TableChange applyCapabilities( updateColumnDefaultValue.getNewDefaultValue()); } + private static TableChange applyCapabilities( + TableChange.RenameColumn renameColumn, Capability capabilities) { + String[] fieldName = applyCapabilities(renameColumn.fieldName(), capabilities); + String newName = renameColumn.getNewName(); + if (fieldName.length == 1) { + newName = applyCapabilitiesOnName(Capability.Scope.COLUMN, newName, capabilities); + } + return TableChange.renameColumn(fieldName, newName); + } + private static Column applyCapabilities(Column column, Capability capabilities) { applyColumnNotNull(column, capabilities); applyColumnDefaultValue(column, capabilities); - applyNameSpecification(Capability.Scope.COLUMN, column.name(), capabilities); return Column.of( - applyCaseSensitiveOnName(Capability.Scope.COLUMN, column.name(), capabilities), + applyCapabilitiesOnName(Capability.Scope.COLUMN, column.name(), capabilities), column.dataType(), column.comment(), column.nullable(), @@ -103,6 +351,13 @@ private static Column applyCapabilities(Column column, Capability capabilities) column.defaultValue()); } + private static String applyCapabilitiesOnName( + Capability.Scope scope, String name, Capability capabilities) { + String standardizeName = applyCaseSensitiveOnName(scope, name, capabilities); + applyNameSpecification(scope, standardizeName, capabilities); + return standardizeName; + } + private static String applyCaseSensitiveOnName( Capability.Scope scope, String name, Capability capabilities) { return capabilities.caseSensitiveOnName(scope).supported() ? 
name : name.toLowerCase(); diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/FilesetNormalizeDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/FilesetNormalizeDispatcher.java new file mode 100644 index 00000000000..d89871cbaad --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/catalog/FilesetNormalizeDispatcher.java @@ -0,0 +1,76 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ +package com.datastrato.gravitino.catalog; + +import static com.datastrato.gravitino.catalog.CapabilityHelpers.applyCapabilities; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.connector.capability.Capability; +import com.datastrato.gravitino.exceptions.FilesetAlreadyExistsException; +import com.datastrato.gravitino.exceptions.NoSuchFilesetException; +import com.datastrato.gravitino.exceptions.NoSuchSchemaException; +import com.datastrato.gravitino.file.Fileset; +import com.datastrato.gravitino.file.FilesetChange; +import java.util.Map; + +public class FilesetNormalizeDispatcher implements FilesetDispatcher { + + private final FilesetOperationDispatcher dispatcher; + + public FilesetNormalizeDispatcher(FilesetOperationDispatcher dispatcher) { + this.dispatcher = dispatcher; + } + + @Override + public NameIdentifier[] listFilesets(Namespace namespace) throws NoSuchSchemaException { + Capability capability = dispatcher.getCatalogCapability(namespace); + Namespace standardizedNamespace = + applyCapabilities(namespace, Capability.Scope.FILESET, capability); + NameIdentifier[] identifiers = dispatcher.listFilesets(standardizedNamespace); + return applyCapabilities(identifiers, Capability.Scope.FILESET, capability); + } + + @Override + public Fileset loadFileset(NameIdentifier ident) throws NoSuchFilesetException { + return dispatcher.loadFileset(normalizeNameIdentifier(ident)); + } + + @Override + 
public boolean filesetExists(NameIdentifier ident) { + return dispatcher.filesetExists(normalizeNameIdentifier(ident)); + } + + @Override + public Fileset createFileset( + NameIdentifier ident, + String comment, + Fileset.Type type, + String storageLocation, + Map properties) + throws NoSuchSchemaException, FilesetAlreadyExistsException { + return dispatcher.createFileset( + normalizeNameIdentifier(ident), comment, type, storageLocation, properties); + } + + @Override + public Fileset alterFileset(NameIdentifier ident, FilesetChange... changes) + throws NoSuchFilesetException, IllegalArgumentException { + Capability capability = dispatcher.getCatalogCapability(ident); + return dispatcher.alterFileset( + applyCapabilities(ident, Capability.Scope.FILESET, capability), + applyCapabilities(capability, changes)); + } + + @Override + public boolean dropFileset(NameIdentifier ident) { + return dispatcher.dropFileset(normalizeNameIdentifier(ident)); + } + + private NameIdentifier normalizeNameIdentifier(NameIdentifier ident) { + Capability capability = dispatcher.getCatalogCapability(ident); + return applyCapabilities(ident, Capability.Scope.FILESET, capability); + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/OperationDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/OperationDispatcher.java index 9c503185e92..45d69ec0e3f 100644 --- a/core/src/main/java/com/datastrato/gravitino/catalog/OperationDispatcher.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/OperationDispatcher.java @@ -14,6 +14,7 @@ import com.datastrato.gravitino.connector.BasePropertiesMetadata; import com.datastrato.gravitino.connector.HasPropertyMetadata; import com.datastrato.gravitino.connector.PropertiesMetadata; +import com.datastrato.gravitino.connector.capability.Capability; import com.datastrato.gravitino.exceptions.IllegalNameIdentifierException; import com.datastrato.gravitino.exceptions.NoSuchEntityException; import 
com.datastrato.gravitino.file.FilesetChange; @@ -101,6 +102,20 @@ R doWithCatalog( } } + Capability getCatalogCapability(NameIdentifier ident) { + return doWithCatalog( + getCatalogIdentifier(ident), + CatalogManager.CatalogWrapper::capabilities, + IllegalArgumentException.class); + } + + Capability getCatalogCapability(Namespace namespace) { + return doWithCatalog( + getCatalogIdentifier(NameIdentifier.of(namespace.levels())), + CatalogManager.CatalogWrapper::capabilities, + IllegalArgumentException.class); + } + Set getHiddenPropertyNames( NameIdentifier catalogIdent, ThrowableFunction provider, diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/SchemaNormalizeDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaNormalizeDispatcher.java new file mode 100644 index 00000000000..22da2a01606 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaNormalizeDispatcher.java @@ -0,0 +1,68 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ +package com.datastrato.gravitino.catalog; + +import static com.datastrato.gravitino.catalog.CapabilityHelpers.applyCapabilities; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.connector.capability.Capability; +import com.datastrato.gravitino.exceptions.NoSuchCatalogException; +import com.datastrato.gravitino.exceptions.NoSuchSchemaException; +import com.datastrato.gravitino.exceptions.NonEmptySchemaException; +import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; +import com.datastrato.gravitino.rel.Schema; +import com.datastrato.gravitino.rel.SchemaChange; +import java.util.Map; + +public class SchemaNormalizeDispatcher implements SchemaDispatcher { + + private final SchemaOperationDispatcher dispatcher; + + public SchemaNormalizeDispatcher(SchemaOperationDispatcher dispatcher) { + this.dispatcher = dispatcher; + } + + @Override + public NameIdentifier[] listSchemas(Namespace namespace) throws NoSuchCatalogException { + Capability capability = dispatcher.getCatalogCapability(namespace); + Namespace standardizedNamespace = + applyCapabilities(namespace, Capability.Scope.SCHEMA, capability); + NameIdentifier[] identifiers = dispatcher.listSchemas(standardizedNamespace); + return applyCapabilities(identifiers, Capability.Scope.SCHEMA, capability); + } + + @Override + public boolean schemaExists(NameIdentifier ident) { + return dispatcher.schemaExists(normalizeNameIdentifier(ident)); + } + + @Override + public Schema createSchema(NameIdentifier ident, String comment, Map properties) + throws NoSuchCatalogException, SchemaAlreadyExistsException { + return dispatcher.createSchema(normalizeNameIdentifier(ident), comment, properties); + } + + @Override + public Schema loadSchema(NameIdentifier ident) throws NoSuchSchemaException { + return dispatcher.loadSchema(normalizeNameIdentifier(ident)); + } + + @Override + public Schema alterSchema(NameIdentifier ident, 
SchemaChange... changes) + throws NoSuchSchemaException { + return dispatcher.alterSchema(normalizeNameIdentifier(ident), changes); + } + + @Override + public boolean dropSchema(NameIdentifier ident, boolean cascade) throws NonEmptySchemaException { + return dispatcher.dropSchema(normalizeNameIdentifier(ident), cascade); + } + + private NameIdentifier normalizeNameIdentifier(NameIdentifier ident) { + Capability capability = dispatcher.getCatalogCapability(ident); + return applyCapabilities(ident, Capability.Scope.SCHEMA, capability); + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/TableNormalizeDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/TableNormalizeDispatcher.java new file mode 100644 index 00000000000..68784272e79 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/catalog/TableNormalizeDispatcher.java @@ -0,0 +1,97 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ +package com.datastrato.gravitino.catalog; + +import static com.datastrato.gravitino.catalog.CapabilityHelpers.applyCapabilities; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.connector.capability.Capability; +import com.datastrato.gravitino.exceptions.NoSuchSchemaException; +import com.datastrato.gravitino.exceptions.NoSuchTableException; +import com.datastrato.gravitino.exceptions.TableAlreadyExistsException; +import com.datastrato.gravitino.rel.Column; +import com.datastrato.gravitino.rel.Table; +import com.datastrato.gravitino.rel.TableChange; +import com.datastrato.gravitino.rel.expressions.distributions.Distribution; +import com.datastrato.gravitino.rel.expressions.sorts.SortOrder; +import com.datastrato.gravitino.rel.expressions.transforms.Transform; +import com.datastrato.gravitino.rel.indexes.Index; +import java.util.Map; + +public class TableNormalizeDispatcher implements TableDispatcher { + + private final TableOperationDispatcher dispatcher; + + public TableNormalizeDispatcher(TableOperationDispatcher dispatcher) { + this.dispatcher = dispatcher; + } + + @Override + public NameIdentifier[] listTables(Namespace namespace) throws NoSuchSchemaException { + Capability capability = dispatcher.getCatalogCapability(namespace); + Namespace standardizedNamespace = + applyCapabilities(namespace, Capability.Scope.TABLE, capability); + NameIdentifier[] identifiers = dispatcher.listTables(standardizedNamespace); + return applyCapabilities(identifiers, Capability.Scope.TABLE, capability); + } + + @Override + public Table loadTable(NameIdentifier ident) throws NoSuchTableException { + return dispatcher.loadTable(normalizeNameIdentifier(ident)); + } + + @Override + public Table createTable( + NameIdentifier ident, + Column[] columns, + String comment, + Map properties, + Transform[] partitions, + Distribution distribution, + SortOrder[] sortOrders, + Index[] indexes) + throws 
NoSuchSchemaException, TableAlreadyExistsException { + Capability capability = dispatcher.getCatalogCapability(ident); + return dispatcher.createTable( + applyCapabilities(ident, Capability.Scope.TABLE, capability), + applyCapabilities(columns, capability), + comment, + properties, + applyCapabilities(partitions, capability), + applyCapabilities(distribution, capability), + applyCapabilities(sortOrders, capability), + applyCapabilities(indexes, capability)); + } + + @Override + public Table alterTable(NameIdentifier ident, TableChange... changes) + throws NoSuchTableException, IllegalArgumentException { + Capability capability = dispatcher.getCatalogCapability(ident); + return dispatcher.alterTable( + applyCapabilities(ident, Capability.Scope.TABLE, capability), + applyCapabilities(capability, changes)); + } + + @Override + public boolean dropTable(NameIdentifier ident) { + return dispatcher.dropTable(normalizeNameIdentifier(ident)); + } + + @Override + public boolean purgeTable(NameIdentifier ident) throws UnsupportedOperationException { + return dispatcher.purgeTable(normalizeNameIdentifier(ident)); + } + + @Override + public boolean tableExists(NameIdentifier ident) { + return dispatcher.tableExists(normalizeNameIdentifier(ident)); + } + + private NameIdentifier normalizeNameIdentifier(NameIdentifier ident) { + Capability capability = dispatcher.getCatalogCapability(ident); + return applyCapabilities(ident, Capability.Scope.TABLE, capability); + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/TableOperationDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/TableOperationDispatcher.java index d388979582f..0fc7e187dc3 100644 --- a/core/src/main/java/com/datastrato/gravitino/catalog/TableOperationDispatcher.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/TableOperationDispatcher.java @@ -164,7 +164,7 @@ public Table createTable( t -> t.createTable( ident, - applyCapabilities(columns, c.capabilities()), + 
columns, comment, updatedProperties, partitions == null ? EMPTY_TRANSFORM : partitions, diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/TopicNormalizeDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/TopicNormalizeDispatcher.java new file mode 100644 index 00000000000..753a2f01852 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/catalog/TopicNormalizeDispatcher.java @@ -0,0 +1,69 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ +package com.datastrato.gravitino.catalog; + +import static com.datastrato.gravitino.catalog.CapabilityHelpers.applyCapabilities; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.connector.capability.Capability; +import com.datastrato.gravitino.exceptions.NoSuchSchemaException; +import com.datastrato.gravitino.exceptions.NoSuchTopicException; +import com.datastrato.gravitino.exceptions.TopicAlreadyExistsException; +import com.datastrato.gravitino.messaging.DataLayout; +import com.datastrato.gravitino.messaging.Topic; +import com.datastrato.gravitino.messaging.TopicChange; +import java.util.Map; + +public class TopicNormalizeDispatcher implements TopicDispatcher { + + private final TopicOperationDispatcher dispatcher; + + public TopicNormalizeDispatcher(TopicOperationDispatcher dispatcher) { + this.dispatcher = dispatcher; + } + + @Override + public NameIdentifier[] listTopics(Namespace namespace) throws NoSuchSchemaException { + Capability capability = dispatcher.getCatalogCapability(namespace); + Namespace standardizedNamespace = + applyCapabilities(namespace, Capability.Scope.TOPIC, capability); + NameIdentifier[] identifiers = dispatcher.listTopics(standardizedNamespace); + return applyCapabilities(identifiers, Capability.Scope.TOPIC, capability); + } + + @Override + public Topic loadTopic(NameIdentifier ident) throws NoSuchTopicException { + 
return dispatcher.loadTopic(normalizeNameIdentifier(ident)); + } + + @Override + public boolean topicExists(NameIdentifier ident) { + return dispatcher.topicExists(normalizeNameIdentifier(ident)); + } + + @Override + public Topic createTopic( + NameIdentifier ident, String comment, DataLayout dataLayout, Map properties) + throws NoSuchSchemaException, TopicAlreadyExistsException { + return dispatcher.createTopic(normalizeNameIdentifier(ident), comment, dataLayout, properties); + } + + @Override + public Topic alterTopic(NameIdentifier ident, TopicChange... changes) + throws NoSuchTopicException, IllegalArgumentException { + return dispatcher.alterTopic(normalizeNameIdentifier(ident), changes); + } + + @Override + public boolean dropTopic(NameIdentifier ident) { + return dispatcher.dropTopic(normalizeNameIdentifier(ident)); + } + + private NameIdentifier normalizeNameIdentifier(NameIdentifier ident) { + Capability capability = dispatcher.getCatalogCapability(ident); + return applyCapabilities(ident, Capability.Scope.TOPIC, capability); + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/connector/capability/Capability.java b/core/src/main/java/com/datastrato/gravitino/connector/capability/Capability.java index 9c6dde58dc3..3303ea7b69c 100644 --- a/core/src/main/java/com/datastrato/gravitino/connector/capability/Capability.java +++ b/core/src/main/java/com/datastrato/gravitino/connector/capability/Capability.java @@ -17,7 +17,6 @@ public interface Capability { /** The scope of the capability. 
*/ enum Scope { - CATALOG, SCHEMA, TABLE, COLUMN, diff --git a/core/src/test/java/com/datastrato/gravitino/TestCatalog.java b/core/src/test/java/com/datastrato/gravitino/TestCatalog.java index 0eb79c0b882..bbe4b449a68 100644 --- a/core/src/test/java/com/datastrato/gravitino/TestCatalog.java +++ b/core/src/test/java/com/datastrato/gravitino/TestCatalog.java @@ -8,6 +8,7 @@ import com.datastrato.gravitino.connector.BaseCatalog; import com.datastrato.gravitino.connector.CatalogOperations; +import com.datastrato.gravitino.connector.capability.Capability; import com.datastrato.gravitino.rel.TableCatalog; import java.util.Map; import java.util.Objects; @@ -29,6 +30,11 @@ protected CatalogOperations newOps(Map config) { return new TestCatalogOperations(config); } + @Override + protected Capability newCapability() { + return new TestCatalogCapabilities(); + } + @Override public TableCatalog asTableCatalog() { return (TableCatalog) ops(); diff --git a/core/src/test/java/com/datastrato/gravitino/TestCatalogCapabilities.java b/core/src/test/java/com/datastrato/gravitino/TestCatalogCapabilities.java new file mode 100644 index 00000000000..1874ebf18c2 --- /dev/null +++ b/core/src/test/java/com/datastrato/gravitino/TestCatalogCapabilities.java @@ -0,0 +1,16 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ +package com.datastrato.gravitino; + +import com.datastrato.gravitino.connector.capability.Capability; +import com.datastrato.gravitino.connector.capability.CapabilityResult; + +public class TestCatalogCapabilities implements Capability { + + @Override + public CapabilityResult caseSensitiveOnName(Scope scope) { + return CapabilityResult.unsupported("The case sensitive on name is not supported."); + } +} diff --git a/core/src/test/java/com/datastrato/gravitino/TestCatalogOperations.java b/core/src/test/java/com/datastrato/gravitino/TestCatalogOperations.java index dba2b03d307..47b3dccc9fd 100644 --- a/core/src/test/java/com/datastrato/gravitino/TestCatalogOperations.java +++ b/core/src/test/java/com/datastrato/gravitino/TestCatalogOperations.java @@ -167,6 +167,7 @@ public Table alterTable(NameIdentifier ident, TableChange... changes) Map newProps = table.properties() != null ? Maps.newHashMap(table.properties()) : Maps.newHashMap(); + NameIdentifier newIdent = ident; for (TableChange change : changes) { if (change instanceof TableChange.SetProperty) { newProps.put( @@ -174,6 +175,12 @@ public Table alterTable(NameIdentifier ident, TableChange... changes) ((TableChange.SetProperty) change).getValue()); } else if (change instanceof TableChange.RemoveProperty) { newProps.remove(((TableChange.RemoveProperty) change).getProperty()); + } else if (change instanceof TableChange.RenameTable) { + String newName = ((TableChange.RenameTable) change).getNewName(); + newIdent = NameIdentifier.of(ident.namespace(), newName); + if (tables.containsKey(newIdent)) { + throw new TableAlreadyExistsException("Table %s already exists", ident); + } } else { throw new IllegalArgumentException("Unsupported table change: " + change); } @@ -181,23 +188,19 @@ public Table alterTable(NameIdentifier ident, TableChange... 
changes) TestTable updatedTable = TestTable.builder() - .withName(ident.name()) + .withName(newIdent.name()) .withComment(table.comment()) .withProperties(new HashMap<>(newProps)) .withAuditInfo(updatedAuditInfo) .withColumns(table.columns()) .withPartitioning(table.partitioning()) + .withDistribution(table.distribution()) + .withSortOrders(table.sortOrder()) + .withIndexes(table.index()) .build(); tables.put(ident, updatedTable); - return TestTable.builder() - .withName(ident.name()) - .withComment(table.comment()) - .withProperties(new HashMap<>(newProps)) - .withAuditInfo(updatedAuditInfo) - .withColumns(table.columns()) - .withPartitioning(table.partitioning()) - .build(); + return updatedTable; } @Override @@ -440,8 +443,11 @@ public Fileset createFileset( .withStorageLocation(storageLocation) .build(); - if (tables.containsKey(ident)) { + NameIdentifier schemaIdent = NameIdentifier.of(ident.namespace().levels()); + if (filesets.containsKey(ident)) { throw new FilesetAlreadyExistsException("Fileset %s already exists", ident); + } else if (!schemas.containsKey(schemaIdent)) { + throw new NoSuchSchemaException("Schema %s does not exist", schemaIdent); } else { filesets.put(ident, fileset); } @@ -467,6 +473,7 @@ public Fileset alterFileset(NameIdentifier ident, FilesetChange... changes) TestFileset fileset = filesets.get(ident); Map newProps = fileset.properties() != null ? Maps.newHashMap(fileset.properties()) : Maps.newHashMap(); + NameIdentifier newIdent = ident; for (FilesetChange change : changes) { if (change instanceof FilesetChange.SetProperty) { @@ -475,6 +482,13 @@ public Fileset alterFileset(NameIdentifier ident, FilesetChange... 
changes) ((FilesetChange.SetProperty) change).getValue()); } else if (change instanceof FilesetChange.RemoveProperty) { newProps.remove(((FilesetChange.RemoveProperty) change).getProperty()); + } else if (change instanceof FilesetChange.RenameFileset) { + String newName = ((FilesetChange.RenameFileset) change).getNewName(); + newIdent = NameIdentifier.of(ident.namespace(), newName); + if (filesets.containsKey(newIdent)) { + throw new FilesetAlreadyExistsException("Fileset %s already exists", ident); + } + filesets.remove(ident); } else { throw new IllegalArgumentException("Unsupported fileset change: " + change); } @@ -482,14 +496,14 @@ public Fileset alterFileset(NameIdentifier ident, FilesetChange... changes) TestFileset updatedFileset = TestFileset.builder() - .withName(ident.name()) + .withName(newIdent.name()) .withComment(fileset.comment()) .withProperties(newProps) .withAuditInfo(updatedAuditInfo) .withType(fileset.type()) .withStorageLocation(fileset.storageLocation()) .build(); - filesets.put(ident, updatedFileset); + filesets.put(newIdent, updatedFileset); return updatedFileset; } diff --git a/core/src/test/java/com/datastrato/gravitino/TestTable.java b/core/src/test/java/com/datastrato/gravitino/TestTable.java index 768a50a7209..7b01c050591 100644 --- a/core/src/test/java/com/datastrato/gravitino/TestTable.java +++ b/core/src/test/java/com/datastrato/gravitino/TestTable.java @@ -32,6 +32,7 @@ protected TestTable internalBuild() { table.distribution = distribution; table.sortOrders = sortOrders; table.partitioning = partitioning; + table.indexes = indexes; return table; } } diff --git a/core/src/test/java/com/datastrato/gravitino/authorization/TestAccessControlManager.java b/core/src/test/java/com/datastrato/gravitino/authorization/TestAccessControlManager.java index e65115c3c51..6b69395be53 100644 --- a/core/src/test/java/com/datastrato/gravitino/authorization/TestAccessControlManager.java +++ 
b/core/src/test/java/com/datastrato/gravitino/authorization/TestAccessControlManager.java @@ -245,9 +245,9 @@ public void testLoadRole() { accessControlManager.createRole( "metalake", "loadRole", props, SecurableObjects.ofAllCatalogs(), Lists.newArrayList()); - Role cachedRole = accessControlManager.loadRole("metalake", "loadRole"); + Role cachedRole = accessControlManager.getRole("metalake", "loadRole"); accessControlManager.getRoleManager().getCache().invalidateAll(); - Role role = accessControlManager.loadRole("metalake", "loadRole"); + Role role = accessControlManager.getRole("metalake", "loadRole"); // Verify the cached roleEntity is correct Assertions.assertEquals(role, cachedRole); @@ -258,8 +258,7 @@ public void testLoadRole() { // Test load non-existed group Throwable exception = Assertions.assertThrows( - NoSuchRoleException.class, - () -> accessControlManager.loadRole("metalake", "not-exist")); + NoSuchRoleException.class, () -> accessControlManager.getRole("metalake", "not-exist")); Assertions.assertTrue(exception.getMessage().contains("Role not-exist does not exist")); } @@ -271,11 +270,11 @@ public void testDropRole() { "metalake", "testDrop", props, SecurableObjects.ofAllCatalogs(), Lists.newArrayList()); // Test drop role - boolean dropped = accessControlManager.dropRole("metalake", "testDrop"); + boolean dropped = accessControlManager.deleteRole("metalake", "testDrop"); Assertions.assertTrue(dropped); // Test drop non-existed role - boolean dropped1 = accessControlManager.dropRole("metalake", "no-exist"); + boolean dropped1 = accessControlManager.deleteRole("metalake", "no-exist"); Assertions.assertFalse(dropped1); } diff --git a/core/src/test/java/com/datastrato/gravitino/authorization/TestAccessControlManagerForPermissions.java b/core/src/test/java/com/datastrato/gravitino/authorization/TestAccessControlManagerForPermissions.java index 00dd6127bc4..cf38825fe3f 100644 --- 
a/core/src/test/java/com/datastrato/gravitino/authorization/TestAccessControlManagerForPermissions.java +++ b/core/src/test/java/com/datastrato/gravitino/authorization/TestAccessControlManagerForPermissions.java @@ -117,7 +117,7 @@ public static void tearDown() throws IOException { } @Test - public void testAddRoleToUser() { + public void testGrantRoleToUser() { String notExist = "not-exist"; User user = accessControlManager.getUser(METALAKE, USER); @@ -153,7 +153,7 @@ public void testAddRoleToUser() { } @Test - public void testRemoveRoleFromUser() { + public void testRevokeRoleFromUser() { String notExist = "not-exist"; Assertions.assertTrue(accessControlManager.grantRoleToUser(METALAKE, ROLE, USER)); @@ -179,7 +179,7 @@ public void testRemoveRoleFromUser() { } @Test - public void testAddRoleToGroup() { + public void testGrantRoleToGroup() { String notExist = "not-exist"; Group group = accessControlManager.getGroup(METALAKE, GROUP); @@ -216,7 +216,7 @@ public void testAddRoleToGroup() { } @Test - public void testRemoveRoleFormGroup() { + public void testRevokeRoleFormGroup() { String notExist = "not-exist"; Assertions.assertTrue(accessControlManager.grantRoleToGroup(METALAKE, ROLE, GROUP)); @@ -261,7 +261,7 @@ public void testDropRole() throws IOException { entityStore.put(roleEntity, true); Assertions.assertTrue(accessControlManager.grantRoleToUser(METALAKE, anotherRole, USER)); Assertions.assertTrue(accessControlManager.grantRoleToGroup(METALAKE, anotherRole, GROUP)); - accessControlManager.dropRole(METALAKE, anotherRole); + accessControlManager.deleteRole(METALAKE, anotherRole); Group group = accessControlManager.getGroup(METALAKE, GROUP); Assertions.assertTrue(group.roles().isEmpty()); } diff --git a/core/src/test/java/com/datastrato/gravitino/catalog/TestCatalogManager.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestCatalogManager.java index bd7d66ad282..db043feef1f 100644 --- 
a/core/src/test/java/com/datastrato/gravitino/catalog/TestCatalogManager.java +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestCatalogManager.java @@ -132,30 +132,6 @@ void testCreateWithHiveProperty() throws IOException { ident, Catalog.Type.RELATIONAL, provider, "comment", props3)); } - @Test - void testLoadTable() throws IOException { - NameIdentifier ident = NameIdentifier.of("metalake", "test444"); - // key1 is required; - Map props1 = - ImmutableMap.builder() - .put("key2", "value2") - .put("key1", "value1") - .put("hidden_key", "hidden_value") - .put("mock", "mock") - .build(); - Assertions.assertDoesNotThrow( - () -> - catalogManager.createCatalog( - ident, Catalog.Type.RELATIONAL, provider, "comment", props1)); - - Map properties = catalogManager.loadCatalog(ident).properties(); - Assertions.assertTrue(properties.containsKey("key2")); - Assertions.assertTrue(properties.containsKey("key1")); - Assertions.assertFalse(properties.containsKey("hidden_key")); - Assertions.assertFalse(properties.containsKey(ID_KEY)); - reset(); - } - @Test void testPropertyValidationInAlter() throws IOException { // key1 is required and immutable and do not have default value, is not hidden and not reserved diff --git a/core/src/test/java/com/datastrato/gravitino/catalog/TestFilesetNormalizeDispatcher.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestFilesetNormalizeDispatcher.java new file mode 100644 index 00000000000..b0ea02cc4e6 --- /dev/null +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestFilesetNormalizeDispatcher.java @@ -0,0 +1,75 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ +package com.datastrato.gravitino.catalog; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.exceptions.FilesetAlreadyExistsException; +import com.datastrato.gravitino.file.Fileset; +import com.datastrato.gravitino.file.FilesetChange; +import com.google.common.collect.ImmutableMap; +import java.io.IOException; +import java.util.Arrays; +import java.util.Map; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +public class TestFilesetNormalizeDispatcher extends TestFilesetOperationDispatcher { + private static FilesetNormalizeDispatcher filesetNormalizeDispatcher; + private static SchemaNormalizeDispatcher schemaNormalizeDispatcher; + + @BeforeAll + public static void initialize() throws IOException { + TestFilesetOperationDispatcher.initialize(); + filesetNormalizeDispatcher = new FilesetNormalizeDispatcher(filesetOperationDispatcher); + schemaNormalizeDispatcher = new SchemaNormalizeDispatcher(schemaOperationDispatcher); + } + + @Test + public void testNameCaseInsensitive() { + Namespace filesetNs = Namespace.of(metalake, catalog, "schema112"); + Map props = ImmutableMap.of("k1", "v1", "k2", "v2"); + schemaNormalizeDispatcher.createSchema(NameIdentifier.of(filesetNs.levels()), "comment", props); + + // test case-insensitive in creation + NameIdentifier filesetIdent = NameIdentifier.of(filesetNs, "filesetNAME"); + Fileset createdFileset = + filesetNormalizeDispatcher.createFileset( + filesetIdent, "comment", Fileset.Type.MANAGED, "fileset41", props); + Assertions.assertEquals(filesetIdent.name().toLowerCase(), createdFileset.name()); + + // test case-insensitive in loading + Fileset loadedFileset = filesetNormalizeDispatcher.loadFileset(filesetIdent); + Assertions.assertEquals(filesetIdent.name().toLowerCase(), loadedFileset.name()); + + // test case-insensitive in listing + NameIdentifier[] filesets = 
filesetNormalizeDispatcher.listFilesets(filesetNs); + Arrays.stream(filesets).forEach(s -> Assertions.assertEquals(s.name().toLowerCase(), s.name())); + + // test case-insensitive in altering + Fileset alteredFileset = + filesetNormalizeDispatcher.alterFileset( + NameIdentifier.of(filesetNs, filesetIdent.name().toLowerCase()), + FilesetChange.setProperty("k2", "v2")); + Assertions.assertEquals(filesetIdent.name().toLowerCase(), alteredFileset.name()); + + Exception exception = + Assertions.assertThrows( + FilesetAlreadyExistsException.class, + () -> + filesetNormalizeDispatcher.alterFileset( + NameIdentifier.of(filesetNs, filesetIdent.name().toUpperCase()), + FilesetChange.rename(filesetIdent.name().toUpperCase()))); + Assertions.assertEquals( + "Fileset metalake.catalog.schema112.filesetname already exists", exception.getMessage()); + + // test case-insensitive in dropping + Assertions.assertTrue( + filesetNormalizeDispatcher.dropFileset( + NameIdentifier.of(filesetNs, filesetIdent.name().toUpperCase()))); + Assertions.assertFalse(filesetNormalizeDispatcher.filesetExists(filesetIdent)); + } +} diff --git a/core/src/test/java/com/datastrato/gravitino/catalog/TestFilesetOperationDispatcher.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestFilesetOperationDispatcher.java index 3bcd906f330..a2f97136399 100644 --- a/core/src/test/java/com/datastrato/gravitino/catalog/TestFilesetOperationDispatcher.java +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestFilesetOperationDispatcher.java @@ -19,8 +19,8 @@ import org.junit.jupiter.api.Test; public class TestFilesetOperationDispatcher extends TestOperationDispatcher { - private static FilesetOperationDispatcher filesetOperationDispatcher; - private static SchemaOperationDispatcher schemaOperationDispatcher; + static FilesetOperationDispatcher filesetOperationDispatcher; + static SchemaOperationDispatcher schemaOperationDispatcher; @BeforeAll public static void initialize() throws IOException { diff 
--git a/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaNormalizeDispatcher.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaNormalizeDispatcher.java new file mode 100644 index 00000000000..206ce794e5f --- /dev/null +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaNormalizeDispatcher.java @@ -0,0 +1,57 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ +package com.datastrato.gravitino.catalog; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.rel.Schema; +import com.datastrato.gravitino.rel.SchemaChange; +import com.google.common.collect.ImmutableMap; +import java.io.IOException; +import java.util.Arrays; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +public class TestSchemaNormalizeDispatcher extends TestSchemaOperationDispatcher { + private static SchemaNormalizeDispatcher schemaNormalizeDispatcher; + + @BeforeAll + public static void initialize() throws IOException { + TestSchemaOperationDispatcher.initialize(); + schemaNormalizeDispatcher = new SchemaNormalizeDispatcher(dispatcher); + } + + @Test + public void testNameCaseInsensitive() { + // test case-insensitive in creation + NameIdentifier schemaIdent = NameIdentifier.of(metalake, catalog, "schemaNAME"); + Schema createdSchema = + schemaNormalizeDispatcher.createSchema( + schemaIdent, null, ImmutableMap.of("k1", "v1", "k2", "v2")); + Assertions.assertEquals(schemaIdent.name().toLowerCase(), createdSchema.name()); + + // test case-insensitive in loading + Schema loadSchema = schemaNormalizeDispatcher.loadSchema(schemaIdent); + Assertions.assertEquals(schemaIdent.name().toLowerCase(), loadSchema.name()); + + // test case-insensitive in listing + NameIdentifier[] schemas = + schemaNormalizeDispatcher.listSchemas(Namespace.of(metalake, 
catalog)); + Arrays.stream(schemas).forEach(s -> Assertions.assertEquals(s.name().toLowerCase(), s.name())); + + // test case-insensitive in altering + Schema alteredSchema = + schemaNormalizeDispatcher.alterSchema( + schemaIdent, SchemaChange.setProperty("k2", "v2"), SchemaChange.removeProperty("k1")); + Assertions.assertEquals(schemaIdent.name().toLowerCase(), alteredSchema.name()); + + // test case-insensitive in dropping + Assertions.assertTrue( + schemaNormalizeDispatcher.dropSchema( + NameIdentifier.of(schemaIdent.namespace(), schemaIdent.name().toLowerCase()), false)); + Assertions.assertFalse(schemaNormalizeDispatcher.schemaExists(schemaIdent)); + } +} diff --git a/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaOperationDispatcher.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaOperationDispatcher.java index d44295364a3..6b830e8a269 100644 --- a/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaOperationDispatcher.java +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestSchemaOperationDispatcher.java @@ -34,7 +34,7 @@ public class TestSchemaOperationDispatcher extends TestOperationDispatcher { - private static SchemaOperationDispatcher dispatcher; + static SchemaOperationDispatcher dispatcher; @BeforeAll public static void initialize() throws IOException { @@ -54,8 +54,8 @@ public void testCreateAndListSchemas() throws IOException { Assertions.assertEquals("comment", schema.comment()); testProperties(props, schema.properties()); - // Test required table properties exception - Map illegalTableProperties = + // Test required schema properties exception + Map illegalSchemaProperties = new HashMap() { { put("k2", "v2"); @@ -63,14 +63,14 @@ public void testCreateAndListSchemas() throws IOException { }; testPropertyException( - () -> dispatcher.createSchema(schemaIdent, "comment", illegalTableProperties), + () -> dispatcher.createSchema(schemaIdent, "comment", illegalSchemaProperties), "Properties are 
required and must be set"); // Test reserved table properties exception - illegalTableProperties.put(COMMENT_KEY, "table comment"); - illegalTableProperties.put(ID_KEY, "gravitino.v1.uidfdsafdsa"); + illegalSchemaProperties.put(COMMENT_KEY, "table comment"); + illegalSchemaProperties.put(ID_KEY, "gravitino.v1.uidfdsafdsa"); testPropertyException( - () -> dispatcher.createSchema(schemaIdent, "comment", illegalTableProperties), + () -> dispatcher.createSchema(schemaIdent, "comment", illegalSchemaProperties), "Properties are reserved and cannot be set", "comment", "gravitino.identifier"); diff --git a/core/src/test/java/com/datastrato/gravitino/catalog/TestTableNormalizeDispatcher.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestTableNormalizeDispatcher.java new file mode 100644 index 00000000000..be3132050f7 --- /dev/null +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestTableNormalizeDispatcher.java @@ -0,0 +1,117 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ +package com.datastrato.gravitino.catalog; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.TestColumn; +import com.datastrato.gravitino.exceptions.TableAlreadyExistsException; +import com.datastrato.gravitino.rel.Column; +import com.datastrato.gravitino.rel.Table; +import com.datastrato.gravitino.rel.TableChange; +import com.datastrato.gravitino.rel.expressions.NamedReference; +import com.datastrato.gravitino.rel.expressions.distributions.Distribution; +import com.datastrato.gravitino.rel.expressions.distributions.Distributions; +import com.datastrato.gravitino.rel.expressions.distributions.Strategy; +import com.datastrato.gravitino.rel.expressions.sorts.SortOrder; +import com.datastrato.gravitino.rel.expressions.sorts.SortOrders; +import com.datastrato.gravitino.rel.expressions.transforms.Transform; +import com.datastrato.gravitino.rel.expressions.transforms.Transforms; +import com.datastrato.gravitino.rel.indexes.Index; +import com.datastrato.gravitino.rel.indexes.Indexes; +import com.datastrato.gravitino.rel.types.Types; +import com.google.common.collect.ImmutableMap; +import java.io.IOException; +import java.util.Arrays; +import java.util.Map; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +public class TestTableNormalizeDispatcher extends TestTableOperationDispatcher { + private static TableNormalizeDispatcher tableNormalizeDispatcher; + private static SchemaNormalizeDispatcher schemaNormalizeDispatcher; + + @BeforeAll + public static void initialize() throws IOException { + TestTableOperationDispatcher.initialize(); + tableNormalizeDispatcher = new TableNormalizeDispatcher(tableOperationDispatcher); + schemaNormalizeDispatcher = new SchemaNormalizeDispatcher(schemaOperationDispatcher); + } + + @Test + public void testNameCaseInsensitive() { + Namespace tableNs = Namespace.of(metalake, catalog, 
"schema81"); + Map props = ImmutableMap.of("k1", "v1", "k2", "v2"); + schemaNormalizeDispatcher.createSchema(NameIdentifier.of(tableNs.levels()), "comment", props); + + // test case-insensitive in creation + NameIdentifier tableIdent = NameIdentifier.of(tableNs, "tableNAME"); + Column[] columns = + new Column[] { + TestColumn.builder().withName("colNAME1").withType(Types.StringType.get()).build(), + TestColumn.builder().withName("colNAME2").withType(Types.StringType.get()).build() + }; + Transform[] transforms = new Transform[] {Transforms.identity(columns[0].name())}; + Distribution distribution = + Distributions.fields(Strategy.HASH, 5, new String[] {columns[0].name()}); + SortOrder[] sortOrders = + new SortOrder[] {SortOrders.ascending(NamedReference.field(columns[0].name()))}; + Index[] indexes = new Index[] {Indexes.primary("index1", new String[][] {{columns[0].name()}})}; + Table createdTable = + tableNormalizeDispatcher.createTable( + tableIdent, columns, "comment", props, transforms, distribution, sortOrders, indexes); + assertTableCaseInsensitive(tableIdent, columns, createdTable); + + // test case-insensitive in loading + Table loadedTable = tableNormalizeDispatcher.loadTable(tableIdent); + assertTableCaseInsensitive(tableIdent, columns, loadedTable); + + // test case-insensitive in listing + NameIdentifier[] tableIdents = tableNormalizeDispatcher.listTables(tableNs); + Arrays.stream(tableIdents) + .forEach(s -> Assertions.assertEquals(s.name().toLowerCase(), s.name())); + + // test case-insensitive in altering + Table alteredTable = + tableNormalizeDispatcher.alterTable( + NameIdentifier.of(tableNs, tableIdent.name().toLowerCase()), + TableChange.setProperty("k2", "v2")); + assertTableCaseInsensitive(tableIdent, columns, alteredTable); + + Exception exception = + Assertions.assertThrows( + TableAlreadyExistsException.class, + () -> + tableNormalizeDispatcher.alterTable( + NameIdentifier.of(tableNs, tableIdent.name().toUpperCase()), + 
TableChange.rename(tableIdent.name().toUpperCase()))); + Assertions.assertEquals( + "Table metalake.catalog.schema81.tablename already exists", exception.getMessage()); + + // test case-insensitive in dropping + Assertions.assertTrue( + tableNormalizeDispatcher.dropTable( + NameIdentifier.of(tableNs, tableIdent.name().toUpperCase()))); + } + + private void assertTableCaseInsensitive( + NameIdentifier tableIdent, Column[] expectedColumns, Table table) { + Assertions.assertEquals(tableIdent.name().toLowerCase(), table.name()); + Assertions.assertEquals(expectedColumns[0].name().toLowerCase(), table.columns()[0].name()); + Assertions.assertEquals(expectedColumns[1].name().toLowerCase(), table.columns()[1].name()); + Assertions.assertEquals( + expectedColumns[0].name().toLowerCase(), + table.partitioning()[0].references()[0].fieldName()[0]); + Assertions.assertEquals( + expectedColumns[0].name().toLowerCase(), + table.distribution().references()[0].fieldName()[0]); + Assertions.assertEquals( + expectedColumns[0].name().toLowerCase(), + table.sortOrder()[0].expression().references()[0].fieldName()[0]); + Assertions.assertEquals( + expectedColumns[0].name().toLowerCase(), table.index()[0].fieldNames()[0][0].toLowerCase()); + } +} diff --git a/core/src/test/java/com/datastrato/gravitino/catalog/TestTableOperationDispatcher.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestTableOperationDispatcher.java index e87af83d485..d7cc7236e5c 100644 --- a/core/src/test/java/com/datastrato/gravitino/catalog/TestTableOperationDispatcher.java +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestTableOperationDispatcher.java @@ -37,8 +37,8 @@ import org.junit.jupiter.api.Test; public class TestTableOperationDispatcher extends TestOperationDispatcher { - private static TableOperationDispatcher tableOperationDispatcher; - private static SchemaOperationDispatcher schemaOperationDispatcher; + static TableOperationDispatcher tableOperationDispatcher; + static 
SchemaOperationDispatcher schemaOperationDispatcher; @BeforeAll public static void initialize() throws IOException { diff --git a/core/src/test/java/com/datastrato/gravitino/catalog/TestTopicNormalizeDispatcher.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestTopicNormalizeDispatcher.java new file mode 100644 index 00000000000..8f8cd8e41f0 --- /dev/null +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestTopicNormalizeDispatcher.java @@ -0,0 +1,61 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ +package com.datastrato.gravitino.catalog; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.messaging.Topic; +import com.datastrato.gravitino.messaging.TopicChange; +import com.google.common.collect.ImmutableMap; +import java.io.IOException; +import java.util.Map; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +public class TestTopicNormalizeDispatcher extends TestTopicOperationDispatcher { + private static TopicNormalizeDispatcher topicNormalizeDispatcher; + private static SchemaNormalizeDispatcher schemaNormalizeDispatcher; + + @BeforeAll + public static void initialize() throws IOException { + TestTopicOperationDispatcher.initialize(); + schemaNormalizeDispatcher = new SchemaNormalizeDispatcher(schemaOperationDispatcher); + topicNormalizeDispatcher = new TopicNormalizeDispatcher(topicOperationDispatcher); + } + + @Test + public void testNameCaseInsensitive() { + Namespace topicNs = Namespace.of(metalake, catalog, "schema161"); + Map props = ImmutableMap.of("k1", "v1", "k2", "v2"); + schemaNormalizeDispatcher.createSchema(NameIdentifier.of(topicNs.levels()), "comment", props); + + // test case-insensitive in creation + NameIdentifier topicIdent = NameIdentifier.of(topicNs, "topicNAME"); + Topic createdTopic = 
topicNormalizeDispatcher.createTopic(topicIdent, "comment", null, props); + Assertions.assertEquals(topicIdent.name().toLowerCase(), createdTopic.name()); + + // test case-insensitive in loading + Topic loadedTopic = topicNormalizeDispatcher.loadTopic(topicIdent); + Assertions.assertEquals(topicIdent.name().toLowerCase(), loadedTopic.name()); + + // test case-insensitive in listing + NameIdentifier[] idents = topicNormalizeDispatcher.listTopics(topicNs); + Assertions.assertEquals(1, idents.length); + + // test case-insensitive in altering + Topic alteredTopic = + topicNormalizeDispatcher.alterTopic( + NameIdentifier.of(topicNs, topicIdent.name().toLowerCase()), + TopicChange.setProperty("k2", "v2")); + Assertions.assertEquals(topicIdent.name().toLowerCase(), alteredTopic.name()); + + // test case-insensitive in dropping + Assertions.assertTrue( + topicNormalizeDispatcher.dropTopic( + NameIdentifier.of(topicNs, topicIdent.name().toUpperCase()))); + Assertions.assertFalse(topicNormalizeDispatcher.topicExists(topicIdent)); + } +} diff --git a/core/src/test/java/com/datastrato/gravitino/catalog/TestTopicOperationDispatcher.java b/core/src/test/java/com/datastrato/gravitino/catalog/TestTopicOperationDispatcher.java index 22dee4baa4a..7aeb39ec1e0 100644 --- a/core/src/test/java/com/datastrato/gravitino/catalog/TestTopicOperationDispatcher.java +++ b/core/src/test/java/com/datastrato/gravitino/catalog/TestTopicOperationDispatcher.java @@ -30,8 +30,8 @@ public class TestTopicOperationDispatcher extends TestOperationDispatcher { - private static SchemaOperationDispatcher schemaOperationDispatcher; - private static TopicOperationDispatcher topicOperationDispatcher; + static SchemaOperationDispatcher schemaOperationDispatcher; + static TopicOperationDispatcher topicOperationDispatcher; @BeforeAll public static void initialize() throws IOException { diff --git a/docs/how-to-use-gvfs.md b/docs/how-to-use-gvfs.md index cea93de5e81..be0bc3e6fba 100644 --- 
a/docs/how-to-use-gvfs.md +++ b/docs/how-to-use-gvfs.md @@ -234,9 +234,9 @@ Currently, Gravitino Virtual File System supports two kinds of authentication ty The type of `simple` is the default authentication type in Gravitino Virtual File System. -### How to use simple authentication +### How to use authentication -#### Using `simple` authentication type +#### Using `simple` authentication First, make sure that your Gravitino server is also configured to use the `simple` authentication mode. @@ -260,7 +260,7 @@ Path filesetPath = new Path("gvfs://fileset/test_catalog/test_schema/test_filese FileSystem fs = filesetPath.getFileSystem(conf); ``` -#### Using OAuth authentication +#### Using `OAuth` authentication If you want to use `oauth2` authentication for the Gravitino client in the Gravitino Virtual File System, please refer to this document to complete the configuration of the Gravitino server and the OAuth server: [Security](./security.md). diff --git a/docs/trino-connector/catalog-hive.md b/docs/trino-connector/catalog-hive.md index 70a9ef74e06..f86c9f8e13e 100644 --- a/docs/trino-connector/catalog-hive.md +++ b/docs/trino-connector/catalog-hive.md @@ -44,7 +44,7 @@ per catalog: Users can create a schema with properties through Gravitino Trino connector as follows: ```SQL -CREATE SCHEMA "metalake.catalog".schema_name +CREATE SCHEMA catalog.schema_name ``` ## Table operations @@ -57,7 +57,7 @@ allowing null values, and adding comments. 
The Gravitino connector does not supp The following example shows how to create a table in the Hive catalog: ```shell -CREATE TABLE "metalake.catalog".schema_name.table_name +CREATE TABLE catalog.schema_name.table_name ( name varchar, salary int @@ -112,7 +112,7 @@ Reserved properties: A reserved property is one can't be set by users but can be Users can use the following example to create a table with properties: ```sql -CREATE TABLE "metalake.catalog".dbname.tabname +CREATE TABLE catalog.dbname.tabname ( name varchar, salary int @@ -190,7 +190,7 @@ The results are similar to: gravitino jmx system - test.hive_test + hive_test (4 rows) Query 20231017_082503_00018_6nt3n, FINISHED, 1 node @@ -202,24 +202,24 @@ Other catalogs are regular user-configured Trino catalogs. ### Creating tables and schemas -Create a new schema named `database_01` in `test.hive_test` catalog. +Create a new schema named `database_01` in `hive_test` catalog. ```sql -CREATE SCHEMA "test.hive_test".database_01; +CREATE SCHEMA hive_test.database_01; ``` Create a new schema using HDFS location: ```sql -CREATE SCHEMA "test.hive_test".database_01 WITH ( +CREATE SCHEMA hive_test.database_01 WITH ( location = 'hdfs://hdfs-host:9000/user/hive/warehouse/database_01' ); ``` -Create a new table named `table_01` in schema `"test.hive_test".database_01` and stored in a TEXTFILE format, partitioning by `salary`, bucket by `name` and sorted by `salary`. +Create a new table named `table_01` in schema `hive_test.database_01` and stored in a TEXTFILE format, partitioning by `salary`, bucket by `name` and sorted by `salary`. 
```sql -CREATE TABLE "test.hive_test".database_01.table_01 +CREATE TABLE hive_test.database_01.table_01 ( name varchar, salary int @@ -238,13 +238,13 @@ WITH ( Insert data into the table `table_01`: ```sql -INSERT INTO "test.hive_test".database_01.table_01 (name, salary) VALUES ('ice', 12); +INSERT INTO hive_test.database_01.table_01 (name, salary) VALUES ('ice', 12); ``` Insert data into the table `table_01` from select: ```sql -INSERT INTO "test.hive_test".database_01.table_01 (name, salary) SELECT * FROM "test.hive_test".database_01.table_01; +INSERT INTO hive_test.database_01.table_01 (name, salary) SELECT * FROM hive_test.database_01.table_01; ``` ### Querying data @@ -252,7 +252,7 @@ INSERT INTO "test.hive_test".database_01.table_01 (name, salary) SELECT * FROM " Query the `table_01` table: ```sql -SELECT * FROM "test.hive_test".database_01.table_01; +SELECT * FROM hive_test.database_01.table_01; ``` ### Modify a table @@ -260,19 +260,19 @@ SELECT * FROM "test.hive_test".database_01.table_01; Add a new column `age` to the `table_01` table: ```sql -ALTER TABLE "test.hive_test".database_01.table_01 ADD COLUMN age int; +ALTER TABLE hive_test.database_01.table_01 ADD COLUMN age int; ``` Drop a column `age` from the `table_01` table: ```sql -ALTER TABLE "test.hive_test".database_01.table_01 DROP COLUMN age; +ALTER TABLE hive_test.database_01.table_01 DROP COLUMN age; ``` Rename the `table_01` table to `table_02`: ```sql -ALTER TABLE "test.hive_test".database_01.table_01 RENAME TO "test.hive_test".database_01.table_02; +ALTER TABLE hive_test.database_01.table_01 RENAME TO hive_test.database_01.table_02; ``` ### DROP @@ -280,13 +280,13 @@ ALTER TABLE "test.hive_test".database_01.table_01 RENAME TO "test.hive_test".dat Drop a schema: ```sql -DROP SCHEMA "test.hive_test".database_01; +DROP SCHEMA hive_test.database_01; ``` Drop a table: ```sql -DROP TABLE "test.hive_test".database_01.table_01; +DROP TABLE hive_test.database_01.table_01; ``` ## HDFS config and 
permissions diff --git a/docs/trino-connector/catalog-iceberg.md b/docs/trino-connector/catalog-iceberg.md index 03903f9bd4e..033e3d8938d 100644 --- a/docs/trino-connector/catalog-iceberg.md +++ b/docs/trino-connector/catalog-iceberg.md @@ -141,7 +141,7 @@ The results are similar to: gravitino jmx system - test.iceberg_test + iceberg_test (4 rows) Query 20231017_082503_00018_6nt3n, FINISHED, 1 node @@ -156,13 +156,13 @@ Other catalogs are regular user-configured Trino catalogs. Create a new schema named `database_01` in `test.iceberg_test` catalog. ```sql -CREATE SCHEMA "test.iceberg_test".database_01; +CREATE SCHEMA iceberg_test.database_01; ``` Create a new table named `table_01` in schema `"test.iceberg_test".database_01`. ```sql -CREATE TABLE "test.iceberg_test".database_01.table_01 +CREATE TABLE iceberg_test.database_01.table_01 ( name varchar, salary int @@ -177,13 +177,13 @@ salary int Insert data into the table `table_01`: ```sql -INSERT INTO "test.iceberg_test".database_01.table_01 (name, salary) VALUES ('ice', 12); +INSERT INTO iceberg_test.database_01.table_01 (name, salary) VALUES ('ice', 12); ``` Insert data into the table `table_01` from select: ```sql -INSERT INTO "test.iceberg_test".database_01.table_01 (name, salary) SELECT * FROM "test.iceberg_test".database_01.table_01; +INSERT INTO iceberg_test.database_01.table_01 (name, salary) SELECT * FROM "test.iceberg_test".database_01.table_01; ``` ### Querying data diff --git a/docs/trino-connector/catalog-mysql.md b/docs/trino-connector/catalog-mysql.md index ce0b1298180..0219b3e04cd 100644 --- a/docs/trino-connector/catalog-mysql.md +++ b/docs/trino-connector/catalog-mysql.md @@ -90,7 +90,7 @@ The results are similar to: gravitino jmx system - test.mysql_test + mysql_test (4 rows) Query 20231017_082503_00018_6nt3n, FINISHED, 1 node @@ -105,13 +105,13 @@ Other catalogs are regular user-configured Trino catalogs. Create a new schema named `database_01` in `test.mysql_test` catalog. 
```sql -CREATE SCHEMA "test.mysql_test".database_01; +CREATE SCHEMA mysql_test.database_01; ``` -Create a new table named `table_01` in schema `"test.mysql_test".database_01`. +Create a new table named `table_01` in schema `mysql_test.database_01`. ```sql -CREATE TABLE "test.mysql_test".database_01.table_01 +CREATE TABLE mysql_test.database_01.table_01 ( name varchar, salary int @@ -123,13 +123,13 @@ salary int Insert data into the table `table_01`: ```sql -INSERT INTO "test.mysql_test".database_01.table_01 (name, salary) VALUES ('ice', 12); +INSERT INTO mysql_test.database_01.table_01 (name, salary) VALUES ('ice', 12); ``` Insert data into the table `table_01` from select: ```sql -INSERT INTO "test.mysql_test".database_01.table_01 (name, salary) SELECT * FROM "test.mysql_test".database_01.table_01; +INSERT INTO mysql_test.database_01.table_01 (name, salary) SELECT * FROM "test.mysql_test".database_01.table_01; ``` ### Querying data @@ -137,7 +137,7 @@ INSERT INTO "test.mysql_test".database_01.table_01 (name, salary) SELECT * FROM Query the `table_01` table: ```sql -SELECT * FROM "test.mysql_test".database_01.table_01; +SELECT * FROM mysql_test.database_01.table_01; ``` ### Modify a table @@ -145,19 +145,19 @@ SELECT * FROM "test.mysql_test".database_01.table_01; Add a new column `age` to the `table_01` table: ```sql -ALTER TABLE "test.mysql_test".database_01.table_01 ADD COLUMN age int; +ALTER TABLE mysql_test.database_01.table_01 ADD COLUMN age int; ``` Drop a column `age` from the `table_01` table: ```sql -ALTER TABLE "test.mysql_test".database_01.table_01 DROP COLUMN age; +ALTER TABLE mysql_test.database_01.table_01 DROP COLUMN age; ``` Rename the `table_01` table to `table_02`: ```sql -ALTER TABLE "test.mysql_test".database_01.table_01 RENAME TO "test.mysql_test".database_01.table_02; +ALTER TABLE mysql_test.database_01.table_01 RENAME TO "test.mysql_test".database_01.table_02; ``` ### DROP @@ -165,11 +165,11 @@ ALTER TABLE 
"test.mysql_test".database_01.table_01 RENAME TO "test.mysql_test".d Drop a schema: ```sql -DROP SCHEMA "test.mysql_test".database_01; +DROP SCHEMA mysql_test.database_01; ``` Drop a table: ```sql -DROP TABLE "test.mysql_test".database_01.table_01; +DROP TABLE mysql_test.database_01.table_01; ``` \ No newline at end of file diff --git a/docs/trino-connector/catalog-postgresql.md b/docs/trino-connector/catalog-postgresql.md index ac595b07afe..66b03a92b3b 100644 --- a/docs/trino-connector/catalog-postgresql.md +++ b/docs/trino-connector/catalog-postgresql.md @@ -90,7 +90,7 @@ The results are similar to: gravitino jmx system - test.postgresql_test + postgresql_test (4 rows) Query 20231017_082503_00018_6nt3n, FINISHED, 1 node @@ -102,16 +102,16 @@ Other catalogs are regular user-configured Trino catalogs. ### Creating tables and schemas -Create a new schema named `database_01` in `test.postgresql_test` catalog. +Create a new schema named `database_01` in `postgresql_test` catalog. ```sql -CREATE SCHEMA "test.postgresql_test".database_01; +CREATE SCHEMA postgresql_test.database_01; ``` Create a new table named `table_01` in schema `"test.postgresql_test".database_01`. 
```sql -CREATE TABLE "test.postgresql_test".database_01.table_01 +CREATE TABLE postgresql_test.database_01.table_01 ( name varchar, salary int @@ -123,13 +123,13 @@ salary int Insert data into the table `table_01`: ```sql -INSERT INTO "test.postgresql_test".database_01.table_01 (name, salary) VALUES ('ice', 12); +INSERT INTO postgresql_test.database_01.table_01 (name, salary) VALUES ('ice', 12); ``` Insert data into the table `table_01` from select: ```sql -INSERT INTO "test.postgresql_test".database_01.table_01 (name, salary) SELECT * FROM "test.postgresql_test".database_01.table_01; +INSERT INTO postgresql_test.database_01.table_01 (name, salary) SELECT * FROM "test.postgresql_test".database_01.table_01; ``` ### Querying data @@ -137,7 +137,7 @@ INSERT INTO "test.postgresql_test".database_01.table_01 (name, salary) SELECT * Query the `table_01` table: ```sql -SELECT * FROM "test.postgresql_test".database_01.table_01; +SELECT * FROM postgresql_test.database_01.table_01; ``` ### Modify a table @@ -145,19 +145,19 @@ SELECT * FROM "test.postgresql_test".database_01.table_01; Add a new column `age` to the `table_01` table: ```sql -ALTER TABLE "test.postgresql_test".database_01.table_01 ADD COLUMN age int; +ALTER TABLE postgresql_test.database_01.table_01 ADD COLUMN age int; ``` Drop a column `age` from the `table_01` table: ```sql -ALTER TABLE "test.postgresql_test".database_01.table_01 DROP COLUMN age; +ALTER TABLE postgresql_test.database_01.table_01 DROP COLUMN age; ``` Rename the `table_01` table to `table_02`: ```sql -ALTER TABLE "test.postgresql_test".database_01.table_01 RENAME TO "test.postgresql_test".database_01.table_02; +ALTER TABLE postgresql_test.database_01.table_01 RENAME TO "test.postgresql_test".database_01.table_02; ``` ### Drop @@ -165,11 +165,11 @@ ALTER TABLE "test.postgresql_test".database_01.table_01 RENAME TO "test.postgres Drop a schema: ```sql -DROP SCHEMA "test.postgresql_test".database_01; +DROP SCHEMA postgresql_test.database_01; ``` Drop 
a table: ```sql -DROP TABLE "test.postgresql_test".database_01.table_01; +DROP TABLE postgresql_test.database_01.table_01; ``` \ No newline at end of file diff --git a/docs/trino-connector/configuration.md b/docs/trino-connector/configuration.md index 5b1065abe7d..1b6bf02d8e9 100644 --- a/docs/trino-connector/configuration.md +++ b/docs/trino-connector/configuration.md @@ -6,9 +6,9 @@ license: "Copyright 2023 Datastrato Pvt Ltd. This software is licensed under the Apache License version 2." --- -| Property | Type | Default Value | Description | Required | Since Version | +| Property | Type | Default Value | Description | Required | Since Version | |-----------------------------------|---------|-----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------|---------------| -| connector.name | string | (none) | The `connector.name` defines the name of Trino connector, this value is always 'gravitino'. | Yes | 0.2.0 | +| connector.name | string | (none) | The `connector.name` defines the type of Trino connector, this value is always 'gravitino'. | Yes | 0.2.0 | | gravitino.metalake | string | (none) | The `gravitino.metalake` defines which metalake in Gravitino server the Trino connector uses. Trino connector should set it at start, the value of `gravitino.metalake` needs to be a valid name, Trino connector can detect and load the metalake with catalogs, schemas and tables once created and keep in sync. | Yes | 0.2.0 | | gravitino.uri | string | http://localhost:8090 | The `gravitino.uri` defines the connection URL of the Gravitino server, the default value is `http://localhost:8090`. 
Trino connector can detect and connect to Gravitino server once it is ready, no need to start Gravitino server beforehand. | Yes | 0.2.0 | -| gravitino.simplify-catalog-names | boolean | false | The `gravitino.simplify-catalog-names` setting omits the metalake prefix from catalog names when set to true. If you set it to true, Trino will configure only one Graviton catalog. | NO | 0.5.0 | +| gravitino.simplify-catalog-names | boolean | true | The `gravitino.simplify-catalog-names` setting omits the metalake prefix from catalog names when set to true. | NO | 0.5.0 | diff --git a/docs/trino-connector/development.md b/docs/trino-connector/development.md index 1c7a7b167af..c1393e14c87 100644 --- a/docs/trino-connector/development.md +++ b/docs/trino-connector/development.md @@ -249,6 +249,7 @@ gravitino.uri=http://localhost:8090 # The name of the metalake to which the connector is connected, you need to change it according to your environment gravitino.metalake=test + ``` - Trino configuration file: `config.properties` ```properties diff --git a/docs/trino-connector/installation.md b/docs/trino-connector/installation.md index 707d1d3802b..2fb44c53c35 100644 --- a/docs/trino-connector/installation.md +++ b/docs/trino-connector/installation.md @@ -74,11 +74,13 @@ To configure Gravitino connector correctly, you need to put the following config connector.name=gravitino gravitino.uri=http://gravitino-server-host:8090 gravitino.metalake=test +gravitino.simplify-catalog-names=true ``` - The `gravitino.name` defines which Gravitino connector is used. It must be `gravitino`. - The `gravitino.metalake` defines which metalake are used. It should exist in the Gravitino server. - The `gravitino.uri` defines the connection information about Gravitino server. Make sure your container can access the Gravitino server. +- The `gravitino.simplify-catalog-names` setting omits the metalake prefix from catalog names when set to true. 
Full configurations for Gravitino connector can be seen [here](configuration.md) @@ -126,7 +128,7 @@ memory tpcds tpch system -test.jdbc-mysql +jdbc-mysql ``` -The catalog named 'test.jdbc-mysql' is the catalog that you created by gravitino server, and you can use it to access the mysql database like other Trino catalogs. +The catalog named 'jdbc-mysql' is the catalog that you created by gravitino server, and you can use it to access the mysql database like other Trino catalogs. diff --git a/docs/trino-connector/supported-catalog.md b/docs/trino-connector/supported-catalog.md index b578e668a51..158002c469f 100644 --- a/docs/trino-connector/supported-catalog.md +++ b/docs/trino-connector/supported-catalog.md @@ -75,7 +75,7 @@ The result is like: ```test name | provider | properties --------------+----------+------------------------------------------------------------------------------------------------------------- - test.gt_hive | hive | {gravitino.bypass.hive.metastore.client.capability.check=false, metastore.uris=thrift://trino-ci-hive:9083} + gt_hive | hive | {gravitino.bypass.hive.metastore.client.capability.check=false, metastore.uris=thrift://trino-ci-hive:9083} ``` Example: diff --git a/docs/trino-connector/trino-connector.md b/docs/trino-connector/trino-connector.md index 1858d695963..648fc579034 100644 --- a/docs/trino-connector/trino-connector.md +++ b/docs/trino-connector/trino-connector.md @@ -15,17 +15,14 @@ Once metadata such as catalogs, schemas, or tables are changed in Gravitino, Tri about 3~10 seconds. ::: -The loading of Gravitino's catalogs into Trino follows the naming convention: +By default, the loading of Gravitino's catalogs into Trino follows the naming convention: ```text -{metalake}.{catalog} +{catalog} ``` -Regarding `metalake` and `catalog`, -you can refer to [Create a Metalake](../manage-relational-metadata-using-gravitino.md#create-a-metalake), [Create a Catalog](../manage-relational-metadata-using-gravitino.md#create-a-catalog). 
- Usage in queries is as follows: ```text -SELECT * from "metalake.catalog".dbname.tabname +SELECT * from catalog.dbname.tabname ``` diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index e5dbf1a46fe..c581d27a1ca 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -53,6 +53,7 @@ kyuubi = "1.8.0" kafka = "3.4.0" curator = "2.12.0" awaitility = "4.2.1" +servlet = "3.1.0" protobuf-plugin = "0.9.2" spotless-plugin = '6.11.0' @@ -164,6 +165,7 @@ rauschig = { group = "org.rauschig", name = "jarchivelib", version.ref = "rausch mybatis = { group = "org.mybatis", name = "mybatis", version.ref = "mybatis"} h2db = { group = "com.h2database", name = "h2", version.ref = "h2db"} awaitility = { group = "org.awaitility", name = "awaitility", version.ref = "awaitility" } +servlet = { group = "javax.servlet", name = "javax.servlet-api", version.ref = "servlet" } [bundles] log4j = ["slf4j-api", "log4j-slf4j2-impl", "log4j-api", "log4j-core", "log4j-12-api"] diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoConnectorIT.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoConnectorIT.java index 0067763c4b4..1f8c90ce769 100644 --- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoConnectorIT.java +++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/trino/TrinoConnectorIT.java @@ -131,10 +131,9 @@ public static void stopDockerContainer() { public static void createSchema() throws TException, InterruptedException { String sql1 = String.format( - "CREATE SCHEMA \"%s.%s\".%s WITH (\n" + "CREATE SCHEMA %s.%s WITH (\n" + " location = 'hdfs://%s:%d/user/hive/warehouse/%s.db'\n" + ")", - metalakeName, catalogName, databaseName, containerSuite.getHiveContainer().getContainerIpAddress(), @@ -149,17 +148,13 @@ public static void createSchema() throws TException, InterruptedException { containerSuite 
.getTrinoContainer() .executeQuerySQL( - String.format( - "show schemas from \"%s.%s\" like '%s'", - metalakeName, catalogName, databaseName)); + String.format("show schemas from %s like '%s'", catalogName, databaseName)); Assertions.assertEquals(r.get(0).get(0), databaseName); } @Test public void testShowSchemas() { - String sql = - String.format( - "SHOW SCHEMAS FROM \"%s.%s\" LIKE '%s'", metalakeName, catalogName, databaseName); + String sql = String.format("SHOW SCHEMAS FROM %s LIKE '%s'", catalogName, databaseName); ArrayList> queryData = containerSuite.getTrinoContainer().executeQuerySQL(sql); Assertions.assertEquals(queryData.get(0).get(0), databaseName); @@ -169,7 +164,7 @@ public void testShowSchemas() { public void testCreateTable() throws TException, InterruptedException { String sql3 = String.format( - "CREATE TABLE \"%s.%s\".%s.%s (\n" + "CREATE TABLE %s.%s.%s (\n" + " col1 varchar,\n" + " col2 varchar,\n" + " col3 varchar\n" @@ -177,7 +172,7 @@ public void testCreateTable() throws TException, InterruptedException { + "WITH (\n" + " format = 'TEXTFILE'\n" + ")", - metalakeName, catalogName, databaseName, tab1Name); + catalogName, databaseName, tab1Name); containerSuite.getTrinoContainer().executeUpdateSQL(sql3); // Verify in Gravitino Server @@ -192,9 +187,7 @@ public void testCreateTable() throws TException, InterruptedException { private void testShowTable() { String sql = - String.format( - "SHOW TABLES FROM \"%s.%s\".%s LIKE '%s'", - metalakeName, catalogName, databaseName, tab1Name); + String.format("SHOW TABLES FROM %s.%s LIKE '%s'", catalogName, databaseName, tab1Name); ArrayList> queryData = containerSuite.getTrinoContainer().executeQuerySQL(sql); Assertions.assertEquals(queryData.get(0).get(0), tab1Name); @@ -205,9 +198,7 @@ private void verifySchemaAndTable(String dbName, String tableName) { ArrayList> r = containerSuite .getTrinoContainer() - .executeQuerySQL( - String.format( - "show schemas from \"%s.%s\" like '%s'", metalakeName, 
catalogName, dbName)); + .executeQuerySQL(String.format("show schemas from %s like '%s'", catalogName, dbName)); Assertions.assertEquals(r.get(0).get(0), dbName); // Compare table @@ -215,16 +206,14 @@ private void verifySchemaAndTable(String dbName, String tableName) { containerSuite .getTrinoContainer() .executeQuerySQL( - String.format( - "show create table \"%s.%s\".%s.%s", - metalakeName, catalogName, dbName, tableName)); + String.format("show create table %s.%s.%s", catalogName, dbName, tableName)); Assertions.assertTrue(r.get(0).get(0).contains(tableName)); } public void testScenarioTable1() throws TException, InterruptedException { String sql3 = String.format( - "CREATE TABLE \"%s.%s\".%s.%s (\n" + "CREATE TABLE %s.%s.%s (\n" + " user_name varchar,\n" + " gender varchar,\n" + " age varchar,\n" @@ -238,7 +227,7 @@ public void testScenarioTable1() throws TException, InterruptedException { + "WITH (\n" + " format = 'TEXTFILE'\n" + ")", - metalakeName, catalogName, databaseName, scenarioTab1Name); + catalogName, databaseName, scenarioTab1Name); containerSuite.getTrinoContainer().executeUpdateSQL(sql3); // Verify in Gravitino Server @@ -258,8 +247,8 @@ public void testScenarioTable1() throws TException, InterruptedException { StringBuilder sql5 = new StringBuilder( String.format( - "INSERT INTO \"%s.%s\".%s.%s (user_name, gender, age, phone) VALUES", - metalakeName, catalogName, databaseName, scenarioTab1Name)); + "INSERT INTO %s.%s.%s (user_name, gender, age, phone) VALUES", + catalogName, databaseName, scenarioTab1Name)); int index = 0; for (ArrayList record : table1Data) { sql5.append( @@ -276,8 +265,8 @@ public void testScenarioTable1() throws TException, InterruptedException { // Select data from table1 and verify it String sql6 = String.format( - "SELECT user_name, gender, age, phone FROM \"%s.%s\".%s.%s ORDER BY user_name", - metalakeName, catalogName, databaseName, scenarioTab1Name); + "SELECT user_name, gender, age, phone FROM %s.%s.%s ORDER BY 
user_name", + catalogName, databaseName, scenarioTab1Name); ArrayList> table1QueryData = containerSuite.getTrinoContainer().executeQuerySQL(sql6); Assertions.assertEquals(table1Data, table1QueryData); @@ -286,7 +275,7 @@ public void testScenarioTable1() throws TException, InterruptedException { public void testScenarioTable2() throws TException, InterruptedException { String sql4 = String.format( - "CREATE TABLE \"%s.%s\".%s.%s (\n" + "CREATE TABLE %s.%s.%s (\n" + " user_name varchar,\n" + " consumer varchar,\n" + " recharge varchar,\n" @@ -297,7 +286,7 @@ public void testScenarioTable2() throws TException, InterruptedException { + "WITH (\n" + " format = 'TEXTFILE'\n" + ")", - metalakeName, catalogName, databaseName, scenarioTab2Name); + catalogName, databaseName, scenarioTab2Name); containerSuite.getTrinoContainer().executeUpdateSQL(sql4); // Verify in Gravitino Server @@ -318,8 +307,8 @@ public void testScenarioTable2() throws TException, InterruptedException { StringBuilder sql7 = new StringBuilder( String.format( - "INSERT INTO \"%s.%s\".%s.%s (user_name, consumer, recharge, event_time) VALUES", - metalakeName, catalogName, databaseName, scenarioTab2Name)); + "INSERT INTO %s.%s.%s (user_name, consumer, recharge, event_time) VALUES", + catalogName, databaseName, scenarioTab2Name)); for (ArrayList record : table2Data) { sql7.append( String.format( @@ -335,8 +324,8 @@ public void testScenarioTable2() throws TException, InterruptedException { // Select data from table1 and verify it String sql8 = String.format( - "SELECT user_name, consumer, recharge, event_time FROM \"%s.%s\".%s.%s ORDER BY user_name", - metalakeName, catalogName, databaseName, scenarioTab2Name); + "SELECT user_name, consumer, recharge, event_time FROM %s.%s.%s ORDER BY user_name", + catalogName, databaseName, scenarioTab2Name); ArrayList> table2QueryData = containerSuite.getTrinoContainer().executeQuerySQL(sql8); Assertions.assertEquals(table2Data, table2QueryData); @@ -349,11 +338,11 @@ public 
void testScenarioJoinTwoTable() throws TException, InterruptedException { String sql9 = String.format( - "SELECT * FROM (SELECT t1.user_name as user_name, gender, age, phone, consumer, recharge, event_time FROM \"%1$s.%2$s\".%3$s.%4$s AS t1\n" + "SELECT * FROM (SELECT t1.user_name as user_name, gender, age, phone, consumer, recharge, event_time FROM %1$s.%2$s.%3$s AS t1\n" + "JOIN\n" - + " (SELECT user_name, consumer, recharge, event_time FROM \"%1$s.%2$s\".%3$s.%5$s) AS t2\n" + + " (SELECT user_name, consumer, recharge, event_time FROM %1$s.%2$s.%4$s) AS t2\n" + " ON t1.user_name = t2.user_name) ORDER BY user_name", - metalakeName, catalogName, databaseName, scenarioTab1Name, scenarioTab2Name); + catalogName, databaseName, scenarioTab1Name, scenarioTab2Name); ArrayList> joinQueryData = containerSuite.getTrinoContainer().executeQuerySQL(sql9); ArrayList> joinData = new ArrayList<>(); @@ -379,8 +368,8 @@ void testHiveSchemaCreatedByTrino() { String createSchemaSql = String.format( - "CREATE SCHEMA \"%s.%s\".%s with( location = 'hdfs://localhost:9000/user/hive/warehouse/hive_schema_1123123')", - metalakeName, catalogName, schemaName); + "CREATE SCHEMA %s.%s with( location = 'hdfs://localhost:9000/user/hive/warehouse/hive_schema_1123123')", + catalogName, schemaName); containerSuite.getTrinoContainer().executeUpdateSQL(createSchemaSql); Schema schema = @@ -395,17 +384,16 @@ void testHiveTableCreatedByTrino() { String schemaName = GravitinoITUtils.genRandomName("schema").toLowerCase(); String tableName = GravitinoITUtils.genRandomName("table").toLowerCase(); - String createSchemaSql = - String.format("CREATE SCHEMA \"%s.%s\".%s", metalakeName, catalogName, schemaName); + String createSchemaSql = String.format("CREATE SCHEMA %s.%s", catalogName, schemaName); containerSuite.getTrinoContainer().executeUpdateSQL(createSchemaSql); String createTableSql = String.format( - "CREATE TABLE \"%s.%s\".%s.%s (id int, name varchar)" + "CREATE TABLE %s.%s.%s (id int, name varchar)" + 
" with ( serde_name = '123455', location = 'hdfs://localhost:9000/user/hive/warehouse/hive_schema.db/hive_table'" + ", partitioned_by = ARRAY['name'], bucketed_by = ARRAY['id'], bucket_count = 50, sorted_by = ARRAY['name']" + ")", - metalakeName, catalogName, schemaName, tableName); + catalogName, schemaName, tableName); containerSuite.getTrinoContainer().executeUpdateSQL(createTableSql); Table table = @@ -473,8 +461,7 @@ void testHiveSchemaCreatedByGravitino() throws InterruptedException { .put("location", "hdfs://localhost:9000/user/hive/warehouse/hive_schema_1223445.db") .build()); - String sql = - String.format("show create schema \"%s.%s\".%s", metalakeName, catalogName, schemaName); + String sql = String.format("show create schema %s.%s", catalogName, schemaName); boolean success = checkTrinoHasLoaded(sql, 30); if (!success) { Assertions.fail("Trino fail to load schema created by gravitino: " + sql); @@ -623,27 +610,25 @@ void testColumnTypeNotNullByTrino() throws InterruptedException { .put("jdbc-url", String.format("jdbc:mysql://%s:3306?useSSL=false", hiveHost)) .build()); - String sql = String.format("show catalogs like '%s.%s'", metalakeName, catalogName); + String sql = String.format("show catalogs like '%s'", catalogName); Assertions.assertTrue(checkTrinoHasLoaded(sql, 30)); String schemaName = GravitinoITUtils.genRandomName("schema").toLowerCase(); String tableName = GravitinoITUtils.genRandomName("table").toLowerCase(); - String createSchemaSql = - String.format("CREATE SCHEMA \"%s.%s\".%s", metalakeName, catalogName, schemaName); + String createSchemaSql = String.format("CREATE SCHEMA %s.%s", catalogName, schemaName); containerSuite.getTrinoContainer().executeUpdateSQL(createSchemaSql); - sql = String.format("show create schema \"%s.%s\".%s", metalakeName, catalogName, schemaName); + sql = String.format("show create schema %s.%s", catalogName, schemaName); Assertions.assertTrue(checkTrinoHasLoaded(sql, 30)); String createTableSql = String.format( - 
"CREATE TABLE \"%s.%s\".%s.%s (id int not null, name varchar not null)", - metalakeName, catalogName, schemaName, tableName); + "CREATE TABLE %s.%s.%s (id int not null, name varchar not null)", + catalogName, schemaName, tableName); containerSuite.getTrinoContainer().executeUpdateSQL(createTableSql); String showCreateTableSql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName); + String.format("show create table %s.%s.%s", catalogName, schemaName, tableName); ArrayList> rs = containerSuite.getTrinoContainer().executeQuerySQL(showCreateTableSql); Assertions.assertTrue(rs.get(0).get(0).toLowerCase(Locale.ENGLISH).contains("not null")); @@ -652,8 +637,7 @@ void testColumnTypeNotNullByTrino() throws InterruptedException { .getTrinoContainer() .executeUpdateSQL( String.format( - "insert into \"%s.%s\".%s.%s values(1, 'a')", - metalakeName, catalogName, schemaName, tableName)); + "insert into %s.%s.%s values(1, 'a')", catalogName, schemaName, tableName)); Assertions.assertThrows( RuntimeException.class, () -> @@ -661,8 +645,8 @@ void testColumnTypeNotNullByTrino() throws InterruptedException { .getTrinoContainer() .executeUpdateSQL( String.format( - "insert into \"%s.%s\".%s.%s values(null, 'a')", - metalakeName, catalogName, schemaName, tableName))); + "insert into %s.%s.%s values(null, 'a')", + catalogName, schemaName, tableName))); Assertions.assertThrows( RuntimeException.class, () -> @@ -670,8 +654,8 @@ void testColumnTypeNotNullByTrino() throws InterruptedException { .getTrinoContainer() .executeUpdateSQL( String.format( - "insert into \"%s.%s\".%s.%s values(1, null)", - metalakeName, catalogName, schemaName, tableName))); + "insert into %s.%s.%s values(1, null)", + catalogName, schemaName, tableName))); Assertions.assertThrows( RuntimeException.class, () -> @@ -679,8 +663,8 @@ void testColumnTypeNotNullByTrino() throws InterruptedException { .getTrinoContainer() .executeUpdateSQL( String.format( - "insert 
into \"%s.%s\".%s.%s values(null, null)", - metalakeName, catalogName, schemaName, tableName))); + "insert into %s.%s.%s values(null, null)", + catalogName, schemaName, tableName))); catalog .asTableCatalog() @@ -749,9 +733,7 @@ void testHiveTableCreatedByGravitino() throws InterruptedException { .loadTable(NameIdentifier.of(metalakeName, catalogName, schemaName, tableName)); Assertions.assertNotNull(table); - String sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName); + String sql = String.format("show create table %s.%s.%s", catalogName, schemaName, tableName); boolean success = checkTrinoHasLoaded(sql, 30); if (!success) { Assertions.fail("Trino fail to load table created by gravitino: " + sql); @@ -777,13 +759,11 @@ void testHiveTableCreatedByGravitino() throws InterruptedException { tableName = GravitinoITUtils.genRandomName("table_format1").toLowerCase(); sql = String.format( - "CREATE TABLE \"%s.%s\".%s.%s (id int, name varchar) with (format = 'ORC')", - metalakeName, catalogName, schemaName, tableName); + "CREATE TABLE %s.%s.%s (id int, name varchar) with (format = 'ORC')", + catalogName, schemaName, tableName); containerSuite.getTrinoContainer().executeUpdateSQL(sql); - sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName); + sql = String.format("show create table %s.%s.%s", catalogName, schemaName, tableName); Assertions.assertTrue(checkTrinoHasLoaded(sql, 30), "Trino fail to create table:" + tableName); data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); @@ -797,12 +777,10 @@ void testHiveTableCreatedByGravitino() throws InterruptedException { tableName = GravitinoITUtils.genRandomName("table_format2").toLowerCase(); sql = String.format( - "CREATE TABLE \"%s.%s\".%s.%s (id int, name varchar) with (format = 'ORC', input_format = 'org.apache.hadoop.mapred.TextInputFormat')", - metalakeName, catalogName, schemaName, 
tableName); + "CREATE TABLE %s.%s.%s (id int, name varchar) with (format = 'ORC', input_format = 'org.apache.hadoop.mapred.TextInputFormat')", + catalogName, schemaName, tableName); containerSuite.getTrinoContainer().executeUpdateSQL(sql); - sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName); + sql = String.format("show create table %s.%s.%s", catalogName, schemaName, tableName); Assertions.assertTrue(checkTrinoHasLoaded(sql, 30), "Trino fail to create table:" + tableName); data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); @@ -816,12 +794,10 @@ void testHiveTableCreatedByGravitino() throws InterruptedException { tableName = GravitinoITUtils.genRandomName("table_format3").toLowerCase(); sql = String.format( - "CREATE TABLE \"%s.%s\".%s.%s (id int, name varchar) with (format = 'ORC', output_format = 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat')", - metalakeName, catalogName, schemaName, tableName); + "CREATE TABLE %s.%s.%s (id int, name varchar) with (format = 'ORC', output_format = 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat')", + catalogName, schemaName, tableName); containerSuite.getTrinoContainer().executeUpdateSQL(sql); - sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName); + sql = String.format("show create table %s.%s.%s", catalogName, schemaName, tableName); Assertions.assertTrue(checkTrinoHasLoaded(sql, 30), "Trino fail to create table:" + tableName); data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); @@ -860,7 +836,7 @@ void testHiveCatalogCreatedByGravitino() throws InterruptedException { Assertions.assertEquals("true", catalog.properties().get("hive.create-empty-bucket-files")); Assertions.assertEquals("true", catalog.properties().get("hive.validate-bucketing")); - String sql = String.format("show catalogs like '%s.%s'", metalakeName, catalogName); + 
String sql = String.format("show catalogs like '%s'", catalogName); boolean success = checkTrinoHasLoaded(sql, 30); if (!success) { Assertions.fail("Trino fail to load catalogs created by gravitino: " + sql); @@ -868,7 +844,7 @@ void testHiveCatalogCreatedByGravitino() throws InterruptedException { // Because we assign 'hive.target-max-file-size' a wrong value, trino can't load the catalog String data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); - Assertions.assertEquals(metalakeName + "." + catalogName, data); + Assertions.assertEquals(catalogName, data); } @Test @@ -899,7 +875,7 @@ void testWrongHiveCatalogProperty() throws InterruptedException { Assertions.assertEquals("true", catalog.properties().get("hive.create-empty-bucket-files")); Assertions.assertEquals("true", catalog.properties().get("hive.validate-bucketing")); - String sql = String.format("show catalogs like '%s.%s'", metalakeName, catalogName); + String sql = String.format("show catalogs like '%s'", catalogName); checkTrinoHasLoaded(sql, 6); // Because we assign 'hive.target-max-file-size' a wrong value, trino can't load the catalog Assertions.assertTrue(containerSuite.getTrinoContainer().executeQuerySQL(sql).isEmpty()); @@ -966,9 +942,7 @@ void testIcebergTableAndSchemaCreatedByGravitino() throws InterruptedException { .loadTable(NameIdentifier.of(metalakeName, catalogName, schemaName, tableName)); Assertions.assertNotNull(table); - String sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName); + String sql = String.format("show create table %s.%s.%s", catalogName, schemaName, tableName); boolean success = checkTrinoHasLoaded(sql, 30); if (!success) { @@ -985,8 +959,8 @@ void testIcebergTableAndSchemaCreatedByGravitino() throws InterruptedException { String tableCreatedByTrino = GravitinoITUtils.genRandomName("table").toLowerCase(); String createTableSql = String.format( - "CREATE TABLE \"%s.%s\".%s.%s (id int, name 
varchar) with (partitioning = ARRAY['name'], sorted_by = ARRAY['id'])", - metalakeName, catalogName, schemaName, tableCreatedByTrino); + "CREATE TABLE %s.%s.%s (id int, name varchar) with (partitioning = ARRAY['name'], sorted_by = ARRAY['id'])", + catalogName, schemaName, tableCreatedByTrino); containerSuite.getTrinoContainer().executeUpdateSQL(createTableSql); table = @@ -1007,14 +981,12 @@ void testIcebergTableAndSchemaCreatedByTrino() { String schemaName = GravitinoITUtils.genRandomName("schema").toLowerCase(); String tableName = GravitinoITUtils.genRandomName("table").toLowerCase(); - String createSchemaSql = - String.format("CREATE SCHEMA \"%s.%s\".%s", metalakeName, catalogName, schemaName); + String createSchemaSql = String.format("CREATE SCHEMA %s.%s", catalogName, schemaName); containerSuite.getTrinoContainer().executeUpdateSQL(createSchemaSql); String createTableSql = String.format( - "CREATE TABLE \"%s.%s\".%s.%s (id int, name varchar)", - metalakeName, catalogName, schemaName, tableName); + "CREATE TABLE %s.%s.%s (id int, name varchar)", catalogName, schemaName, tableName); containerSuite.getTrinoContainer().executeUpdateSQL(createTableSql); Table table = @@ -1061,14 +1033,14 @@ void testIcebergCatalogCreatedByGravitino() throws InterruptedException { Catalog catalog = createdMetalake.loadCatalog(NameIdentifier.of(metalakeName, catalogName)); Assertions.assertEquals("root", catalog.properties().get("jdbc-user")); - String sql = String.format("show catalogs like '%s.%s'", metalakeName, catalogName); + String sql = String.format("show catalogs like '%s'", catalogName); boolean success = checkTrinoHasLoaded(sql, 30); if (!success) { Assertions.fail("Trino fail to load catalogs created by gravitino: " + sql); } String data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); - Assertions.assertEquals(metalakeName + "." 
+ catalogName, data); + Assertions.assertEquals(catalogName, data); catalog .asSchemas() @@ -1077,12 +1049,10 @@ void testIcebergCatalogCreatedByGravitino() throws InterruptedException { "Created by gravitino client", ImmutableMap.builder().build()); - sql = - String.format("show schemas in \"%s.%s\" like '%s'", metalakeName, catalogName, schemaName); + sql = String.format("show schemas in %s like '%s'", catalogName, schemaName); Assertions.assertTrue(checkTrinoHasLoaded(sql, 30)); - final String sql1 = - String.format("drop schema \"%s.%s\".%s cascade", metalakeName, catalogName, schemaName); + final String sql1 = String.format("drop schema %s.%s cascade", catalogName, schemaName); // Will fail because the iceberg catalog does not support cascade drop TrinoContainer trinoContainer = containerSuite.getTrinoContainer(); Assertions.assertThrows( @@ -1091,8 +1061,7 @@ void testIcebergCatalogCreatedByGravitino() throws InterruptedException { trinoContainer.executeUpdateSQL(sql1); }); - final String sql2 = - String.format("show schemas in \"%s.%s\" like '%s'", metalakeName, catalogName, schemaName); + final String sql2 = String.format("show schemas in %s like '%s'", catalogName, schemaName); success = checkTrinoHasLoaded(sql2, 30); if (!success) { Assertions.fail("Trino fail to load catalogs created by gravitino: " + sql2); @@ -1104,8 +1073,7 @@ void testIcebergCatalogCreatedByGravitino() throws InterruptedException { .asSchemas() .dropSchema(NameIdentifier.of(metalakeName, catalogName, schemaName), true); Assertions.assertFalse(success); - final String sql3 = - String.format("show schemas in \"%s.%s\" like '%s'", metalakeName, catalogName, schemaName); + final String sql3 = String.format("show schemas in %s like '%s'", catalogName, schemaName); success = checkTrinoHasLoaded(sql3, 30); if (!success) { Assertions.fail("Trino fail to load catalogs created by gravitino: " + sql); @@ -1143,14 +1111,14 @@ void testMySQLCatalogCreatedByGravitino() throws InterruptedException 
{ Catalog catalog = createdMetalake.loadCatalog(NameIdentifier.of(metalakeName, catalogName)); Assertions.assertEquals("root", catalog.properties().get("jdbc-user")); - String sql = String.format("show catalogs like '%s.%s'", metalakeName, catalogName); + String sql = String.format("show catalogs like '%s'", catalogName); boolean success = checkTrinoHasLoaded(sql, 30); if (!success) { Assertions.fail("Trino fail to load catalogs created by gravitino: " + sql); } String data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); - Assertions.assertEquals(metalakeName + "." + catalogName, data); + Assertions.assertEquals(catalogName, data); } @Test @@ -1186,14 +1154,14 @@ void testMySQLTableCreatedByGravitino() throws InterruptedException { Catalog catalog = createdMetalake.loadCatalog(NameIdentifier.of(metalakeName, catalogName)); Assertions.assertEquals("root", catalog.properties().get("jdbc-user")); - String sql = String.format("show catalogs like '%s.%s'", metalakeName, catalogName); + String sql = String.format("show catalogs like '%s'", catalogName); boolean success = checkTrinoHasLoaded(sql, 30); if (!success) { Assertions.fail("Trino fail to load catalogs created by gravitino: " + sql); } String data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); - Assertions.assertEquals(metalakeName + "." 
+ catalogName, data); + Assertions.assertEquals(catalogName, data); Schema schema = catalog @@ -1220,9 +1188,7 @@ void testMySQLTableCreatedByGravitino() throws InterruptedException { .loadTable(NameIdentifier.of(metalakeName, catalogName, schemaName, tableName)); Assertions.assertNotNull(table); - sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName); + sql = String.format("show create table %s.%s.%s", catalogName, schemaName, tableName); success = checkTrinoHasLoaded(sql, 30); if (!success) { @@ -1256,9 +1222,7 @@ void testMySQLTableCreatedByGravitino() throws InterruptedException { new Index[] { Indexes.createMysqlPrimaryKey(new String[][] {new String[] {"IntegerType"}}) }); - sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName); + sql = String.format("show create table %s.%s.%s", catalogName, schemaName, tableName); success = checkTrinoHasLoaded(sql, 30); if (!success) { @@ -1304,46 +1268,43 @@ void testMySQLTableCreatedByTrino() throws InterruptedException { Catalog catalog = createdMetalake.loadCatalog(NameIdentifier.of(metalakeName, catalogName)); Assertions.assertEquals("root", catalog.properties().get("jdbc-user")); - String sql = String.format("show catalogs like '%s.%s'", metalakeName, catalogName); + String sql = String.format("show catalogs like '%s'", catalogName); boolean success = checkTrinoHasLoaded(sql, 30); if (!success) { Assertions.fail("Trino fail to load catalogs created by gravitino: " + sql); } String data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); - Assertions.assertEquals(metalakeName + "." 
+ catalogName, data); + Assertions.assertEquals(catalogName, data); // Create schema - sql = String.format("create schema \"%s.%s\".%s", metalakeName, catalogName, schemaName); + sql = String.format("create schema %s.%s", catalogName, schemaName); containerSuite.getTrinoContainer().executeUpdateSQL(sql); // create table sql = String.format( - "create table \"%s.%s\".%s.%s (id int, name varchar)", - metalakeName, catalogName, schemaName, tableName); + "create table %s.%s.%s (id int, name varchar)", catalogName, schemaName, tableName); containerSuite.getTrinoContainer().executeUpdateSQL(sql); // Add a not null column sql = String.format( - "alter table \"%s.%s\".%s.%s add column age int not null comment 'age of users'", - metalakeName, catalogName, schemaName, tableName); + "alter table %s.%s.%s add column age int not null comment 'age of users'", + catalogName, schemaName, tableName); containerSuite.getTrinoContainer().executeUpdateSQL(sql); sql = String.format( - "alter table \"%s.%s\".%s.%s add column address varchar(20) not null comment 'address of users'", - metalakeName, catalogName, schemaName, tableName); + "alter table %s.%s.%s add column address varchar(20) not null comment 'address of users'", + catalogName, schemaName, tableName); containerSuite.getTrinoContainer().executeUpdateSQL(sql); catalog .asTableCatalog() .loadTable(NameIdentifier.of(metalakeName, catalogName, schemaName, tableName)); - sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName); + sql = String.format("show create table %s.%s.%s", catalogName, schemaName, tableName); data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); Assertions.assertTrue(data.contains("age integer NOT NULL")); @@ -1353,57 +1314,45 @@ void testMySQLTableCreatedByTrino() throws InterruptedException { String tableName1 = "t112"; sql = String.format( - "create table \"%s.%s\".%s.%s (id int, t1name varchar)", - metalakeName, catalogName, 
schemaName, tableName1); + "create table %s.%s.%s (id int, t1name varchar)", catalogName, schemaName, tableName1); containerSuite.getTrinoContainer().executeUpdateSQL(sql); String tableName2 = "t212"; sql = String.format( - "create table \"%s.%s\".%s.%s (id int, t2name varchar)", - metalakeName, catalogName, schemaName, tableName2); + "create table %s.%s.%s (id int, t2name varchar)", catalogName, schemaName, tableName2); containerSuite.getTrinoContainer().executeUpdateSQL(sql); String tableName3 = "t_12"; sql = String.format( - "create table \"%s.%s\".%s.%s (id int, t3name varchar)", - metalakeName, catalogName, schemaName, tableName3); + "create table %s.%s.%s (id int, t3name varchar)", catalogName, schemaName, tableName3); containerSuite.getTrinoContainer().executeUpdateSQL(sql); String tableName4 = "_1__"; sql = String.format( - "create table \"%s.%s\".%s.%s (id int, t4name varchar)", - metalakeName, catalogName, schemaName, tableName4); + "create table %s.%s.%s (id int, t4name varchar)", catalogName, schemaName, tableName4); containerSuite.getTrinoContainer().executeUpdateSQL(sql); // Get table tableName1 - sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName1); + sql = String.format("show create table %s.%s.%s", catalogName, schemaName, tableName1); data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); data.contains("t1name varchar"); // Get table tableName2 - sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName2); + sql = String.format("show create table %s.%s.%s", catalogName, schemaName, tableName2); data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); data.contains("t2name varchar"); // Get table tableName3 - sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName3); + sql = String.format("show create table %s.%s.%s", catalogName, 
schemaName, tableName3); data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); data.contains("t3name varchar"); // Get table tableName4 - sql = - String.format( - "show create table \"%s.%s\".%s.%s", metalakeName, catalogName, schemaName, tableName4); + sql = String.format("show create table %s.%s.%s", catalogName, schemaName, tableName4); data = containerSuite.getTrinoContainer().executeQuerySQL(sql).get(0).get(0); data.contains("t4name varchar"); } @@ -1440,7 +1389,7 @@ void testDropCatalogAndCreateAgain() throws InterruptedException { .put("jdbc-url", String.format("jdbc:mysql://%s:3306?useSSL=false", hiveHost)) .build()); - String sql = String.format("show catalogs like '%s.%s'", metalakeName, catalogName); + String sql = String.format("show catalogs like '%s'", catalogName); boolean success = checkTrinoHasLoaded(sql, 30); Assertions.assertTrue(success, "Trino should load the catalog: " + sql); diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/rest/KerberosOperationsIT.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/rest/KerberosOperationsIT.java index 0c28d1b5d2b..2a48024f353 100644 --- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/rest/KerberosOperationsIT.java +++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/rest/KerberosOperationsIT.java @@ -9,13 +9,13 @@ import static com.datastrato.gravitino.server.authentication.KerberosConfig.PRINCIPAL; import static org.apache.hadoop.minikdc.MiniKdc.MAX_TICKET_LIFETIME; +import com.datastrato.gravitino.Configs; import com.datastrato.gravitino.auth.AuthenticatorType; import com.datastrato.gravitino.client.GravitinoVersion; import com.datastrato.gravitino.client.KerberosTokenProvider; import com.datastrato.gravitino.integration.test.util.AbstractIT; import com.datastrato.gravitino.integration.test.util.ITUtils; import 
com.datastrato.gravitino.integration.test.util.KerberosProviderHelper; -import com.datastrato.gravitino.server.authentication.KerberosConfig; import com.google.common.collect.Maps; import java.io.File; import java.io.IOException; @@ -59,8 +59,7 @@ public static void startIntegrationTest() throws Exception { .withClientPrincipal(clientPrincipal) .withKeyTabFile(new File(keytabFile)) .build()); - configs.put( - KerberosConfig.AUTHENTICATOR.getKey(), AuthenticatorType.KERBEROS.name().toLowerCase()); + configs.put(Configs.AUTHENTICATOR.getKey(), AuthenticatorType.KERBEROS.name().toLowerCase()); configs.put(PRINCIPAL.getKey(), serverPrincipal); configs.put(KEYTAB.getKey(), keytabFile); registerCustomConfigs(configs); diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageTest.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageTest.java index 466500447c9..b31631030ba 100644 --- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageTest.java +++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/CatalogsPageTest.java @@ -251,7 +251,7 @@ public void testCreateKafkaCatalog() throws InterruptedException { @Order(7) public void testRefreshPage() { driver.navigate().refresh(); - Assertions.assertEquals(driver.getTitle(), WEB_TITLE); + Assertions.assertEquals(WEB_TITLE, driver.getTitle()); Assertions.assertTrue(catalogsPage.verifyRefreshPage()); List catalogsNames = Arrays.asList( diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/MetalakePageTest.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/MetalakePageTest.java index 271be49e0ad..d288a8a2932 100644 --- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/MetalakePageTest.java +++ 
b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/web/ui/MetalakePageTest.java @@ -125,7 +125,7 @@ public void testLinkToCatalogsPage() throws InterruptedException { public void testRefreshPage() { driver.navigate().refresh(); - Assertions.assertEquals(driver.getTitle(), WEB_TITLE); + Assertions.assertEquals(WEB_TITLE, driver.getTitle()); Assertions.assertTrue(metalakePage.verifyRefreshPage()); } diff --git a/integration-test/src/test/resources/trino-ci-testset/bugs/00002_alter_table_mysql.sql b/integration-test/src/test/resources/trino-ci-testset/bugs/00002_alter_table_mysql.sql deleted file mode 100644 index b26d9fe6828..00000000000 --- a/integration-test/src/test/resources/trino-ci-testset/bugs/00002_alter_table_mysql.sql +++ /dev/null @@ -1,16 +0,0 @@ -CREATE SCHEMA "test.jdbc-mysql".gt_db1; - -CREATE TABLE "test.jdbc-mysql".gt_db1.tb01 ( - name varchar, - salary int -); - -alter table "test.jdbc-mysql".gt_db1.tb01 rename column name to s; -show tables from "test.jdbc-mysql".gt_db1; - -comment on table "test.jdbc-mysql".gt_db1.tb01 is 'test table comments'; -show create table "test.jdbc-mysql".gt_db1.tb01; - -comment on column "test.jdbc-mysql".gt_db1.tb01.s is 'test column comments'; -show create table "test.jdbc-mysql".gt_db1.tb01; - diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00000_create_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00000_create_table.sql index 5df0dda6cd9..e37f3fb7365 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00000_create_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00000_create_table.sql @@ -1,6 +1,6 @@ -CREATE SCHEMA "test.gt_hive".gt_db1; +CREATE SCHEMA gt_hive.gt_db1; -CREATE TABLE "test.gt_hive".gt_db1.tb01 ( +CREATE TABLE gt_hive.gt_db1.tb01 ( name varchar, salary int ) @@ -8,6 +8,6 @@ WITH ( format = 'TEXTFILE' ); -drop table "test.gt_hive".gt_db1.tb01; +drop table 
gt_hive.gt_db1.tb01; -drop schema "test.gt_hive".gt_db1; \ No newline at end of file +drop schema gt_hive.gt_db1; \ No newline at end of file diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00001_select_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00001_select_table.sql index 5bd054d44d9..4057536a827 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00001_select_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00001_select_table.sql @@ -1,6 +1,6 @@ -CREATE SCHEMA "test.gt_hive".gt_db1; +CREATE SCHEMA gt_hive.gt_db1; -CREATE TABLE "test.gt_hive".gt_db1.tb01 ( +CREATE TABLE gt_hive.gt_db1.tb01 ( name varchar, salary int ) @@ -8,13 +8,13 @@ WITH ( format = 'TEXTFILE' ); -insert into "test.gt_hive".gt_db1.tb01(name, salary) values ('sam', 11); -insert into "test.gt_hive".gt_db1.tb01(name, salary) values ('jerry', 13); -insert into "test.gt_hive".gt_db1.tb01(name, salary) values ('bob', 14), ('tom', 12); +insert into gt_hive.gt_db1.tb01(name, salary) values ('sam', 11); +insert into gt_hive.gt_db1.tb01(name, salary) values ('jerry', 13); +insert into gt_hive.gt_db1.tb01(name, salary) values ('bob', 14), ('tom', 12); -select * from "test.gt_hive".gt_db1.tb01 order by name; +select * from gt_hive.gt_db1.tb01 order by name; -CREATE TABLE "test.gt_hive".gt_db1.tb02 ( +CREATE TABLE gt_hive.gt_db1.tb02 ( name varchar, salary int ) @@ -22,12 +22,12 @@ WITH ( format = 'TEXTFILE' ); -insert into "test.gt_hive".gt_db1.tb02(name, salary) select distinct * from "test.gt_hive".gt_db1.tb01 order by name; +insert into gt_hive.gt_db1.tb02(name, salary) select * from gt_hive.gt_db1.tb01 order by name; -select * from "test.gt_hive".gt_db1.tb02 order by name; +select * from gt_hive.gt_db1.tb02 order by name; -drop table "test.gt_hive".gt_db1.tb02; +drop table gt_hive.gt_db1.tb02; -drop table "test.gt_hive".gt_db1.tb01; +drop table gt_hive.gt_db1.tb01; -drop 
schema "test.gt_hive".gt_db1; +drop schema gt_hive.gt_db1; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00002_alter_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00002_alter_table.sql index dfca4f16e19..9c9e29b4ea8 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00002_alter_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00002_alter_table.sql @@ -1,6 +1,6 @@ -CREATE SCHEMA "test.gt_hive".gt_db1; +CREATE SCHEMA gt_hive.gt_db1; -CREATE TABLE "test.gt_hive".gt_db1.tb01 ( +CREATE TABLE gt_hive.gt_db1.tb01 ( name varchar, salary int, city int @@ -9,30 +9,30 @@ WITH ( format = 'TEXTFILE' ); -alter table "test.gt_hive".gt_db1.tb01 rename to "test.gt_hive".gt_db1.tb03; -show tables from "test.gt_hive".gt_db1; +alter table gt_hive.gt_db1.tb01 rename to gt_hive.gt_db1.tb03; +show tables from gt_hive.gt_db1; -alter table "test.gt_hive".gt_db1.tb03 rename to "test.gt_hive".gt_db1.tb01; -show tables from "test.gt_hive".gt_db1; +alter table gt_hive.gt_db1.tb03 rename to gt_hive.gt_db1.tb01; +show tables from gt_hive.gt_db1; -alter table "test.gt_hive".gt_db1.tb01 drop column city; -show create table "test.gt_hive".gt_db1.tb01; +alter table gt_hive.gt_db1.tb01 drop column city; +show create table gt_hive.gt_db1.tb01; -alter table "test.gt_hive".gt_db1.tb01 rename column name to s; -show create table "test.gt_hive".gt_db1.tb01; +alter table gt_hive.gt_db1.tb01 rename column name to s; +show create table gt_hive.gt_db1.tb01; -alter table "test.gt_hive".gt_db1.tb01 alter column s set data type varchar(256); -show create table "test.gt_hive".gt_db1.tb01; +alter table gt_hive.gt_db1.tb01 alter column s set data type varchar(256); +show create table gt_hive.gt_db1.tb01; -comment on table "test.gt_hive".gt_db1.tb01 is 'test table comments'; -show create table "test.gt_hive".gt_db1.tb01; +comment on table gt_hive.gt_db1.tb01 is 'test table comments'; 
+show create table gt_hive.gt_db1.tb01; -comment on column "test.gt_hive".gt_db1.tb01.s is 'test column comments'; -show create table "test.gt_hive".gt_db1.tb01; +comment on column gt_hive.gt_db1.tb01.s is 'test column comments'; +show create table gt_hive.gt_db1.tb01; -alter table "test.gt_hive".gt_db1.tb01 add column city varchar comment 'aaa'; -show create table "test.gt_hive".gt_db1.tb01; +alter table gt_hive.gt_db1.tb01 add column city varchar comment 'aaa'; +show create table gt_hive.gt_db1.tb01; -drop table "test.gt_hive".gt_db1.tb01; +drop table gt_hive.gt_db1.tb01; -drop schema "test.gt_hive".gt_db1; +drop schema gt_hive.gt_db1; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00002_alter_table.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00002_alter_table.txt index c759dfe1c95..d07f2e32e9e 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00002_alter_table.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00002_alter_table.txt @@ -12,7 +12,7 @@ RENAME TABLE DROP COLUMN -"CREATE TABLE ""test.gt_hive"".gt_db1.tb01 ( +"CREATE TABLE gt_hive.gt_db1.tb01 ( name varchar, salary integer ) @@ -30,7 +30,7 @@ WITH ( RENAME COLUMN -"CREATE TABLE ""test.gt_hive"".gt_db1.tb01 ( +"CREATE TABLE gt_hive.gt_db1.tb01 ( s varchar, salary integer ) @@ -48,7 +48,7 @@ WITH ( SET COLUMN TYPE -"CREATE TABLE ""test.gt_hive"".gt_db1.tb01 ( +"CREATE TABLE gt_hive.gt_db1.tb01 ( s varchar(256), salary integer ) @@ -66,7 +66,7 @@ WITH ( COMMENT -"CREATE TABLE ""test.gt_hive"".gt_db1.tb01 ( +"CREATE TABLE gt_hive.gt_db1.tb01 ( s varchar(256), salary integer ) @@ -84,7 +84,7 @@ WITH ( COMMENT -"CREATE TABLE ""test.gt_hive"".gt_db1.tb01 ( +"CREATE TABLE gt_hive.gt_db1.tb01 ( s varchar(256) COMMENT 'test column comments', salary integer ) @@ -102,7 +102,7 @@ WITH ( ADD COLUMN -"CREATE TABLE ""test.gt_hive"".gt_db1.tb01 ( +"CREATE TABLE gt_hive.gt_db1.tb01 ( s varchar(256) 
COMMENT 'test column comments', salary integer, city varchar COMMENT 'aaa' diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00005_catalog.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00005_catalog.sql index 91daf2a79a6..c33cba46da2 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00005_catalog.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00005_catalog.sql @@ -7,11 +7,11 @@ call gravitino.system.create_catalog( ) ); -show catalogs like 'test.gt_hive_xxx1'; +show catalogs like 'gt_hive_xxx1'; CALL gravitino.system.drop_catalog('gt_hive_xxx1'); -show catalogs like 'test.gt_hive_xxx1'; +show catalogs like 'gt_hive_xxx1'; call gravitino.system.create_catalog( 'gt_hive_xxx1', @@ -22,6 +22,6 @@ call gravitino.system.create_catalog( ) ); -show catalogs like 'test.gt_hive_xxx1'; +show catalogs like 'gt_hive_xxx1'; CALL gravitino.system.drop_catalog('gt_hive_xxx1'); diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00005_catalog.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00005_catalog.txt index 45ac31e08ab..a9fb382877a 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00005_catalog.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00005_catalog.txt @@ -1,6 +1,6 @@ CALL -"test.gt_hive_xxx1" +"gt_hive_xxx1" CALL @@ -8,6 +8,6 @@ CALL CALL -"test.gt_hive_xxx1" +"gt_hive_xxx1" CALL diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00006_datatype.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00006_datatype.sql index a58417f6710..45941886073 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00006_datatype.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00006_datatype.sql @@ -1,6 +1,6 @@ -CREATE SCHEMA "test.gt_hive".gt_db1; +CREATE 
SCHEMA gt_hive.gt_db1; -USE "test.gt_hive".gt_db1; +USE gt_hive.gt_db1; -- Unsupported Type: TIME CREATE TABLE tb01 ( @@ -32,4 +32,4 @@ select * from tb01 order by f1; drop table tb01; -drop schema "test.gt_hive".gt_db1 cascade; +drop schema gt_hive.gt_db1 cascade; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00006_datatype.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00006_datatype.txt index e4269caa0f1..33bedfdc751 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00006_datatype.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00006_datatype.txt @@ -4,7 +4,7 @@ USE CREATE TABLE -"CREATE TABLE ""test.gt_hive"".gt_db1.tb01 ( +"CREATE TABLE gt_hive.gt_db1.tb01 ( f1 varchar(200), f2 char(20), f3 varbinary, diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00007_varchar.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00007_varchar.sql index 54dfd8b6f67..555f5d82b80 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00007_varchar.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00007_varchar.sql @@ -1,47 +1,47 @@ -CREATE SCHEMA "test.gt_hive".varchar_db1; +CREATE SCHEMA gt_hive.varchar_db1; -USE "test.gt_hive".varchar_db1; +USE gt_hive.varchar_db1; CREATE TABLE tb01 (id int, name char(20)); -SHOW CREATE TABLE "test.gt_hive".varchar_db1.tb01; +SHOW CREATE TABLE gt_hive.varchar_db1.tb01; CREATE TABLE tb02 (id int, name char(255)); -SHOW CREATE TABLE "test.gt_hive".varchar_db1.tb02; +SHOW CREATE TABLE gt_hive.varchar_db1.tb02; CREATE TABLE tb03 (id int, name char(256)); CREATE TABLE tb04 (id int, name varchar(250)); -SHOW CREATE TABLE "test.gt_hive".varchar_db1.tb04; +SHOW CREATE TABLE gt_hive.varchar_db1.tb04; CREATE TABLE tb05 (id int, name varchar(65535)); -SHOW CREATE TABLE "test.gt_hive".varchar_db1.tb05; +SHOW CREATE TABLE 
gt_hive.varchar_db1.tb05; CREATE TABLE tb06 (id int, name char); -SHOW CREATE TABLE "test.gt_hive".varchar_db1.tb06; +SHOW CREATE TABLE gt_hive.varchar_db1.tb06; CREATE TABLE tb07 (id int, name varchar); -SHOW CREATE TABLE "test.gt_hive".varchar_db1.tb07; +SHOW CREATE TABLE gt_hive.varchar_db1.tb07; CREATE TABLE tb08 (id int, name varchar(65536)); -drop table "test.gt_hive".varchar_db1.tb01; +drop table gt_hive.varchar_db1.tb01; -drop table "test.gt_hive".varchar_db1.tb02; +drop table gt_hive.varchar_db1.tb02; -drop table "test.gt_hive".varchar_db1.tb04; +drop table gt_hive.varchar_db1.tb04; -drop table "test.gt_hive".varchar_db1.tb05; +drop table gt_hive.varchar_db1.tb05; -drop table "test.gt_hive".varchar_db1.tb06; +drop table gt_hive.varchar_db1.tb06; -drop table "test.gt_hive".varchar_db1.tb07; +drop table gt_hive.varchar_db1.tb07; -drop schema "test.gt_hive".varchar_db1; +drop schema gt_hive.varchar_db1; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00007_varchar.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00007_varchar.txt index b69411fb752..77d6e774295 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00007_varchar.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/hive/00007_varchar.txt @@ -4,7 +4,7 @@ USE CREATE TABLE -"CREATE TABLE ""test.gt_hive"".varchar_db1.tb01 ( +"CREATE TABLE gt_hive.varchar_db1.tb01 ( id integer, name char(20) ) @@ -20,7 +20,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_hive"".varchar_db1.tb02 ( +"CREATE TABLE gt_hive.varchar_db1.tb02 ( id integer, name char(255) ) @@ -38,7 +38,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_hive"".varchar_db1.tb04 ( +"CREATE TABLE gt_hive.varchar_db1.tb04 ( id integer, name varchar(250) ) @@ -54,7 +54,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_hive"".varchar_db1.tb05 ( +"CREATE TABLE gt_hive.varchar_db1.tb05 ( id integer, name varchar(65535) ) @@ -70,7 +70,7 @@ WITH ( 
CREATE TABLE -"CREATE TABLE ""test.gt_hive"".varchar_db1.tb06 ( +"CREATE TABLE gt_hive.varchar_db1.tb06 ( id integer, name char(1) ) @@ -86,7 +86,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_hive"".varchar_db1.tb07 ( +"CREATE TABLE gt_hive.varchar_db1.tb07 ( id integer, name varchar ) diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.sql index 09e50a1becb..e3804cde4a2 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.sql @@ -1,61 +1,61 @@ -CREATE SCHEMA "test.gt_mysql".gt_db1; +CREATE SCHEMA gt_mysql.gt_db1; -SHOW SCHEMAS FROM "test.gt_mysql" like 'gt_db1'; +SHOW SCHEMAS FROM gt_mysql like 'gt_db1'; -SHOW CREATE SCHEMA "test.gt_mysql".gt_db1; +SHOW CREATE SCHEMA gt_mysql.gt_db1; -CREATE SCHEMA "test.gt_mysql".gt_db1; +CREATE SCHEMA gt_mysql.gt_db1; -CREATE SCHEMA IF NOT EXISTS "test.gt_mysql".gt_db1; +CREATE SCHEMA IF NOT EXISTS gt_mysql.gt_db1; -CREATE SCHEMA IF NOT EXISTS "test.gt_mysql".gt_db2; +CREATE SCHEMA IF NOT EXISTS gt_mysql.gt_db2; -SHOW SCHEMAS FROM "test.gt_mysql" like 'gt_db2'; +SHOW SCHEMAS FROM gt_mysql like 'gt_db2'; -CREATE TABLE "test.gt_mysql".gt_db1.tb01 ( +CREATE TABLE gt_mysql.gt_db1.tb01 ( name varchar(200), salary int ); -SHOW CREATE TABLE "test.gt_mysql".gt_db1.tb01; +SHOW CREATE TABLE gt_mysql.gt_db1.tb01; -SHOW tables FROM "test.gt_mysql".gt_db1 like 'tb01'; +SHOW tables FROM gt_mysql.gt_db1 like 'tb01'; -CREATE TABLE "test.gt_mysql".gt_db1.tb01 ( +CREATE TABLE gt_mysql.gt_db1.tb01 ( name varchar(200), salary int ); -CREATE TABLE IF NOT EXISTS "test.gt_mysql".gt_db1.tb01 ( +CREATE TABLE IF NOT EXISTS gt_mysql.gt_db1.tb01 ( name varchar(200), salary int ); -CREATE TABLE IF NOT EXISTS "test.gt_mysql".gt_db1.tb02 ( +CREATE TABLE IF 
NOT EXISTS gt_mysql.gt_db1.tb02 ( name varchar(200), salary int ); -SHOW tables FROM "test.gt_mysql".gt_db1 like 'tb02'; +SHOW tables FROM gt_mysql.gt_db1 like 'tb02'; -DROP TABLE "test.gt_mysql".gt_db1.tb01; +DROP TABLE gt_mysql.gt_db1.tb01; -SHOW tables FROM "test.gt_mysql".gt_db1 like 'tb01'; +SHOW tables FROM gt_mysql.gt_db1 like 'tb01'; -DROP TABLE "test.gt_mysql".gt_db1.tb01; +DROP TABLE gt_mysql.gt_db1.tb01; -DROP TABLE IF EXISTS "test.gt_mysql".gt_db1.tb01; +DROP TABLE IF EXISTS gt_mysql.gt_db1.tb01; -DROP TABLE IF EXISTS "test.gt_mysql".gt_db1.tb02; +DROP TABLE IF EXISTS gt_mysql.gt_db1.tb02; -SHOW tables FROM "test.gt_mysql".gt_db1 like 'tb02'; +SHOW tables FROM gt_mysql.gt_db1 like 'tb02'; -DROP SCHEMA "test.gt_mysql".gt_db1; +DROP SCHEMA gt_mysql.gt_db1; -SHOW SCHEMAS FROM "test.gt_mysql" like 'gt_db1'; +SHOW SCHEMAS FROM gt_mysql like 'gt_db1'; -DROP SCHEMA IF EXISTS "test.gt_mysql".gt_db1; +DROP SCHEMA IF EXISTS gt_mysql.gt_db1; -DROP SCHEMA IF EXISTS "test.gt_mysql".gt_db2; +DROP SCHEMA IF EXISTS gt_mysql.gt_db2; -SHOW SCHEMAS FROM "test.gt_mysql" like 'gt_db2' +SHOW SCHEMAS FROM gt_mysql like 'gt_db2' diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.txt index 7b78fbe07b6..43da1446fc8 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00000_create_table.txt @@ -2,9 +2,9 @@ CREATE SCHEMA "gt_db1" -"CREATE SCHEMA ""test.gt_mysql"".gt_db1" +"CREATE SCHEMA gt_mysql.gt_db1" - Schema 'test.gt_mysql.gt_db1' already exists + Schema 'gt_mysql.gt_db1' already exists CREATE SCHEMA @@ -14,7 +14,7 @@ CREATE SCHEMA CREATE TABLE -"CREATE TABLE ""test.gt_mysql"".gt_db1.tb01 ( +"CREATE TABLE gt_mysql.gt_db1.tb01 ( name varchar(200), salary integer ) @@ -25,7 +25,7 @@ WITH ( "tb01" 
- Table 'test.gt_mysql.gt_db1.tb01' already exists + Table 'gt_mysql.gt_db1.tb01' already exists CREATE TABLE @@ -37,7 +37,7 @@ DROP TABLE - Table 'test.gt_mysql.gt_db1.tb01' does not exist + Table 'gt_mysql.gt_db1.tb01' does not exist DROP TABLE diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00001_select_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00001_select_table.sql index 51fc9066bdd..ddf2105de46 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00001_select_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00001_select_table.sql @@ -1,29 +1,29 @@ -CREATE SCHEMA "test.gt_mysql".gt_db1; +CREATE SCHEMA gt_mysql.gt_db1; -CREATE TABLE "test.gt_mysql".gt_db1.tb01 ( +CREATE TABLE gt_mysql.gt_db1.tb01 ( name varchar(200), salary int ); -insert into "test.gt_mysql".gt_db1.tb01(name, salary) values ('sam', 11); -insert into "test.gt_mysql".gt_db1.tb01(name, salary) values ('jerry', 13); -insert into "test.gt_mysql".gt_db1.tb01(name, salary) values ('bob', 14), ('tom', 12); +insert into gt_mysql.gt_db1.tb01(name, salary) values ('sam', 11); +insert into gt_mysql.gt_db1.tb01(name, salary) values ('jerry', 13); +insert into gt_mysql.gt_db1.tb01(name, salary) values ('bob', 14), ('tom', 12); -select * from "test.gt_mysql".gt_db1.tb01 order by name; +select * from gt_mysql.gt_db1.tb01 order by name; -CREATE TABLE "test.gt_mysql".gt_db1.tb02 ( +CREATE TABLE gt_mysql.gt_db1.tb02 ( name varchar(200), salary int ); -insert into "test.gt_mysql".gt_db1.tb02(name, salary) select distinct * from "test.gt_mysql".gt_db1.tb01 order by name; +insert into gt_mysql.gt_db1.tb02(name, salary) select * from gt_mysql.gt_db1.tb01 order by name; -select * from "test.gt_mysql".gt_db1.tb02 order by name; +select * from gt_mysql.gt_db1.tb02 order by name; -select * from "test.gt_mysql".gt_db1.tb01 join "test.gt_mysql".gt_db1.tb02 t on 
tb01.salary = t.salary order by tb01.name; +select * from gt_mysql.gt_db1.tb01 join gt_mysql.gt_db1.tb02 t on tb01.salary = t.salary order by tb01.name; -drop table "test.gt_mysql".gt_db1.tb02; +drop table gt_mysql.gt_db1.tb02; -drop table "test.gt_mysql".gt_db1.tb01; +drop table gt_mysql.gt_db1.tb01; -drop schema "test.gt_mysql".gt_db1; +drop schema gt_mysql.gt_db1; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00002_alter_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00002_alter_table.sql index 0ce7326cadf..b3af09a6580 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00002_alter_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00002_alter_table.sql @@ -1,42 +1,42 @@ -CREATE SCHEMA "test.gt_mysql".gt_db1; +CREATE SCHEMA gt_mysql.gt_db1; -CREATE TABLE "test.gt_mysql".gt_db1.tb01 ( +CREATE TABLE gt_mysql.gt_db1.tb01 ( name varchar(200), salary int, city int ); -alter table "test.gt_mysql".gt_db1.tb01 rename to "test.gt_mysql".gt_db1.tb03; -show tables from "test.gt_mysql".gt_db1; +alter table gt_mysql.gt_db1.tb01 rename to gt_mysql.gt_db1.tb03; +show tables from gt_mysql.gt_db1; -alter table "test.gt_mysql".gt_db1.tb03 rename to "test.gt_mysql".gt_db1.tb01; -show tables from "test.gt_mysql".gt_db1; +alter table gt_mysql.gt_db1.tb03 rename to gt_mysql.gt_db1.tb01; +show tables from gt_mysql.gt_db1; -alter table "test.gt_mysql".gt_db1.tb01 drop column city; -show create table "test.gt_mysql".gt_db1.tb01; +alter table gt_mysql.gt_db1.tb01 drop column city; +show create table gt_mysql.gt_db1.tb01; -alter table "test.gt_mysql".gt_db1.tb01 alter column salary set data type bigint; -show create table "test.gt_mysql".gt_db1.tb01; +alter table gt_mysql.gt_db1.tb01 alter column salary set data type bigint; +show create table gt_mysql.gt_db1.tb01; -comment on column "test.gt_mysql".gt_db1.tb01.name is 'test column 
comments'; -show create table "test.gt_mysql".gt_db1.tb01; +comment on column gt_mysql.gt_db1.tb01.name is 'test column comments'; +show create table gt_mysql.gt_db1.tb01; -comment on table "test.gt_mysql".gt_db1.tb01 is 'test table comments'; -show create table "test.gt_mysql".gt_db1.tb01; +comment on table gt_mysql.gt_db1.tb01 is 'test table comments'; +show create table gt_mysql.gt_db1.tb01; -alter table "test.gt_mysql".gt_db1.tb01 rename column name to s; -show create table "test.gt_mysql".gt_db1.tb01; +alter table gt_mysql.gt_db1.tb01 rename column name to s; +show create table gt_mysql.gt_db1.tb01; --- alter table "test.gt_mysql".gt_db1.tb01 add column city varchar(50) not null comment 'aaa'; -alter table "test.gt_mysql".gt_db1.tb01 add column city varchar(50) comment 'aaa'; -show create table "test.gt_mysql".gt_db1.tb01; +-- alter table gt_mysql.gt_db1.tb01 add column city varchar(50) not null comment 'aaa'; +alter table gt_mysql.gt_db1.tb01 add column city varchar(50) comment 'aaa'; +show create table gt_mysql.gt_db1.tb01; -alter table "test.gt_mysql".gt_db1.tb01 add column age int not null comment 'age of users'; -show create table "test.gt_mysql".gt_db1.tb01; +alter table gt_mysql.gt_db1.tb01 add column age int not null comment 'age of users'; +show create table gt_mysql.gt_db1.tb01; -alter table "test.gt_mysql".gt_db1.tb01 add column address varchar(200) not null comment 'address of users'; -show create table "test.gt_mysql".gt_db1.tb01; +alter table gt_mysql.gt_db1.tb01 add column address varchar(200) not null comment 'address of users'; +show create table gt_mysql.gt_db1.tb01; -drop table "test.gt_mysql".gt_db1.tb01; +drop table gt_mysql.gt_db1.tb01; -drop schema "test.gt_mysql".gt_db1; +drop schema gt_mysql.gt_db1; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00002_alter_table.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00002_alter_table.txt index 2ace13f5db0..3aa3144935c 
100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00002_alter_table.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00002_alter_table.txt @@ -12,7 +12,7 @@ RENAME TABLE DROP COLUMN -"CREATE TABLE ""test.gt_mysql"".gt_db1.tb01 ( +"CREATE TABLE gt_mysql.gt_db1.tb01 ( name varchar(200), salary integer ) @@ -23,7 +23,7 @@ WITH ( SET COLUMN TYPE -"CREATE TABLE ""test.gt_mysql"".gt_db1.tb01 ( +"CREATE TABLE gt_mysql.gt_db1.tb01 ( name varchar(200), salary bigint ) @@ -34,7 +34,7 @@ WITH ( COMMENT -"CREATE TABLE ""test.gt_mysql"".gt_db1.tb01 ( +"CREATE TABLE gt_mysql.gt_db1.tb01 ( name varchar(200) COMMENT 'test column comments', salary bigint ) @@ -45,7 +45,7 @@ WITH ( COMMENT -"CREATE TABLE ""test.gt_mysql"".gt_db1.tb01 ( +"CREATE TABLE gt_mysql.gt_db1.tb01 ( name varchar(200) COMMENT 'test column comments', salary bigint ) @@ -56,7 +56,7 @@ WITH ( RENAME COLUMN -"CREATE TABLE ""test.gt_mysql"".gt_db1.tb01 ( +"CREATE TABLE gt_mysql.gt_db1.tb01 ( s varchar(200) COMMENT 'test column comments', salary bigint ) @@ -67,7 +67,7 @@ WITH ( ADD COLUMN -"CREATE TABLE ""test.gt_mysql"".gt_db1.tb01 ( +"CREATE TABLE gt_mysql.gt_db1.tb01 ( s varchar(200) COMMENT 'test column comments', salary bigint, city varchar(50) COMMENT 'aaa' @@ -79,7 +79,7 @@ WITH ( ADD COLUMN -"CREATE TABLE ""test.gt_mysql"".gt_db1.tb01 ( +"CREATE TABLE gt_mysql.gt_db1.tb01 ( s varchar(200) COMMENT 'test column comments', salary bigint, city varchar(50) COMMENT 'aaa', @@ -92,7 +92,7 @@ WITH ( ADD COLUMN -"CREATE TABLE ""test.gt_mysql"".gt_db1.tb01 ( +"CREATE TABLE gt_mysql.gt_db1.tb01 ( s varchar(200) COMMENT 'test column comments', salary bigint, city varchar(50) COMMENT 'aaa', diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00003_use.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00003_use.sql index 0b568e506a2..7d2594b0579 100644 --- 
a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00003_use.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00003_use.sql @@ -1,13 +1,13 @@ -CREATE SCHEMA "test.gt_mysql".gt_db1; +CREATE SCHEMA gt_mysql.gt_db1; -USE "test.gt_mysql".gt_db1; +USE gt_mysql.gt_db1; CREATE TABLE tb01 ( name varchar(200), salary int ); -show tables from "test.gt_mysql".gt_db1; +show tables from gt_mysql.gt_db1; show tables; @@ -15,11 +15,11 @@ use tpch.tiny; show tables; -USE "test.gt_mysql".gt_db1; +USE gt_mysql.gt_db1; show tables; drop table tb01; -drop schema "test.gt_mysql".gt_db1; +drop schema gt_mysql.gt_db1; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00004_query_pushdown.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00004_query_pushdown.sql index 770e5a25304..a0fb5a60813 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00004_query_pushdown.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00004_query_pushdown.sql @@ -1,6 +1,6 @@ -CREATE SCHEMA "test.gt_mysql".gt_db1; +CREATE SCHEMA gt_mysql.gt_db1; -USE "test.gt_mysql".gt_db1; +USE gt_mysql.gt_db1; CREATE TABLE customer ( custkey bigint NOT NULL, @@ -48,4 +48,4 @@ drop table customer; drop table orders; -drop schema "test.gt_mysql".gt_db1; +drop schema gt_mysql.gt_db1; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00004_query_pushdown.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00004_query_pushdown.txt index 3d4c52bc990..5e8e51a098d 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00004_query_pushdown.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00004_query_pushdown.txt @@ -12,35 +12,35 @@ INSERT: 15000 rows "Trino version: % % - └─ TableScan[table = test.gt_mysql:gt_db1.customer 
gt_db1.customer limit=10 columns=[custkey:bigint:BIGINT]] + └─ TableScan[table = gt_mysql:gt_db1.customer gt_db1.customer limit=10 columns=[custkey:bigint:BIGINT]] Layout: [custkey:bigint] % " "Trino version: % % - └─ ScanFilter[table = test.gt_mysql:gt_db1:customer, filterPredicate = ""$like""(""phone"", ""$literal$""(from_base64('DgAAAFZBUklBQkxFX1dJRFRIAQAAAAEAAAALAAAAAAsAAAAGAAAAJTIzNDIlAA==')))] + └─ ScanFilter[table = gt_mysql:gt_db1:customer, filterPredicate = ""$like""(""phone"", ""$literal$""(from_base64('DgAAAFZBUklBQkxFX1dJRFRIAQAAAAEAAAALAAAAAAsAAAAGAAAAJTIzNDIlAA==')))] Layout: [custkey:bigint, name:varchar(25), address:varchar(40), nationkey:bigint, phone:varchar(15), acctbal:decimal(12,2), mktsegment:varchar(10), comment:varchar(117)] % " "Trino version: % % - └─ TableScan[table = test.gt_mysql:Query[SELECT sum(`totalprice`) AS `_pfgnrtd_0` FROM `gt_db1`.`orders`] columns=[_pfgnrtd_0:decimal(38,2):decimal]] + └─ TableScan[table = gt_mysql:Query[SELECT sum(`totalprice`) AS `_pfgnrtd_0` FROM `gt_db1`.`orders`] columns=[_pfgnrtd_0:decimal(38,2):decimal]] Layout: [_pfgnrtd:decimal(38,2)] % " "Trino version: % % - └─ TableScan[table = test.gt_mysql:Query[SELECT `orderdate`, sum(`totalprice`) AS `_pfgnrtd_0` FROM `gt_db1`.`orders` GROUP BY `orderdate`] sortOrder=[orderdate:date:DATE ASC NULLS LAST] limit=10 columns=[orderdate:date:DATE, _pfgnrtd_0:decimal(38,2):decimal]] + └─ TableScan[table = gt_mysql:Query[SELECT `orderdate`, sum(`totalprice`) AS `_pfgnrtd_0` FROM `gt_db1`.`orders` GROUP BY `orderdate`] sortOrder=[orderdate:date:DATE ASC NULLS LAST] limit=10 columns=[orderdate:date:DATE, _pfgnrtd_0:decimal(38,2):decimal]] Layout: [orderdate:date, _pfgnrtd:decimal(38,2)] % " "Trino version: % % - └─ TableScan[table = test.gt_mysql:Query[SELECT % INNER JOIN %] limit=10 columns=%]] + └─ TableScan[table = gt_mysql:Query[SELECT % INNER JOIN %] limit=10 columns=%]] % " diff --git 
a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00005_create_catalog.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00005_create_catalog.sql index 8236cca3508..dec476d9590 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00005_create_catalog.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00005_create_catalog.sql @@ -7,7 +7,7 @@ call gravitino.system.create_catalog( ) ); -show catalogs like 'test.gt_mysql_xxx1'; +show catalogs like 'gt_mysql_xxx1'; call gravitino.system.create_catalog( 'gt_mysql_xxx1', @@ -30,7 +30,7 @@ call gravitino.system.create_catalog( CALL gravitino.system.drop_catalog('gt_mysql_xxx1'); -show catalogs like 'test.gt_mysql_xxx1'; +show catalogs like 'gt_mysql_xxx1'; CALL gravitino.system.drop_catalog('gt_mysql_xxx1'); @@ -45,9 +45,9 @@ call gravitino.system.create_catalog( ) ); -show catalogs like 'test.gt_mysql_xxx1'; +show catalogs like 'gt_mysql_xxx1'; CALL gravitino.system.drop_catalog( catalog => 'gt_mysql_xxx1', ignore_not_exist => true); -show catalogs like 'test.gt_mysql_xxx1'; +show catalogs like 'gt_mysql_xxx1'; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00005_create_catalog.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00005_create_catalog.txt index 198a59bacb9..d17eaa68f90 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00005_create_catalog.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00005_create_catalog.txt @@ -1,6 +1,6 @@ CALL -"test.gt_mysql_xxx1" +"gt_mysql_xxx1" Catalog test.gt_mysql_xxx1 already exists. 
@@ -16,7 +16,7 @@ CALL CALL -"test.gt_mysql_xxx1" +"gt_mysql_xxx1" CALL diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00006_datatype.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00006_datatype.sql index 531d5d9921c..f6f0ec755fb 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00006_datatype.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00006_datatype.sql @@ -1,6 +1,6 @@ -CREATE SCHEMA "test.gt_mysql".gt_db1; +CREATE SCHEMA gt_mysql.gt_db1; -USE "test.gt_mysql".gt_db1; +USE gt_mysql.gt_db1; -- Unsupported Type: BOOLEAN CREATE TABLE tb01 ( @@ -68,4 +68,4 @@ drop table tb01; drop table tb02; -drop schema "test.gt_mysql".gt_db1 cascade; +drop schema gt_mysql.gt_db1 cascade; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00006_datatype.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00006_datatype.txt index d2629bda0d5..fb63415a854 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00006_datatype.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00006_datatype.txt @@ -4,7 +4,7 @@ USE CREATE TABLE -"CREATE TABLE ""test.gt_mysql"".gt_db1.tb01 ( +"CREATE TABLE gt_mysql.gt_db1.tb01 ( f1 varchar(200), f2 char(20), f3 varbinary, @@ -34,7 +34,7 @@ INSERT: 1 row CREATE TABLE -"CREATE TABLE ""test.gt_mysql"".gt_db1.tb02 ( +"CREATE TABLE gt_mysql.gt_db1.tb02 ( f1 varchar(200) NOT NULL, f2 char(20) NOT NULL, f3 varbinary NOT NULL, diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00007_varchar.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00007_varchar.sql index a0143719f78..b1277532b4d 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00007_varchar.sql +++ 
b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00007_varchar.sql @@ -1,44 +1,44 @@ -CREATE SCHEMA "test.gt_mysql".varchar_db1; +CREATE SCHEMA gt_mysql.varchar_db1; -USE "test.gt_mysql".varchar_db1; +USE gt_mysql.varchar_db1; CREATE TABLE tb01 (id int, name char(20)); -SHOW CREATE TABLE "test.gt_mysql".varchar_db1.tb01; +SHOW CREATE TABLE gt_mysql.varchar_db1.tb01; CREATE TABLE tb02 (id int, name char(255)); -SHOW CREATE TABLE "test.gt_mysql".varchar_db1.tb02; +SHOW CREATE TABLE gt_mysql.varchar_db1.tb02; CREATE TABLE tb03 (id int, name char(256)); CREATE TABLE tb04 (id int, name varchar(250)); -SHOW CREATE TABLE "test.gt_mysql".varchar_db1.tb04; +SHOW CREATE TABLE gt_mysql.varchar_db1.tb04; CREATE TABLE tb05 (id int, name varchar(256)); -SHOW CREATE TABLE "test.gt_mysql".varchar_db1.tb05; +SHOW CREATE TABLE gt_mysql.varchar_db1.tb05; CREATE TABLE tb06 (id int, name char); -SHOW CREATE TABLE "test.gt_mysql".varchar_db1.tb06; +SHOW CREATE TABLE gt_mysql.varchar_db1.tb06; CREATE TABLE tb07 (id int, name varchar); -SHOW CREATE TABLE "test.gt_mysql".varchar_db1.tb07; +SHOW CREATE TABLE gt_mysql.varchar_db1.tb07; -drop table "test.gt_mysql".varchar_db1.tb01; +drop table gt_mysql.varchar_db1.tb01; -drop table "test.gt_mysql".varchar_db1.tb02; +drop table gt_mysql.varchar_db1.tb02; -drop table "test.gt_mysql".varchar_db1.tb04; +drop table gt_mysql.varchar_db1.tb04; -drop table "test.gt_mysql".varchar_db1.tb05; +drop table gt_mysql.varchar_db1.tb05; -drop table "test.gt_mysql".varchar_db1.tb06; +drop table gt_mysql.varchar_db1.tb06; -drop table "test.gt_mysql".varchar_db1.tb07; +drop table gt_mysql.varchar_db1.tb07; -drop schema "test.gt_mysql".varchar_db1; +drop schema gt_mysql.varchar_db1; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00007_varchar.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00007_varchar.txt index cbad718d69b..a239050a776 100644 --- 
a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00007_varchar.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00007_varchar.txt @@ -4,7 +4,7 @@ USE CREATE TABLE -"CREATE TABLE ""test.gt_mysql"".varchar_db1.tb01 ( +"CREATE TABLE gt_mysql.varchar_db1.tb01 ( id integer, name char(20) ) @@ -15,7 +15,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_mysql"".varchar_db1.tb02 ( +"CREATE TABLE gt_mysql.varchar_db1.tb02 ( id integer, name char(255) ) @@ -28,7 +28,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_mysql"".varchar_db1.tb04 ( +"CREATE TABLE gt_mysql.varchar_db1.tb04 ( id integer, name varchar(250) ) @@ -39,7 +39,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_mysql"".varchar_db1.tb05 ( +"CREATE TABLE gt_mysql.varchar_db1.tb05 ( id integer, name varchar(256) ) @@ -50,7 +50,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_mysql"".varchar_db1.tb06 ( +"CREATE TABLE gt_mysql.varchar_db1.tb06 ( id integer, name char(1) ) @@ -61,7 +61,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_mysql"".varchar_db1.tb07 ( +"CREATE TABLE gt_mysql.varchar_db1.tb07 ( id integer, name varchar ) diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00008_alter_catalog.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00008_alter_catalog.sql index dba2caf829e..46d8b8c8034 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00008_alter_catalog.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00008_alter_catalog.sql @@ -7,7 +7,7 @@ call gravitino.system.create_catalog( ) ); -select * from gravitino.system.catalog where name = 'test.gt_mysql_xxx1'; +select * from gravitino.system.catalog where name = 'gt_mysql_xxx1'; call gravitino.system.alter_catalog( 'gt_mysql_xxx1', @@ -17,7 +17,7 @@ call gravitino.system.alter_catalog( ) ); -select * from gravitino.system.catalog where name = 
'test.gt_mysql_xxx1'; +select * from gravitino.system.catalog where name = 'gt_mysql_xxx1'; call gravitino.system.alter_catalog( 'gt_mysql_xxx1', @@ -25,7 +25,7 @@ call gravitino.system.alter_catalog( array['join-pushdown.strategy'] ); -select * from gravitino.system.catalog where name = 'test.gt_mysql_xxx1'; +select * from gravitino.system.catalog where name = 'gt_mysql_xxx1'; call gravitino.system.alter_catalog( catalog => 'gt_mysql_xxx1', @@ -36,6 +36,6 @@ call gravitino.system.alter_catalog( remove_properties => array['test_key'] ); -select * from gravitino.system.catalog where name = 'test.gt_mysql_xxx1'; +select * from gravitino.system.catalog where name = 'gt_mysql_xxx1'; call gravitino.system.drop_catalog('gt_mysql_xxx1'); \ No newline at end of file diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00008_alter_catalog.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00008_alter_catalog.txt index 6f62f062010..b0e3aac0dcc 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00008_alter_catalog.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-mysql/00008_alter_catalog.txt @@ -1,17 +1,17 @@ CALL -"test.gt_mysql_xxx1","jdbc-mysql","{""jdbc-url"":""jdbc:mysql://%/?useSSL=false"",""jdbc-user"":""trino"",""jdbc-password"":""ds123"",""jdbc-driver"":""com.mysql.cj.jdbc.Driver""}" +"gt_mysql_xxx1","jdbc-mysql","{""jdbc-url"":""jdbc:mysql://%/?useSSL=false"",""jdbc-user"":""trino"",""jdbc-password"":""ds123"",""jdbc-driver"":""com.mysql.cj.jdbc.Driver""}" CALL -"test.gt_mysql_xxx1","jdbc-mysql","{""join-pushdown.strategy"":""EAGER"",""jdbc-url"":""jdbc:mysql://%/?useSSL=false"",""jdbc-user"":""trino"",""jdbc-password"":""ds123"",""jdbc-driver"":""com.mysql.cj.jdbc.Driver"",""test_key"":""test_value""}" 
+"gt_mysql_xxx1","jdbc-mysql","{""join-pushdown.strategy"":""EAGER"",""jdbc-url"":""jdbc:mysql://%/?useSSL=false"",""jdbc-user"":""trino"",""jdbc-password"":""ds123"",""jdbc-driver"":""com.mysql.cj.jdbc.Driver"",""test_key"":""test_value""}" CALL -"test.gt_mysql_xxx1","jdbc-mysql","{""jdbc-url"":""jdbc:mysql://%/?useSSL=false"",""jdbc-user"":""trino"",""jdbc-password"":""ds123"",""jdbc-driver"":""com.mysql.cj.jdbc.Driver"",""test_key"":""test_value""}" +"gt_mysql_xxx1","jdbc-mysql","{""jdbc-url"":""jdbc:mysql://%/?useSSL=false"",""jdbc-user"":""trino"",""jdbc-password"":""ds123"",""jdbc-driver"":""com.mysql.cj.jdbc.Driver"",""test_key"":""test_value""}" CALL -"test.gt_mysql_xxx1","jdbc-mysql","{""join-pushdown.strategy"":""EAGER"",""jdbc-url"":""jdbc:mysql://%/?useSSL=false"",""jdbc-user"":""trino"",""jdbc-password"":""ds123"",""jdbc-driver"":""com.mysql.cj.jdbc.Driver""}" +"gt_mysql_xxx1","jdbc-mysql","{""join-pushdown.strategy"":""EAGER"",""jdbc-url"":""jdbc:mysql://%/?useSSL=false"",""jdbc-user"":""trino"",""jdbc-password"":""ds123"",""jdbc-driver"":""com.mysql.cj.jdbc.Driver""}" CALL \ No newline at end of file diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.sql index 0a2f81a7ef9..9705111cd44 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00000_create_table.sql @@ -1,10 +1,10 @@ -CREATE SCHEMA "test.gt_postgresql".gt_db1; +CREATE SCHEMA gt_postgresql.gt_db1; -CREATE TABLE "test.gt_postgresql".gt_db1.tb01 ( +CREATE TABLE gt_postgresql.gt_db1.tb01 ( name varchar, salary int ); -drop table "test.gt_postgresql".gt_db1.tb01; +drop table gt_postgresql.gt_db1.tb01; -drop schema "test.gt_postgresql".gt_db1; \ No newline at end of file +drop schema 
gt_postgresql.gt_db1; \ No newline at end of file diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.sql index e595cfabd11..b084c6568b3 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00001_select_table.sql @@ -1,27 +1,27 @@ -CREATE SCHEMA "test.gt_postgresql".gt_db1; +CREATE SCHEMA gt_postgresql.gt_db1; -CREATE TABLE "test.gt_postgresql".gt_db1.tb01 ( +CREATE TABLE gt_postgresql.gt_db1.tb01 ( name varchar, salary int ); -insert into "test.gt_postgresql".gt_db1.tb01(name, salary) values ('sam', 11); -insert into "test.gt_postgresql".gt_db1.tb01(name, salary) values ('jerry', 13); -insert into "test.gt_postgresql".gt_db1.tb01(name, salary) values ('bob', 14), ('tom', 12); +insert into gt_postgresql.gt_db1.tb01(name, salary) values ('sam', 11); +insert into gt_postgresql.gt_db1.tb01(name, salary) values ('jerry', 13); +insert into gt_postgresql.gt_db1.tb01(name, salary) values ('bob', 14), ('tom', 12); -select * from "test.gt_postgresql".gt_db1.tb01 order by name; +select * from gt_postgresql.gt_db1.tb01 order by name; -CREATE TABLE "test.gt_postgresql".gt_db1.tb02 ( +CREATE TABLE gt_postgresql.gt_db1.tb02 ( name varchar, salary int ); -insert into "test.gt_postgresql".gt_db1.tb02(name, salary) select distinct * from "test.gt_postgresql".gt_db1.tb01 order by name; +insert into gt_postgresql.gt_db1.tb02(name, salary) select * from gt_postgresql.gt_db1.tb01 order by name; -select * from "test.gt_postgresql".gt_db1.tb02 order by name; +select * from gt_postgresql.gt_db1.tb02 order by name; -drop table "test.gt_postgresql".gt_db1.tb02; +drop table gt_postgresql.gt_db1.tb02; -drop table "test.gt_postgresql".gt_db1.tb01; +drop table gt_postgresql.gt_db1.tb01; -drop 
schema "test.gt_postgresql".gt_db1; \ No newline at end of file +drop schema gt_postgresql.gt_db1; \ No newline at end of file diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.sql index 96214720c43..da78532d10e 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.sql @@ -1,35 +1,35 @@ -CREATE SCHEMA "test.gt_postgresql".gt_db1; +CREATE SCHEMA gt_postgresql.gt_db1; -CREATE TABLE "test.gt_postgresql".gt_db1.tb01 ( +CREATE TABLE gt_postgresql.gt_db1.tb01 ( name varchar, salary int, city int ); -alter table "test.gt_postgresql".gt_db1.tb01 rename to "test.gt_postgresql".gt_db1.tb03; -show tables from "test.gt_postgresql".gt_db1; +alter table gt_postgresql.gt_db1.tb01 rename to gt_postgresql.gt_db1.tb03; +show tables from gt_postgresql.gt_db1; -alter table "test.gt_postgresql".gt_db1.tb03 rename to "test.gt_postgresql".gt_db1.tb01; -show tables from "test.gt_postgresql".gt_db1; +alter table gt_postgresql.gt_db1.tb03 rename to gt_postgresql.gt_db1.tb01; +show tables from gt_postgresql.gt_db1; -alter table "test.gt_postgresql".gt_db1.tb01 drop column city; -show create table "test.gt_postgresql".gt_db1.tb01; +alter table gt_postgresql.gt_db1.tb01 drop column city; +show create table gt_postgresql.gt_db1.tb01; -alter table "test.gt_postgresql".gt_db1.tb01 alter column salary set data type bigint; -show create table "test.gt_postgresql".gt_db1.tb01; +alter table gt_postgresql.gt_db1.tb01 alter column salary set data type bigint; +show create table gt_postgresql.gt_db1.tb01; -comment on table "test.gt_postgresql".gt_db1.tb01 is 'test table comments'; -show create table "test.gt_postgresql".gt_db1.tb01; +comment on table gt_postgresql.gt_db1.tb01 is 'test 
table comments'; +show create table gt_postgresql.gt_db1.tb01; -alter table "test.gt_postgresql".gt_db1.tb01 rename column name to s; -show create table "test.gt_postgresql".gt_db1.tb01; +alter table gt_postgresql.gt_db1.tb01 rename column name to s; +show create table gt_postgresql.gt_db1.tb01; -comment on column "test.gt_postgresql".gt_db1.tb01.s is 'test column comments'; -show create table "test.gt_postgresql".gt_db1.tb01; +comment on column gt_postgresql.gt_db1.tb01.s is 'test column comments'; +show create table gt_postgresql.gt_db1.tb01; -alter table "test.gt_postgresql".gt_db1.tb01 add column city varchar comment 'aaa'; -show create table "test.gt_postgresql".gt_db1.tb01; +alter table gt_postgresql.gt_db1.tb01 add column city varchar comment 'aaa'; +show create table gt_postgresql.gt_db1.tb01; -drop table "test.gt_postgresql".gt_db1.tb01; +drop table gt_postgresql.gt_db1.tb01; -drop schema "test.gt_postgresql".gt_db1; +drop schema gt_postgresql.gt_db1; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.txt index 72fb2754ff4..e34adddb64f 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00002_alter_table.txt @@ -12,7 +12,7 @@ RENAME TABLE DROP COLUMN -"CREATE TABLE ""test.gt_postgresql"".gt_db1.tb01 ( +"CREATE TABLE gt_postgresql.gt_db1.tb01 ( name varchar, salary integer ) @@ -20,7 +20,7 @@ COMMENT ''" SET COLUMN TYPE -"CREATE TABLE ""test.gt_postgresql"".gt_db1.tb01 ( +"CREATE TABLE gt_postgresql.gt_db1.tb01 ( name varchar, salary bigint ) @@ -28,7 +28,7 @@ COMMENT ''" COMMENT -"CREATE TABLE ""test.gt_postgresql"".gt_db1.tb01 ( +"CREATE TABLE gt_postgresql.gt_db1.tb01 ( name varchar, salary bigint ) @@ -36,7 +36,7 @@ COMMENT 'test table comments'" RENAME 
COLUMN -"CREATE TABLE ""test.gt_postgresql"".gt_db1.tb01 ( +"CREATE TABLE gt_postgresql.gt_db1.tb01 ( s varchar, salary bigint ) @@ -44,7 +44,7 @@ COMMENT 'test table comments'" COMMENT -"CREATE TABLE ""test.gt_postgresql"".gt_db1.tb01 ( +"CREATE TABLE gt_postgresql.gt_db1.tb01 ( s varchar COMMENT 'test column comments', salary bigint ) @@ -52,7 +52,7 @@ COMMENT 'test table comments'" ADD COLUMN -"CREATE TABLE ""test.gt_postgresql"".gt_db1.tb01 ( +"CREATE TABLE gt_postgresql.gt_db1.tb01 ( s varchar COMMENT 'test column comments', salary bigint, city varchar COMMENT 'aaa' diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00003_join_pushdown.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00003_join_pushdown.sql index aca90645814..a0c47d71625 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00003_join_pushdown.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00003_join_pushdown.sql @@ -1,15 +1,15 @@ -CREATE SCHEMA "test.gt_postgresql".gt_db1; +CREATE SCHEMA gt_postgresql.gt_db1; -use "test.gt_postgresql".gt_db1; +use gt_postgresql.gt_db1; -CREATE TABLE "test.gt_postgresql".gt_db1.employee_performance ( +CREATE TABLE gt_postgresql.gt_db1.employee_performance ( employee_id integer, evaluation_date date, rating integer ) COMMENT 'comment'; -CREATE TABLE "test.gt_postgresql".gt_db1.employees ( +CREATE TABLE gt_postgresql.gt_db1.employees ( employee_id integer, department_id integer, job_title varchar(100), @@ -20,7 +20,7 @@ CREATE TABLE "test.gt_postgresql".gt_db1.employees ( ) COMMENT 'comment'; -INSERT INTO "test.gt_postgresql".gt_db1.employee_performance (employee_id, evaluation_date, rating) VALUES +INSERT INTO gt_postgresql.gt_db1.employee_performance (employee_id, evaluation_date, rating) VALUES (1, DATE '2018-02-24', 4), (1, DATE '2016-12-25', 7), (1, DATE '2023-04-07', 4), @@ -32,7 +32,7 @@ INSERT INTO 
"test.gt_postgresql".gt_db1.employee_performance (employee_id, evalu (3, DATE '2021-01-05', 6), (3, DATE '2014-10-24', 4); -INSERT INTO "test.gt_postgresql".gt_db1.employees (employee_id, department_id, job_title, given_name, family_name, birth_date, hire_date) VALUES +INSERT INTO gt_postgresql.gt_db1.employees (employee_id, department_id, job_title, given_name, family_name, birth_date, hire_date) VALUES (1, 1, 'Manager', 'Gregory', 'Smith', DATE '1968-04-15', DATE '2014-06-04'), (2, 1, 'Sales Assistant', 'Owen', 'Rivers', DATE '1988-08-13', DATE '2021-02-05'), (3, 1, 'Programmer', 'Avram', 'Lawrence', DATE '1969-11-21', DATE '2010-09-29'), @@ -48,14 +48,14 @@ SELECT given_name, family_name, rating -FROM "test.gt_postgresql".gt_db1.employee_performance AS p -JOIN "test.gt_postgresql".gt_db1.employees AS e +FROM gt_postgresql.gt_db1.employee_performance AS p +JOIN gt_postgresql.gt_db1.employees AS e ON p.employee_id = e.employee_id ORDER BY rating DESC, given_name LIMIT 10; -drop table "test.gt_postgresql".gt_db1.employee_performance; -drop table "test.gt_postgresql".gt_db1.employees; +drop table gt_postgresql.gt_db1.employee_performance; +drop table gt_postgresql.gt_db1.employees; -drop schema "test.gt_postgresql".gt_db1; \ No newline at end of file +drop schema gt_postgresql.gt_db1; \ No newline at end of file diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00004_query_pushdown.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00004_query_pushdown.sql index d5a16ec5880..59760a2a31e 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00004_query_pushdown.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00004_query_pushdown.sql @@ -1,6 +1,6 @@ -CREATE SCHEMA "test.gt_postgresql".gt_db1; +CREATE SCHEMA gt_postgresql.gt_db1; -use "test.gt_postgresql".gt_db1; +use gt_postgresql.gt_db1; CREATE TABLE customer ( custkey 
bigint NOT NULL, @@ -48,4 +48,4 @@ drop table customer; drop table orders; -drop schema "test.gt_postgresql".gt_db1;; +drop schema gt_postgresql.gt_db1;; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00004_query_pushdown.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00004_query_pushdown.txt index d5fe30a8584..a823283fe1e 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00004_query_pushdown.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00004_query_pushdown.txt @@ -12,35 +12,35 @@ INSERT: 15000 rows "Trino version: % % - └─ TableScan[table = test.gt_postgresql:gt_db1.customer gt_db1.customer limit=10 columns=[custkey:bigint:int8]] + └─ TableScan[table = gt_postgresql:gt_db1.customer gt_db1.customer limit=10 columns=[custkey:bigint:int8]] Layout: [custkey:bigint] % " "Trino version: % % - └─ TableScan[table = test.gt_postgresql:gt_db1.customer gt_db1.customer constraints=[ParameterizedExpression[expression=(""phone"") LIKE (?), parameters=[QueryParameter{jdbcType=Optional.empty, type=varchar(6), value=Optional[Slice{base=[B@%, baseOffset=0, length=6}]}]]] limit=10] + └─ TableScan[table = gt_postgresql:gt_db1.customer gt_db1.customer constraints=[ParameterizedExpression[expression=(""phone"") LIKE (?), parameters=[QueryParameter{jdbcType=Optional.empty, type=varchar(6), value=Optional[Slice{base=[B@%, baseOffset=0, length=6}]}]]] limit=10] Layout: [custkey:bigint, name:varchar(25), address:varchar(40), nationkey:bigint, phone:varchar(15), acctbal:decimal(12,2), mktsegment:varchar(10), comment:varchar(117)] % " "Trino version: % % - └─ TableScan[table = test.gt_postgresql:Query[SELECT sum(""totalprice"") AS ""_pfgnrtd_0"" FROM ""gt_db1"".""orders""] columns=[_pfgnrtd_0:decimal(38,2):decimal]] + └─ TableScan[table = gt_postgresql:Query[SELECT sum(""totalprice"") AS ""_pfgnrtd_0"" FROM ""gt_db1"".""orders""] 
columns=[_pfgnrtd_0:decimal(38,2):decimal]] Layout: [_pfgnrtd:decimal(38,2)] % " "Trino version: % % - └─ TableScan[table = test.gt_postgresql:Query[SELECT ""orderdate"", sum(""totalprice"") AS ""_pfgnrtd_0"" FROM ""gt_db1"".""orders"" GROUP BY ""orderdate""] sortOrder=[orderdate:date:date ASC NULLS LAST] limit=10 columns=[orderdate:date:date, _pfgnrtd_0:decimal(38,2):decimal]] + └─ TableScan[table = gt_postgresql:Query[SELECT ""orderdate"", sum(""totalprice"") AS ""_pfgnrtd_0"" FROM ""gt_db1"".""orders"" GROUP BY ""orderdate""] sortOrder=[orderdate:date:date ASC NULLS LAST] limit=10 columns=[orderdate:date:date, _pfgnrtd_0:decimal(38,2):decimal]] Layout: [orderdate:date, _pfgnrtd:decimal(38,2)] % " "Trino version: % % - TableScan[table = test.gt_postgresql:Query[SELECT % INNER JOIN %] limit=10 columns=%] + TableScan[table = gt_postgresql:Query[SELECT % INNER JOIN %] limit=10 columns=%] % " diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.sql index d67997d2b8c..0f3327f1045 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.sql @@ -1,6 +1,6 @@ -CREATE SCHEMA "test.gt_postgresql".gt_db1; +CREATE SCHEMA gt_postgresql.gt_db1; -USE "test.gt_postgresql".gt_db1; +USE gt_postgresql.gt_db1; -- Unsupported Type: TINYINT CREATE TABLE tb01 ( @@ -69,4 +69,4 @@ drop table tb01; drop table tb02; -drop schema "test.gt_postgresql".gt_db1 cascade; +drop schema gt_postgresql.gt_db1 cascade; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.txt index 4c3789a3513..42a88b77053 100644 --- 
a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00006_datatype.txt @@ -4,7 +4,7 @@ USE CREATE TABLE -"CREATE TABLE ""test.gt_postgresql"".gt_db1.tb01 ( +"CREATE TABLE gt_postgresql.gt_db1.tb01 ( f1 varchar(200), f2 char(20), f3 varbinary, @@ -31,7 +31,7 @@ INSERT: 1 row CREATE TABLE -"CREATE TABLE ""test.gt_postgresql"".gt_db1.tb02 ( +"CREATE TABLE gt_postgresql.gt_db1.tb02 ( f1 varchar(200) NOT NULL, f2 char(20) NOT NULL, f3 varbinary NOT NULL, diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00007_varchar.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00007_varchar.sql index b21f99f9d2a..8e911501beb 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00007_varchar.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00007_varchar.sql @@ -1,46 +1,46 @@ -CREATE SCHEMA "test.gt_postgresql".varchar_db1; +CREATE SCHEMA gt_postgresql.varchar_db1; -USE "test.gt_postgresql".varchar_db1; +USE gt_postgresql.varchar_db1; CREATE TABLE tb01 (id int, name char(20)); -SHOW CREATE TABLE "test.gt_postgresql".varchar_db1.tb01; +SHOW CREATE TABLE gt_postgresql.varchar_db1.tb01; CREATE TABLE tb02 (id int, name char(65536)); -SHOW CREATE TABLE "test.gt_postgresql".varchar_db1.tb02; +SHOW CREATE TABLE gt_postgresql.varchar_db1.tb02; CREATE TABLE tb03 (id int, name char(65537)); CREATE TABLE tb04 (id int, name varchar(250)); -SHOW CREATE TABLE "test.gt_postgresql".varchar_db1.tb04; +SHOW CREATE TABLE gt_postgresql.varchar_db1.tb04; CREATE TABLE tb05 (id int, name varchar(10485760)); -SHOW CREATE TABLE "test.gt_postgresql".varchar_db1.tb05; +SHOW CREATE TABLE gt_postgresql.varchar_db1.tb05; CREATE TABLE tb06 (id int, name varchar(10485761)); CREATE TABLE tb06 (id int, name char); -SHOW CREATE TABLE 
"test.gt_postgresql".varchar_db1.tb06; +SHOW CREATE TABLE gt_postgresql.varchar_db1.tb06; CREATE TABLE tb07 (id int, name varchar); -SHOW CREATE TABLE "test.gt_postgresql".varchar_db1.tb07; +SHOW CREATE TABLE gt_postgresql.varchar_db1.tb07; -drop table "test.gt_postgresql".varchar_db1.tb01; +drop table gt_postgresql.varchar_db1.tb01; -drop table "test.gt_postgresql".varchar_db1.tb02; +drop table gt_postgresql.varchar_db1.tb02; -drop table "test.gt_postgresql".varchar_db1.tb04; +drop table gt_postgresql.varchar_db1.tb04; -drop table "test.gt_postgresql".varchar_db1.tb05; +drop table gt_postgresql.varchar_db1.tb05; -drop table "test.gt_postgresql".varchar_db1.tb06; +drop table gt_postgresql.varchar_db1.tb06; -drop table "test.gt_postgresql".varchar_db1.tb07; +drop table gt_postgresql.varchar_db1.tb07; -drop schema "test.gt_postgresql".varchar_db1; +drop schema gt_postgresql.varchar_db1; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00007_varchar.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00007_varchar.txt index f7776c53bec..ad1e5b13084 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00007_varchar.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/jdbc-postgresql/00007_varchar.txt @@ -4,7 +4,7 @@ USE CREATE TABLE -"CREATE TABLE ""test.gt_postgresql"".varchar_db1.tb01 ( +"CREATE TABLE gt_postgresql.varchar_db1.tb01 ( id integer, name char(20) ) @@ -12,7 +12,7 @@ COMMENT ''" CREATE TABLE -"CREATE TABLE ""test.gt_postgresql"".varchar_db1.tb02 ( +"CREATE TABLE gt_postgresql.varchar_db1.tb02 ( id integer, name char(65536) ) @@ -22,7 +22,7 @@ COMMENT ''" CREATE TABLE -"CREATE TABLE ""test.gt_postgresql"".varchar_db1.tb04 ( +"CREATE TABLE gt_postgresql.varchar_db1.tb04 ( id integer, name varchar(250) ) @@ -30,7 +30,7 @@ COMMENT ''" CREATE TABLE -"CREATE TABLE ""test.gt_postgresql"".varchar_db1.tb05 ( +"CREATE TABLE 
gt_postgresql.varchar_db1.tb05 ( id integer, name varchar(10485760) ) @@ -40,7 +40,7 @@ COMMENT ''" CREATE TABLE -"CREATE TABLE ""test.gt_postgresql"".varchar_db1.tb06 ( +"CREATE TABLE gt_postgresql.varchar_db1.tb06 ( id integer, name char(1) ) @@ -48,7 +48,7 @@ COMMENT ''" CREATE TABLE -"CREATE TABLE ""test.gt_postgresql"".varchar_db1.tb07 ( +"CREATE TABLE gt_postgresql.varchar_db1.tb07 ( id integer, name varchar ) diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.sql index 83e47dda274..a2936fb1316 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.sql @@ -1,13 +1,13 @@ -CREATE SCHEMA "test.gt_iceberg".gt_db2; +CREATE SCHEMA gt_iceberg.gt_db2; -CREATE TABLE "test.gt_iceberg".gt_db2.tb01( +CREATE TABLE gt_iceberg.gt_db2.tb01( name varchar, salary int ); -show create table "test.gt_iceberg".gt_db2.tb01; +show create table gt_iceberg.gt_db2.tb01; -CREATE TABLE "test.gt_iceberg".gt_db2.tb02 ( +CREATE TABLE gt_iceberg.gt_db2.tb02 ( name varchar, salary int ) with ( @@ -15,9 +15,9 @@ CREATE TABLE "test.gt_iceberg".gt_db2.tb02 ( sorted_by = ARRAY['salary'] ); -show create table "test.gt_iceberg".gt_db2.tb02; +show create table gt_iceberg.gt_db2.tb02; -CREATE TABLE "test.gt_iceberg".gt_db2.tb03 ( +CREATE TABLE gt_iceberg.gt_db2.tb03 ( name varchar, salary int ) with ( @@ -25,7 +25,7 @@ CREATE TABLE "test.gt_iceberg".gt_db2.tb03 ( sorted_by = ARRAY['salary_wrong_name'] ); -CREATE TABLE "test.gt_iceberg".gt_db2.tb03 ( +CREATE TABLE gt_iceberg.gt_db2.tb03 ( name varchar, salary int ) with ( @@ -33,46 +33,46 @@ CREATE TABLE "test.gt_iceberg".gt_db2.tb03 ( sorted_by = ARRAY['name'] ); -show create table "test.gt_iceberg".gt_db2.tb03; +show create 
table gt_iceberg.gt_db2.tb03; -CREATE TABLE "test.gt_iceberg".gt_db2.tb04 ( +CREATE TABLE gt_iceberg.gt_db2.tb04 ( name varchar, salary int ) with ( sorted_by = ARRAY['name'] ); -show create table "test.gt_iceberg".gt_db2.tb04; +show create table gt_iceberg.gt_db2.tb04; -CREATE TABLE "test.gt_iceberg".gt_db2.tb05 ( +CREATE TABLE gt_iceberg.gt_db2.tb05 ( name varchar, salary int ) with ( partitioning = ARRAY['name'] ); -show create table "test.gt_iceberg".gt_db2.tb05; +show create table gt_iceberg.gt_db2.tb05; -CREATE TABLE "test.gt_iceberg".gt_db2.tb06 ( +CREATE TABLE gt_iceberg.gt_db2.tb06 ( name varchar, salary int ) with ( location = '${hdfs_uri}/user/iceberg/warehouse/TrinoQueryIT/gt_iceberg/gt_db2/tb06' ); -show create table "test.gt_iceberg".gt_db2.tb06; +show create table gt_iceberg.gt_db2.tb06; -drop table "test.gt_iceberg".gt_db2.tb01; +drop table gt_iceberg.gt_db2.tb01; -drop table "test.gt_iceberg".gt_db2.tb02; +drop table gt_iceberg.gt_db2.tb02; -drop table "test.gt_iceberg".gt_db2.tb03; +drop table gt_iceberg.gt_db2.tb03; -drop table "test.gt_iceberg".gt_db2.tb04; +drop table gt_iceberg.gt_db2.tb04; -drop table "test.gt_iceberg".gt_db2.tb05; +drop table gt_iceberg.gt_db2.tb05; -drop table "test.gt_iceberg".gt_db2.tb06; +drop table gt_iceberg.gt_db2.tb06; -drop schema "test.gt_iceberg".gt_db2; \ No newline at end of file +drop schema gt_iceberg.gt_db2; \ No newline at end of file diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.txt index a8fd39e3409..c0ba8a4044b 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00000_create_table.txt @@ -2,7 +2,7 @@ CREATE SCHEMA CREATE TABLE -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb01 ( +"CREATE TABLE 
gt_iceberg.gt_db2.tb01 ( name varchar, salary integer ) @@ -10,7 +10,7 @@ COMMENT ''" CREATE TABLE -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb02 ( +"CREATE TABLE gt_iceberg.gt_db2.tb02 ( name varchar, salary integer ) @@ -24,7 +24,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb03 ( +"CREATE TABLE gt_iceberg.gt_db2.tb03 ( name varchar, salary integer ) @@ -36,7 +36,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb04 ( +"CREATE TABLE gt_iceberg.gt_db2.tb04 ( name varchar, salary integer ) @@ -47,7 +47,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb05 ( +"CREATE TABLE gt_iceberg.gt_db2.tb05 ( name varchar, salary integer ) @@ -58,7 +58,7 @@ WITH ( CREATE TABLE -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb06 ( +"CREATE TABLE gt_iceberg.gt_db2.tb06 ( name varchar, salary integer ) diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00001_select_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00001_select_table.sql index 9ec231fd164..16c2b23ee27 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00001_select_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00001_select_table.sql @@ -1,28 +1,28 @@ -CREATE SCHEMA "test.gt_iceberg".gt_db2; +CREATE SCHEMA gt_iceberg.gt_db2; -CREATE TABLE "test.gt_iceberg".gt_db2.tb01 ( +CREATE TABLE gt_iceberg.gt_db2.tb01 ( name varchar, salary int ); -insert into "test.gt_iceberg".gt_db2.tb01(name, salary) values ('sam', 11); -insert into "test.gt_iceberg".gt_db2.tb01(name, salary) values ('jerry', 13); -insert into "test.gt_iceberg".gt_db2.tb01(name, salary) values ('bob', 14), ('tom', 12); +insert into gt_iceberg.gt_db2.tb01(name, salary) values ('sam', 11); +insert into gt_iceberg.gt_db2.tb01(name, salary) values ('jerry', 13); +insert into gt_iceberg.gt_db2.tb01(name, salary) values ('bob', 14), ('tom', 
12); -select * from "test.gt_iceberg".gt_db2.tb01 order by name; +select * from gt_iceberg.gt_db2.tb01 order by name; -CREATE TABLE "test.gt_iceberg".gt_db2.tb02 ( +CREATE TABLE gt_iceberg.gt_db2.tb02 ( name varchar, salary int ); -insert into "test.gt_iceberg".gt_db2.tb02(name, salary) select distinct * from "test.gt_iceberg".gt_db2.tb01 order by name; +insert into gt_iceberg.gt_db2.tb02(name, salary) select * from gt_iceberg.gt_db2.tb01 order by name; -select * from "test.gt_iceberg".gt_db2.tb02 order by name; +select * from gt_iceberg.gt_db2.tb02 order by name; -drop table "test.gt_iceberg".gt_db2.tb02; +drop table gt_iceberg.gt_db2.tb02; -drop table "test.gt_iceberg".gt_db2.tb01; +drop table gt_iceberg.gt_db2.tb01; -drop schema "test.gt_iceberg".gt_db2; +drop schema gt_iceberg.gt_db2; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00002_alter_table.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00002_alter_table.sql index 3450a23a2d2..b31831e3570 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00002_alter_table.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00002_alter_table.sql @@ -1,35 +1,35 @@ -CREATE SCHEMA "test.gt_iceberg".gt_db2; +CREATE SCHEMA gt_iceberg.gt_db2; -CREATE TABLE "test.gt_iceberg".gt_db2.tb01 ( +CREATE TABLE gt_iceberg.gt_db2.tb01 ( name varchar, salary int, city int ); -alter table "test.gt_iceberg".gt_db2.tb01 rename to "test.gt_iceberg".gt_db2.tb03; -show tables from "test.gt_iceberg".gt_db2; +alter table gt_iceberg.gt_db2.tb01 rename to gt_iceberg.gt_db2.tb03; +show tables from gt_iceberg.gt_db2; -alter table "test.gt_iceberg".gt_db2.tb03 rename to "test.gt_iceberg".gt_db2.tb01; -show tables from "test.gt_iceberg".gt_db2; +alter table gt_iceberg.gt_db2.tb03 rename to gt_iceberg.gt_db2.tb01; +show tables from gt_iceberg.gt_db2; -alter table 
"test.gt_iceberg".gt_db2.tb01 drop column city; -show create table "test.gt_iceberg".gt_db2.tb01; +alter table gt_iceberg.gt_db2.tb01 drop column city; +show create table gt_iceberg.gt_db2.tb01; -alter table "test.gt_iceberg".gt_db2.tb01 rename column name to s; -show create table "test.gt_iceberg".gt_db2.tb01; +alter table gt_iceberg.gt_db2.tb01 rename column name to s; +show create table gt_iceberg.gt_db2.tb01; -alter table "test.gt_iceberg".gt_db2.tb01 alter column salary set data type bigint; -show create table "test.gt_iceberg".gt_db2.tb01; +alter table gt_iceberg.gt_db2.tb01 alter column salary set data type bigint; +show create table gt_iceberg.gt_db2.tb01; -comment on table "test.gt_iceberg".gt_db2.tb01 is 'test table comments'; -show create table "test.gt_iceberg".gt_db2.tb01; +comment on table gt_iceberg.gt_db2.tb01 is 'test table comments'; +show create table gt_iceberg.gt_db2.tb01; -comment on column "test.gt_iceberg".gt_db2.tb01.s is 'test column comments'; -show create table "test.gt_iceberg".gt_db2.tb01; +comment on column gt_iceberg.gt_db2.tb01.s is 'test column comments'; +show create table gt_iceberg.gt_db2.tb01; -alter table "test.gt_iceberg".gt_db2.tb01 add column city varchar comment 'aaa'; -show create table "test.gt_iceberg".gt_db2.tb01; +alter table gt_iceberg.gt_db2.tb01 add column city varchar comment 'aaa'; +show create table gt_iceberg.gt_db2.tb01; -drop table "test.gt_iceberg".gt_db2.tb01; +drop table gt_iceberg.gt_db2.tb01; -drop schema "test.gt_iceberg".gt_db2; +drop schema gt_iceberg.gt_db2; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00002_alter_table.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00002_alter_table.txt index e979156e512..969d40b0eb8 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00002_alter_table.txt +++ 
b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00002_alter_table.txt @@ -12,7 +12,7 @@ RENAME TABLE DROP COLUMN -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb01 ( +"CREATE TABLE gt_iceberg.gt_db2.tb01 ( name varchar, salary integer ) @@ -20,7 +20,7 @@ COMMENT ''" RENAME COLUMN -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb01 ( +"CREATE TABLE gt_iceberg.gt_db2.tb01 ( s varchar, salary integer ) @@ -28,7 +28,7 @@ COMMENT ''" SET COLUMN TYPE -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb01 ( +"CREATE TABLE gt_iceberg.gt_db2.tb01 ( s varchar, salary bigint ) @@ -36,7 +36,7 @@ COMMENT ''" COMMENT -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb01 ( +"CREATE TABLE gt_iceberg.gt_db2.tb01 ( s varchar, salary bigint ) @@ -44,7 +44,7 @@ COMMENT 'test table comments'" COMMENT -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb01 ( +"CREATE TABLE gt_iceberg.gt_db2.tb01 ( s varchar COMMENT 'test column comments', salary bigint ) @@ -52,7 +52,7 @@ COMMENT 'test table comments'" ADD COLUMN -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb01 ( +"CREATE TABLE gt_iceberg.gt_db2.tb01 ( s varchar COMMENT 'test column comments', salary bigint, city varchar COMMENT 'aaa' diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00006_datatype.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00006_datatype.sql index c3d7890550a..72854588809 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00006_datatype.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00006_datatype.sql @@ -1,6 +1,6 @@ -CREATE SCHEMA "test.gt_iceberg".gt_db2; +CREATE SCHEMA gt_iceberg.gt_db2; -USE "test.gt_iceberg".gt_db2; +USE gt_iceberg.gt_db2; -- Unsupported Type: TINYINT, SMALLINT CREATE TABLE tb01 ( @@ -64,4 +64,4 @@ drop table tb01; drop table tb02; -drop schema "test.gt_iceberg".gt_db2; +drop schema gt_iceberg.gt_db2; diff --git 
a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00006_datatype.txt b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00006_datatype.txt index 8ff0979aa96..915539adaf5 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00006_datatype.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00006_datatype.txt @@ -4,7 +4,7 @@ USE CREATE TABLE -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb01 ( +"CREATE TABLE gt_iceberg.gt_db2.tb01 ( f1 varchar, f3 varbinary, f4 decimal(10, 3), @@ -30,7 +30,7 @@ INSERT: 1 row CREATE TABLE -"CREATE TABLE ""test.gt_iceberg"".gt_db2.tb02 ( +"CREATE TABLE gt_iceberg.gt_db2.tb02 ( f1 varchar NOT NULL, f3 varbinary NOT NULL, f4 decimal(10, 3) NOT NULL, diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00007_varchar.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00007_varchar.sql index 3e19bf5a15b..ab17351747b 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00007_varchar.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00007_varchar.sql @@ -1,6 +1,6 @@ -CREATE SCHEMA "test.gt_iceberg".varchar_db2; +CREATE SCHEMA gt_iceberg.varchar_db2; -USE "test.gt_iceberg".varchar_db2; +USE gt_iceberg.varchar_db2; CREATE TABLE tb01 (id int, name char(20)); @@ -10,9 +10,9 @@ CREATE TABLE tb03 (id int, name varchar(233)); CREATE TABLE tb04 (id int, name varchar); -SHOW CREATE TABLE "test.gt_iceberg".varchar_db2.tb04; +SHOW CREATE TABLE gt_iceberg.varchar_db2.tb04; -drop table "test.gt_iceberg".varchar_db2.tb04; +drop table gt_iceberg.varchar_db2.tb04; -drop schema "test.gt_iceberg".varchar_db2; +drop schema gt_iceberg.varchar_db2; diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00007_varchar.txt 
b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00007_varchar.txt index 58e38b89f4a..c7f7ab14e44 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00007_varchar.txt +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/lakehouse-iceberg/00007_varchar.txt @@ -10,7 +10,7 @@ USE CREATE TABLE -"CREATE TABLE ""test.gt_iceberg"".varchar_db2.tb04 ( +"CREATE TABLE gt_iceberg.varchar_db2.tb04 ( id integer, name varchar ) diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/tpcds/catalog_mysql_prepare.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/tpcds/catalog_mysql_prepare.sql index 81ca5c14654..86bd2f72a6e 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/tpcds/catalog_mysql_prepare.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/tpcds/catalog_mysql_prepare.sql @@ -9,8 +9,8 @@ call gravitino.system.create_catalog( show catalogs; -create schema "test.gt_mysql1".gt_tpcds; -use "test.gt_mysql1".gt_tpcds; +create schema gt_mysql1.gt_tpcds; +use gt_mysql1.gt_tpcds; CREATE TABLE call_center ( cc_call_center_sk bigint, diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_hive_prepare.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_hive_prepare.sql index cc015bccbdf..2fb4294af7d 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_hive_prepare.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_hive_prepare.sql @@ -7,8 +7,8 @@ call gravitino.system.create_catalog( ) ); -create schema "test.gt_hive2".gt_tpch; -use "test.gt_hive2".gt_tpch; +create schema gt_hive2.gt_tpch; +use gt_hive2.gt_tpch; CREATE TABLE customer ( custkey bigint, diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_iceberg_prepare.sql 
b/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_iceberg_prepare.sql index dfd439612d3..a5a0669f839 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_iceberg_prepare.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_iceberg_prepare.sql @@ -7,8 +7,8 @@ call gravitino.system.create_catalog( ) ); -create schema "test.gt_iceberg2".gt_tpch2; -use "test.gt_iceberg2".gt_tpch2; +create schema gt_iceberg2.gt_tpch2; +use gt_iceberg2.gt_tpch2; CREATE TABLE customer ( custkey bigint, diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_mysql_prepare.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_mysql_prepare.sql index 3376ab82977..5f42569f2fc 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_mysql_prepare.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_mysql_prepare.sql @@ -7,8 +7,8 @@ call gravitino.system.create_catalog( ) ); -create schema "test.gt_mysql2".gt_tpch; -use "test.gt_mysql2".gt_tpch; +create schema gt_mysql2.gt_tpch; +use gt_mysql2.gt_tpch; CREATE TABLE customer ( custkey bigint NOT NULL, diff --git a/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_postgresql_prepare.sql b/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_postgresql_prepare.sql index d4b8444a016..bacbc98ea0d 100644 --- a/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_postgresql_prepare.sql +++ b/integration-test/src/test/resources/trino-ci-testset/testsets/tpch/catalog_postgresql_prepare.sql @@ -7,8 +7,8 @@ call gravitino.system.create_catalog( ) ); -create schema "test.gt_postgresql2".gt_tpch; -use "test.gt_postgresql2".gt_tpch; +create schema gt_postgresql2.gt_tpch; +use gt_postgresql2.gt_tpch; CREATE TABLE customer ( custkey bigint NOT NULL, diff --git 
a/server-common/src/main/java/com/datastrato/gravitino/server/authentication/KerberosConfig.java b/server-common/src/main/java/com/datastrato/gravitino/server/authentication/KerberosConfig.java index 53573cda7e1..bdb4019e16b 100644 --- a/server-common/src/main/java/com/datastrato/gravitino/server/authentication/KerberosConfig.java +++ b/server-common/src/main/java/com/datastrato/gravitino/server/authentication/KerberosConfig.java @@ -4,13 +4,12 @@ */ package com.datastrato.gravitino.server.authentication; -import com.datastrato.gravitino.Configs; import com.datastrato.gravitino.config.ConfigBuilder; import com.datastrato.gravitino.config.ConfigConstants; import com.datastrato.gravitino.config.ConfigEntry; import org.apache.commons.lang3.StringUtils; -public interface KerberosConfig extends Configs { +public interface KerberosConfig { String KERBEROS_CONFIG_PREFIX = "gravitino.authenticator.kerberos."; diff --git a/server-common/src/main/java/com/datastrato/gravitino/server/authentication/OAuthConfig.java b/server-common/src/main/java/com/datastrato/gravitino/server/authentication/OAuthConfig.java index 820dec0578e..70f44c9600f 100644 --- a/server-common/src/main/java/com/datastrato/gravitino/server/authentication/OAuthConfig.java +++ b/server-common/src/main/java/com/datastrato/gravitino/server/authentication/OAuthConfig.java @@ -5,14 +5,14 @@ package com.datastrato.gravitino.server.authentication; -import com.datastrato.gravitino.Configs; import com.datastrato.gravitino.config.ConfigBuilder; import com.datastrato.gravitino.config.ConfigConstants; import com.datastrato.gravitino.config.ConfigEntry; import io.jsonwebtoken.SignatureAlgorithm; import org.apache.commons.lang3.StringUtils; -public interface OAuthConfig extends Configs { +public interface OAuthConfig { + String OAUTH_CONFIG_PREFIX = "gravitino.authenticator.oauth."; ConfigEntry SERVICE_AUDIENCE = diff --git a/server/src/main/java/com/datastrato/gravitino/server/web/rest/OperationType.java 
b/server/src/main/java/com/datastrato/gravitino/server/web/rest/OperationType.java index 78a4f56aa0a..6daacfc822e 100644 --- a/server/src/main/java/com/datastrato/gravitino/server/web/rest/OperationType.java +++ b/server/src/main/java/com/datastrato/gravitino/server/web/rest/OperationType.java @@ -13,5 +13,6 @@ public enum OperationType { /** This is a special operation type that is used to get a partition from a table. */ GET, ADD, - REMOVE + REMOVE, + DELETE } diff --git a/server/src/main/java/com/datastrato/gravitino/server/web/rest/RoleOperations.java b/server/src/main/java/com/datastrato/gravitino/server/web/rest/RoleOperations.java index c89f27d0a7c..14b8d331179 100644 --- a/server/src/main/java/com/datastrato/gravitino/server/web/rest/RoleOperations.java +++ b/server/src/main/java/com/datastrato/gravitino/server/web/rest/RoleOperations.java @@ -11,7 +11,7 @@ import com.datastrato.gravitino.authorization.Privileges; import com.datastrato.gravitino.authorization.SecurableObjects; import com.datastrato.gravitino.dto.requests.RoleCreateRequest; -import com.datastrato.gravitino.dto.responses.DropResponse; +import com.datastrato.gravitino.dto.responses.DeleteResponse; import com.datastrato.gravitino.dto.responses.RoleResponse; import com.datastrato.gravitino.dto.util.DTOConverters; import com.datastrato.gravitino.metrics.MetricNames; @@ -44,18 +44,18 @@ public RoleOperations() { @GET @Path("{role}") @Produces("application/vnd.gravitino.v1+json") - @Timed(name = "load-role." + MetricNames.HTTP_PROCESS_DURATION, absolute = true) - @ResponseMetered(name = "load-role", absolute = true) - public Response loadRole(@PathParam("metalake") String metalake, @PathParam("role") String role) { + @Timed(name = "get-role." 
+ MetricNames.HTTP_PROCESS_DURATION, absolute = true) + @ResponseMetered(name = "get-role", absolute = true) + public Response getRole(@PathParam("metalake") String metalake, @PathParam("role") String role) { try { return Utils.doAs( httpRequest, () -> Utils.ok( new RoleResponse( - DTOConverters.toDTO(accessControlManager.loadRole(metalake, role))))); + DTOConverters.toDTO(accessControlManager.getRole(metalake, role))))); } catch (Exception e) { - return ExceptionHandlers.handleRoleException(OperationType.LOAD, role, metalake, e); + return ExceptionHandlers.handleRoleException(OperationType.GET, role, metalake, e); } } @@ -63,7 +63,7 @@ public Response loadRole(@PathParam("metalake") String metalake, @PathParam("rol @Produces("application/vnd.gravitino.v1+json") @Timed(name = "create-role." + MetricNames.HTTP_PROCESS_DURATION, absolute = true) @ResponseMetered(name = "create-role", absolute = true) - public Response creatRole(@PathParam("metalake") String metalake, RoleCreateRequest request) { + public Response createRole(@PathParam("metalake") String metalake, RoleCreateRequest request) { try { return Utils.doAs( httpRequest, @@ -88,21 +88,22 @@ public Response creatRole(@PathParam("metalake") String metalake, RoleCreateRequ @DELETE @Path("{role}") @Produces("application/vnd.gravitino.v1+json") - @Timed(name = "drop-role." + MetricNames.HTTP_PROCESS_DURATION, absolute = true) - @ResponseMetered(name = "drop-role", absolute = true) - public Response dropRole(@PathParam("metalake") String metalake, @PathParam("role") String role) { + @Timed(name = "delete-role." 
+ MetricNames.HTTP_PROCESS_DURATION, absolute = true) + @ResponseMetered(name = "delete-role", absolute = true) + public Response deleteRole( + @PathParam("metalake") String metalake, @PathParam("role") String role) { try { return Utils.doAs( httpRequest, () -> { - boolean dropped = accessControlManager.dropRole(metalake, role); - if (!dropped) { - LOG.warn("Failed to drop role {} under metalake {}", role, metalake); + boolean deleted = accessControlManager.deleteRole(metalake, role); + if (!deleted) { + LOG.warn("Failed to delete role {} under metalake {}", role, metalake); } - return Utils.ok(new DropResponse(dropped)); + return Utils.ok(new DeleteResponse(deleted)); }); } catch (Exception e) { - return ExceptionHandlers.handleRoleException(OperationType.DELETE, role, metalake, e); } } } diff --git a/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestRoleOperations.java b/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestRoleOperations.java index 17d74cc02bb..59397bce665 100644 --- a/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestRoleOperations.java +++ b/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestRoleOperations.java @@ -20,7 +20,7 @@ import com.datastrato.gravitino.authorization.SecurableObjects; import com.datastrato.gravitino.dto.authorization.RoleDTO; import com.datastrato.gravitino.dto.requests.RoleCreateRequest; -import com.datastrato.gravitino.dto.responses.DropResponse; +import com.datastrato.gravitino.dto.responses.DeleteResponse; import com.datastrato.gravitino.dto.responses.ErrorConstants; import com.datastrato.gravitino.dto.responses.ErrorResponse; import com.datastrato.gravitino.dto.responses.RoleResponse; @@ -176,10 +176,10 @@ public void testCreateRole() { } @Test - public void testLoadRole() { + public void testGetRole() { Role role = buildRole("role1"); - when(manager.loadRole(any(), 
any())).thenReturn(role); + when(manager.getRole(any(), any())).thenReturn(role); Response resp = target("/metalakes/metalake1/roles/role1") @@ -198,7 +198,7 @@ public void testLoadRole() { Assertions.assertEquals(Lists.newArrayList(Privileges.LoadCatalog.get()), roleDTO.privileges()); // Test to throw NoSuchMetalakeException - doThrow(new NoSuchMetalakeException("mock error")).when(manager).loadRole(any(), any()); + doThrow(new NoSuchMetalakeException("mock error")).when(manager).getRole(any(), any()); Response resp1 = target("/metalakes/metalake1/roles/role1") .request(MediaType.APPLICATION_JSON_TYPE) @@ -212,7 +212,7 @@ public void testLoadRole() { Assertions.assertEquals(NoSuchMetalakeException.class.getSimpleName(), errorResponse.getType()); // Test to throw NoSuchRoleException - doThrow(new NoSuchRoleException("mock error")).when(manager).loadRole(any(), any()); + doThrow(new NoSuchRoleException("mock error")).when(manager).getRole(any(), any()); Response resp2 = target("/metalakes/metalake1/roles/role1") .request(MediaType.APPLICATION_JSON_TYPE) @@ -226,7 +226,7 @@ public void testLoadRole() { Assertions.assertEquals(NoSuchRoleException.class.getSimpleName(), errorResponse1.getType()); // Test to throw internal RuntimeException - doThrow(new RuntimeException("mock error")).when(manager).loadRole(any(), any()); + doThrow(new RuntimeException("mock error")).when(manager).getRole(any(), any()); Response resp3 = target("/metalakes/metalake1/roles/role1") .request(MediaType.APPLICATION_JSON_TYPE) @@ -254,8 +254,8 @@ private Role buildRole(String role) { } @Test - public void testDropRole() { - when(manager.dropRole(any(), any())).thenReturn(true); + public void testDeleteRole() { + when(manager.deleteRole(any(), any())).thenReturn(true); Response resp = target("/metalakes/metalake1/roles/role1") @@ -264,12 +264,12 @@ public void testDropRole() { .delete(); Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp.getStatus()); - DropResponse dropResponse = 
resp.readEntity(DropResponse.class); - Assertions.assertEquals(0, dropResponse.getCode()); - Assertions.assertTrue(dropResponse.dropped()); + DeleteResponse deleteResponse = resp.readEntity(DeleteResponse.class); + Assertions.assertEquals(0, deleteResponse.getCode()); + Assertions.assertTrue(deleteResponse.deleted()); - // Test when failed to drop role - when(manager.dropRole(any(), any())).thenReturn(false); + // Test when failed to delete role + when(manager.deleteRole(any(), any())).thenReturn(false); Response resp2 = target("/metalakes/metalake1/roles/role1") .request(MediaType.APPLICATION_JSON_TYPE) @@ -277,11 +277,11 @@ public void testDropRole() { .delete(); Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp2.getStatus()); - DropResponse dropResponse2 = resp2.readEntity(DropResponse.class); - Assertions.assertEquals(0, dropResponse2.getCode()); - Assertions.assertFalse(dropResponse2.dropped()); + DeleteResponse deleteResponse2 = resp2.readEntity(DeleteResponse.class); + Assertions.assertEquals(0, deleteResponse2.getCode()); + Assertions.assertFalse(deleteResponse2.deleted()); - doThrow(new RuntimeException("mock error")).when(manager).dropRole(any(), any()); + doThrow(new RuntimeException("mock error")).when(manager).deleteRole(any(), any()); Response resp3 = target("/metalakes/metalake1/roles/role1") .request(MediaType.APPLICATION_JSON_TYPE) diff --git a/spark-connector/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/hive/TestHivePropertiesConverter.java b/spark-connector/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/hive/TestHivePropertiesConverter.java index 83bde5416a5..e8e830a9378 100644 --- a/spark-connector/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/hive/TestHivePropertiesConverter.java +++ b/spark-connector/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/hive/TestHivePropertiesConverter.java @@ -25,7 +25,7 @@ void testTableFormat() { 
hivePropertiesConverter.toGravitinoTableProperties( ImmutableMap.of(HivePropertiesConstants.SPARK_HIVE_STORED_AS, "PARQUET")); Assertions.assertEquals( - hiveProperties.get(HivePropertiesConstants.GRAVITINO_HIVE_FORMAT), "PARQUET"); + "PARQUET", hiveProperties.get(HivePropertiesConstants.GRAVITINO_HIVE_FORMAT)); Assertions.assertThrowsExactly( NotSupportedException.class, () -> @@ -84,8 +84,8 @@ void testExternalTable() { hivePropertiesConverter.toGravitinoTableProperties( ImmutableMap.of(HivePropertiesConstants.SPARK_HIVE_EXTERNAL, "true")); Assertions.assertEquals( - hiveProperties.get(HivePropertiesConstants.GRAVITINO_HIVE_TABLE_TYPE), - HivePropertiesConstants.GRAVITINO_HIVE_EXTERNAL_TABLE); + HivePropertiesConstants.GRAVITINO_HIVE_EXTERNAL_TABLE, + hiveProperties.get(HivePropertiesConstants.GRAVITINO_HIVE_TABLE_TYPE)); hiveProperties = hivePropertiesConverter.toSparkTableProperties( diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoConfig.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoConfig.java index 63b68bb1626..978e8d1f548 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoConfig.java +++ b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoConfig.java @@ -28,7 +28,7 @@ public class GravitinoConfig { new ConfigEntry( "gravitino.simplify-catalog-names", "Omit metalake prefix for catalog names", - "false", + "true", false); public GravitinoConfig(Map requiredConfig) { diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoConnectorFactory.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoConnectorFactory.java index 1c7a43f09fe..84f84ec2665 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoConnectorFactory.java +++ 
b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoConnectorFactory.java @@ -64,8 +64,7 @@ public Connector create( try { CatalogInjector catalogInjector = new CatalogInjector(); catalogInjector.init(context); - CatalogConnectorFactory catalogConnectorFactory = - new CatalogConnectorFactory(catalogInjector); + CatalogConnectorFactory catalogConnectorFactory = new CatalogConnectorFactory(); catalogConnectorManager = new CatalogConnectorManager(catalogInjector, catalogConnectorFactory); @@ -95,7 +94,7 @@ public Connector create( if (Strings.isNullOrEmpty(metalake)) { throw new TrinoException(GRAVITINO_METALAKE_NOT_EXISTS, "No gravitino metalake selected"); } - if (config.simplifyCatalogNames() && catalogConnectorManager.getCatalogs().size() > 1) { + if (config.simplifyCatalogNames() && !catalogConnectorManager.getUsedMetalakes().isEmpty()) { throw new TrinoException( GRAVITINO_MISSING_CONFIG, "Multiple metalakes are not supported when setting gravitino.simplify-catalog-names = true"); diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorFactory.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorFactory.java index ad141b6386a..1b8ab569247 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorFactory.java +++ b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorFactory.java @@ -4,10 +4,8 @@ */ package com.datastrato.gravitino.trino.connector.catalog; -import static com.datastrato.gravitino.trino.connector.GravitinoErrorCode.GRAVITINO_CREATE_INTERNAL_CONNECTOR_ERROR; import static com.datastrato.gravitino.trino.connector.GravitinoErrorCode.GRAVITINO_UNSUPPORTED_CATALOG_PROVIDER; -import com.datastrato.gravitino.client.GravitinoMetalake; import com.datastrato.gravitino.trino.connector.catalog.hive.HiveConnectorAdapter; import 
com.datastrato.gravitino.trino.connector.catalog.iceberg.IcebergConnectorAdapter; import com.datastrato.gravitino.trino.connector.catalog.jdbc.mysql.MySQLConnectorAdapter; @@ -15,7 +13,6 @@ import com.datastrato.gravitino.trino.connector.catalog.memory.MemoryConnectorAdapter; import com.datastrato.gravitino.trino.connector.metadata.GravitinoCatalog; import io.trino.spi.TrinoException; -import io.trino.spi.connector.Connector; import java.util.HashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -24,12 +21,9 @@ public class CatalogConnectorFactory { private static final Logger LOG = LoggerFactory.getLogger(CatalogConnectorFactory.class); - private final CatalogInjector catalogInjector; private final HashMap catalogBuilders = new HashMap<>(); - public CatalogConnectorFactory(CatalogInjector catalogInjector) { - this.catalogInjector = catalogInjector; - + public CatalogConnectorFactory() { catalogBuilders.put("hive", new CatalogConnectorContext.Builder(new HiveConnectorAdapter())); catalogBuilders.put( "memory", new CatalogConnectorContext.Builder(new MemoryConnectorAdapter())); @@ -41,8 +35,8 @@ public CatalogConnectorFactory(CatalogInjector catalogInjector) { "jdbc-postgresql", new CatalogConnectorContext.Builder(new PostgreSQLConnectorAdapter())); } - public CatalogConnectorContext loadCatalogConnector( - GravitinoMetalake metalake, GravitinoCatalog catalog) { + public CatalogConnectorContext.Builder createCatalogConnectorContextBuilder( + GravitinoCatalog catalog) { String catalogProvider = catalog.getProvider(); CatalogConnectorContext.Builder builder = catalogBuilders.get(catalogProvider); if (builder == null) { @@ -52,23 +46,6 @@ public CatalogConnectorContext loadCatalogConnector( } // Avoid using the same builder object to prevent catalog creation errors. 
- builder = builder.clone(); - - try { - Connector internalConnector = - catalogInjector.createConnector(catalog.getFullName(), builder.buildConfig(catalog)); - - return builder - .withMetalake(metalake) - .withCatalog(catalog) - .withInternalConnector(internalConnector) - .build(); - - } catch (Exception e) { - String message = - String.format("Failed to create internal catalog connector. The catalog is: %s", catalog); - LOG.error(message, e); - throw new TrinoException(GRAVITINO_CREATE_INTERNAL_CONNECTOR_ERROR, message, e); - } + return builder.clone(); } } diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorManager.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorManager.java index 9ba6fefeb77..ac1da69bfbe 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorManager.java +++ b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogConnectorManager.java @@ -6,8 +6,10 @@ import static com.datastrato.gravitino.trino.connector.GravitinoErrorCode.GRAVITINO_CATALOG_ALREADY_EXISTS; import static com.datastrato.gravitino.trino.connector.GravitinoErrorCode.GRAVITINO_CATALOG_NOT_EXISTS; +import static com.datastrato.gravitino.trino.connector.GravitinoErrorCode.GRAVITINO_CREATE_INTERNAL_CONNECTOR_ERROR; import static com.datastrato.gravitino.trino.connector.GravitinoErrorCode.GRAVITINO_METALAKE_NOT_EXISTS; import static com.datastrato.gravitino.trino.connector.GravitinoErrorCode.GRAVITINO_MISSING_CONFIG; +import static com.datastrato.gravitino.trino.connector.GravitinoErrorCode.GRAVITINO_OPERATION_FAILED; import static com.datastrato.gravitino.trino.connector.GravitinoErrorCode.GRAVITINO_UNSUPPORTED_OPERATION; import com.datastrato.gravitino.Catalog; @@ -25,6 +27,7 @@ import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; import 
io.trino.spi.TrinoException; +import io.trino.spi.connector.Connector; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -166,7 +169,7 @@ public void loadCatalogs(GravitinoMetalake metalake) { Catalog catalog = metalake.loadCatalog(nameIdentifier); GravitinoCatalog gravitinoCatalog = new GravitinoCatalog(metalake.name(), catalog, config.simplifyCatalogNames()); - if (catalogConnectors.containsKey(gravitinoCatalog.getFullName())) { + if (catalogConnectors.containsKey(getTrinoCatalogName(gravitinoCatalog))) { // Reload catalogs that have been updated in Gravitino server. reloadCatalog(metalake, gravitinoCatalog); @@ -183,13 +186,13 @@ public void loadCatalogs(GravitinoMetalake metalake) { } private void reloadCatalog(GravitinoMetalake metalake, GravitinoCatalog catalog) { - GravitinoCatalog oldCatalog = catalogConnectors.get(catalog.getFullName()).getCatalog(); + GravitinoCatalog oldCatalog = catalogConnectors.get(getTrinoCatalogName(catalog)).getCatalog(); if (!catalog.getLastModifiedTime().isAfter(oldCatalog.getLastModifiedTime())) { return; } - catalogInjector.removeCatalogConnector(catalog.getFullName()); - catalogConnectors.remove(catalog.getFullName()); + catalogInjector.removeCatalogConnector((getTrinoCatalogName(catalog))); + catalogConnectors.remove(getTrinoCatalogName(catalog)); loadCatalogImpl(metalake, catalog); LOG.info("Update catalog '{}' in metalake {} successfully.", catalog, metalake.name()); @@ -197,16 +200,27 @@ private void reloadCatalog(GravitinoMetalake metalake, GravitinoCatalog catalog) private void loadCatalog(GravitinoMetalake metalake, GravitinoCatalog catalog) { loadCatalogImpl(metalake, catalog); - LOG.info( - "Load catalog {} in metalake {} successfully.", catalog.getFullName(), metalake.name()); + LOG.info("Load catalog {} in metalake {} successfully.", catalog, metalake.name()); } private void loadCatalogImpl(GravitinoMetalake metalake, GravitinoCatalog catalog) { - CatalogConnectorContext 
catalogConnectorContext = - catalogConnectorFactory.loadCatalogConnector(metalake, catalog); + CatalogConnectorContext.Builder builder = + catalogConnectorFactory.createCatalogConnectorContextBuilder(catalog); + try { + Connector internalConnector = + catalogInjector.createConnector( + getTrinoCatalogName(catalog), builder.buildConfig(catalog)); - catalogConnectors.put(catalog.getFullName(), catalogConnectorContext); - catalogInjector.injectCatalogConnector(catalog.getFullName()); + builder.withMetalake(metalake).withCatalog(catalog).withInternalConnector(internalConnector); + } catch (Exception e) { + String message = + String.format("Failed to create internal catalog connector. The catalog is: %s", catalog); + LOG.error(message, e); + throw new TrinoException(GRAVITINO_CREATE_INTERNAL_CONNECTOR_ERROR, message, e); + } + + catalogConnectors.put(getTrinoCatalogName(catalog), builder.build()); + catalogInjector.injectCatalogConnector(getTrinoCatalogName(catalog)); } private void unloadCatalog(GravitinoMetalake metalake, String catalogFullName) { @@ -228,6 +242,14 @@ public void shutdown() { throw new NotImplementedException(); } + public String getTrinoCatalogName(String metalake, String catalog) { + return config.simplifyCatalogNames() ? catalog : metalake + "." 
+ catalog; + } + + public String getTrinoCatalogName(GravitinoCatalog catalog) { + return getTrinoCatalogName(catalog.getMetalake(), catalog.getName()); + } + public void createCatalog( String metalakeName, String catalogName, @@ -235,7 +257,7 @@ public void createCatalog( Map properties, boolean ignoreExist) { NameIdentifier catalog = NameIdentifier.of(metalakeName, catalogName); - if (catalogConnectors.containsKey(catalog.toString())) { + if (catalogConnectors.containsKey(getTrinoCatalogName(metalakeName, catalogName))) { if (!ignoreExist) { throw new TrinoException( GRAVITINO_CATALOG_ALREADY_EXISTS, String.format("Catalog %s already exists.", catalog)); @@ -249,11 +271,15 @@ public void createCatalog( metalake.createCatalog( catalog, Catalog.Type.RELATIONAL, provider, "Trino created", properties); - LOG.info("Create catalog {} in metalake {} successfully.", catalog, metalake); + LOG.info("Create catalog {} in metalake {} successfully.", catalogName, metalake); Future future = executorService.submit(this::loadMetalake); future.get(30, TimeUnit.SECONDS); + if (!catalogConnectors.containsKey(getTrinoCatalogName(metalakeName, catalogName))) { + throw new TrinoException( + GRAVITINO_OPERATION_FAILED, "Create catalog failed due to the loading process fails"); + } } catch (NoSuchMetalakeException e) { throw new TrinoException( GRAVITINO_METALAKE_NOT_EXISTS, "Metalake " + metalakeName + " not exists."); @@ -286,11 +312,15 @@ public void dropCatalog(String metalakeName, String catalogName, boolean ignoreN throw new TrinoException( GRAVITINO_UNSUPPORTED_OPERATION, "Drop catalog " + catalog + " does not support."); } - LOG.info("Drop catalog {} in metalake {} successfully.", catalog, metalake); + LOG.info("Drop catalog {} in metalake {} successfully.", catalogName, metalake); Future future = executorService.submit(this::loadMetalake); future.get(30, TimeUnit.SECONDS); + if (catalogConnectors.containsKey(getTrinoCatalogName(metalakeName, catalogName))) { + throw new 
TrinoException( + GRAVITINO_OPERATION_FAILED, "Drop catalog failed due to the reloading process fails"); + } } catch (NoSuchMetalakeException e) { throw new TrinoException( GRAVITINO_METALAKE_NOT_EXISTS, "Metalake " + metalakeName + " not exists."); @@ -305,10 +335,10 @@ public void alterCatalog( String catalogName, Map setProperties, List removeProperties) { + NameIdentifier catalog = NameIdentifier.of(metalakeName, catalogName); try { - NameIdentifier catalogNameId = NameIdentifier.of(metalakeName, catalogName); CatalogConnectorContext catalogConnectorContext = - catalogConnectors.get(catalogNameId.toString()); + catalogConnectors.get(getTrinoCatalogName(metalakeName, catalogName)); GravitinoCatalog oldCatalog = catalogConnectorContext.getCatalog(); List changes = new ArrayList<>(); @@ -341,19 +371,27 @@ public void alterCatalog( GravitinoMetalake metalake = gravitinoClient.loadMetalake(NameIdentifier.ofMetalake(metalakeName)); - metalake.alterCatalog( - NameIdentifier.of(metalakeName, catalogName), - changes.toArray(changes.toArray(new CatalogChange[0]))); + metalake.alterCatalog(catalog, changes.toArray(changes.toArray(new CatalogChange[0]))); Future future = executorService.submit(this::loadMetalake); future.get(30, TimeUnit.SECONDS); + catalogConnectorContext = + catalogConnectors.get(getTrinoCatalogName(metalakeName, catalogName)); + if (catalogConnectorContext == null + || catalogConnectorContext + .getCatalog() + .getLastModifiedTime() + .equals(oldCatalog.getLastModifiedTime())) { + throw new TrinoException( + GRAVITINO_OPERATION_FAILED, "Update catalog failed due to the reloading process fails"); + } + } catch (NoSuchMetalakeException e) { throw new TrinoException( GRAVITINO_METALAKE_NOT_EXISTS, "Metalake " + metalakeName + " not exists."); } catch (NoSuchCatalogException e) { - throw new TrinoException( - GRAVITINO_CATALOG_NOT_EXISTS, "Catalog " + catalogName + " not exists."); + throw new TrinoException(GRAVITINO_CATALOG_NOT_EXISTS, "Catalog " + 
catalog + " not exists."); } catch (Exception e) { throw new TrinoException( GRAVITINO_UNSUPPORTED_OPERATION, "alter catalog failed. " + e.getMessage(), e); @@ -367,4 +405,8 @@ public void addMetalake(String metalake) { "Multiple metalakes are not supported when setting gravitino.simplify-catalog-names = true"); usedMetalakes.add(metalake); } + + public Set getUsedMetalakes() { + return usedMetalakes; + } } diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogInjector.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogInjector.java index 02cac9f4be4..a33a9b85984 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogInjector.java +++ b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/CatalogInjector.java @@ -336,7 +336,7 @@ Connector createConnector(String connectorName, Map properties) LOG.error( "Create internal catalog connector {} failed. 
Connector properties: {} ", connectorName, - properties.toString(), + properties, e); throw new TrinoException(GRAVITINO_CREATE_INNER_CONNECTOR_FAILED, e); } diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/metadata/GravitinoCatalog.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/metadata/GravitinoCatalog.java index 19c2fc81659..4dd61b1c368 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/metadata/GravitinoCatalog.java +++ b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/metadata/GravitinoCatalog.java @@ -18,12 +18,10 @@ public class GravitinoCatalog { private final String metalake; private final Catalog catalog; - private final boolean usingSimpleName; public GravitinoCatalog(String metalake, Catalog catalog, boolean usingSimpleName) { this.metalake = metalake; this.catalog = catalog; - this.usingSimpleName = usingSimpleName; } public String getProvider() { @@ -34,8 +32,8 @@ public String getName() { return catalog.name(); } - public String getFullName() { - return usingSimpleName ? catalog.name() : metalake + "." 
+ catalog.name(); + public String getMetalake() { + return metalake; } public NameIdentifier geNameIdentifier() { diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/system/table/GravitinoSystemTableCatalog.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/system/table/GravitinoSystemTableCatalog.java index 2996f8a9688..63db9900ed7 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/system/table/GravitinoSystemTableCatalog.java +++ b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/system/table/GravitinoSystemTableCatalog.java @@ -52,7 +52,7 @@ public Page loadPageData() { for (GravitinoCatalog catalog : catalogs) { Preconditions.checkNotNull(catalog, "catalog should not be null"); - VARCHAR.writeString(nameColumnBuilder, catalog.getFullName()); + VARCHAR.writeString(nameColumnBuilder, catalog.getName()); VARCHAR.writeString(providerColumnBuilder, catalog.getProvider()); try { VARCHAR.writeString( diff --git a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/GravitinoMockServer.java b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/GravitinoMockServer.java index 0b10579dcf4..66d1fa9821b 100644 --- a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/GravitinoMockServer.java +++ b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/GravitinoMockServer.java @@ -65,10 +65,6 @@ public class GravitinoMockServer implements AutoCloseable { CatalogConnectorManager catalogConnectorManager; private GeneralDataTypeTransformer dataTypeTransformer = new HiveDataTypeTransformer(); - public GravitinoMockServer() { - this(false); - } - public GravitinoMockServer(boolean simpleCatalogName) { this.simpleCatalogName = simpleCatalogName; createMetalake(NameIdentifier.ofMetalake(testMetalake)); @@ -241,12 +237,13 @@ public Schema answer(InvocationOnMock invocation) throws Throwable { 
MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector( + catalogConnectorManager.getTrinoCatalogName(catalog)) .getInternalConnector(); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector(catalogConnectorManager.getTrinoCatalogName(catalog)) .getMetadataAdapter(); GravitinoSchema schema = new GravitinoSchema(schemaName.name(), properties, ""); metadata.createSchema(null, schemaName.name(), emptyMap(), null); @@ -269,7 +266,8 @@ public Boolean answer(InvocationOnMock invocation) throws Throwable { MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector( + catalogConnectorManager.getTrinoCatalogName(catalog)) .getInternalConnector(); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); metadata.dropSchema(null, nameIdentifier.name(), cascade); @@ -286,7 +284,8 @@ public NameIdentifier[] answer(InvocationOnMock invocation) throws Throwable { MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector( + catalogConnectorManager.getTrinoCatalogName(catalog)) .getInternalConnector(); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); return metadata.listSchemaNames(null).stream() @@ -307,7 +306,8 @@ public Schema answer(InvocationOnMock invocation) throws Throwable { MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector( + catalogConnectorManager.getTrinoCatalogName(catalog)) .getInternalConnector(); memoryConnector.getMetadata(null, null); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); @@ -316,7 +316,7 @@ public Schema answer(InvocationOnMock invocation) 
throws Throwable { CatalogConnectorMetadataAdapter metadataAdapter = catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector(catalogConnectorManager.getTrinoCatalogName(catalog)) .getMetadataAdapter(); GravitinoSchema gravitinoSchema = @@ -361,7 +361,7 @@ public Table answer(InvocationOnMock invocation) throws Throwable { tableName.schema(), tableName.table(), columns, comment, properties); CatalogConnectorMetadataAdapter metadataAdapter = catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector(catalogConnectorManager.getTrinoCatalogName(catalog)) .getMetadataAdapter(); ConnectorTableMetadata tableMetadata = metadataAdapter.getTableMetadata(gravitinoTable); @@ -369,7 +369,8 @@ public Table answer(InvocationOnMock invocation) throws Throwable { MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector( + catalogConnectorManager.getTrinoCatalogName(catalog)) .getInternalConnector(); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); metadata.createTable(null, tableMetadata, false); @@ -388,7 +389,8 @@ public Boolean answer(InvocationOnMock invocation) throws Throwable { MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector( + catalogConnectorManager.getTrinoCatalogName(catalog)) .getInternalConnector(); memoryConnector.getMetadata(null, null); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); @@ -415,7 +417,8 @@ public NameIdentifier[] answer(InvocationOnMock invocation) throws Throwable { MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector( + catalogConnectorManager.getTrinoCatalogName(catalog)) .getInternalConnector(); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); 
ArrayList tableNames = new ArrayList<>(); @@ -441,7 +444,8 @@ public Boolean answer(InvocationOnMock invocation) throws Throwable { MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector( + catalogConnectorManager.getTrinoCatalogName(catalog)) .getInternalConnector(); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); return metadata.getTableHandle( @@ -463,7 +467,8 @@ public Table answer(InvocationOnMock invocation) throws Throwable { MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector( + catalogConnectorManager.getTrinoCatalogName(catalog)) .getInternalConnector(); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); @@ -477,7 +482,7 @@ public Table answer(InvocationOnMock invocation) throws Throwable { CatalogConnectorMetadataAdapter metadataAdapter = catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector(catalogConnectorManager.getTrinoCatalogName(catalog)) .getMetadataAdapter(); GravitinoTable gravitinoTable = metadataAdapter.createTable(tableMetadata); @@ -505,7 +510,8 @@ public Table answer(InvocationOnMock invocation) throws Throwable { MemoryConnector memoryConnector = (MemoryConnector) catalogConnectorManager - .getCatalogConnector(catalog.getFullName()) + .getCatalogConnector( + catalogConnectorManager.getTrinoCatalogName(catalog)) .getInternalConnector(); ConnectorMetadata metadata = memoryConnector.getMetadata(null, null); ConnectorTableHandle tableHandle = @@ -542,7 +548,9 @@ void doAlterTable( GravitinoColumn column = new GravitinoColumn(fieldName, addColumn.getDataType(), -1, "", true); CatalogConnectorMetadataAdapter metadataAdapter = - catalogConnectorManager.getCatalogConnector(catalog.getFullName()).getMetadataAdapter(); + catalogConnectorManager + 
.getCatalogConnector(catalogConnectorManager.getTrinoCatalogName(catalog)) + .getMetadataAdapter(); metadata.addColumn(null, tableHandle, metadataAdapter.getColumnMetadata(column)); } else if (tableChange instanceof TableChange.DeleteColumn) { diff --git a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestCreateGravitinoConnector.java b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestCreateGravitinoConnector.java index 5b7013b9b75..5f6aa375830 100644 --- a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestCreateGravitinoConnector.java +++ b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestCreateGravitinoConnector.java @@ -19,7 +19,7 @@ public class TestCreateGravitinoConnector { GravitinoMockServer server; @Test - public void testCreateSimpleCatalogNameConnector() throws Exception { + public void testCreateConnectorsWithEnableSimpleCatalog() throws Exception { server = new GravitinoMockServer(true); Session session = testSessionBuilder().setCatalog("gravitino").build(); QueryRunner queryRunner = DistributedQueryRunner.builder(session).setNodeCount(1).build(); @@ -28,6 +28,7 @@ public void testCreateSimpleCatalogNameConnector() throws Exception { TestGravitinoPlugin gravitinoPlugin = new TestGravitinoPlugin(gravitinoClient); queryRunner.installPlugin(gravitinoPlugin); + // test create two connector and set gravitino.simplify-catalog-names = true { // create a gravitino connector named gravitino using metalake test HashMap properties = new HashMap<>(); @@ -42,6 +43,7 @@ public void testCreateSimpleCatalogNameConnector() throws Exception { HashMap properties = new HashMap<>(); properties.put("gravitino.metalake", "test1"); properties.put("gravitino.uri", "http://127.0.0.1:8090"); + properties.put("gravitino.simplify-catalog-names", "true"); try { queryRunner.createCatalog("test1", "gravitino", properties); } catch (Exception e) { @@ -51,4 +53,36 @@ public void 
testCreateSimpleCatalogNameConnector() throws Exception { server.close(); } + + @Test + public void testCreateConnectorsWithDisableSimpleCatalog() throws Exception { + server = new GravitinoMockServer(false); + Session session = testSessionBuilder().setCatalog("gravitino").build(); + QueryRunner queryRunner = DistributedQueryRunner.builder(session).setNodeCount(1).build(); + + GravitinoAdminClient gravitinoClient = server.createGravitinoClient(); + TestGravitinoPlugin gravitinoPlugin = new TestGravitinoPlugin(gravitinoClient); + queryRunner.installPlugin(gravitinoPlugin); + + // test create two connector and set gravitino.simplify-catalog-names = false + { + // create a gravitino connector named gravitino using metalake test + HashMap properties = new HashMap<>(); + properties.put("gravitino.metalake", "test"); + properties.put("gravitino.uri", "http://127.0.0.1:8090"); + properties.put("gravitino.simplify-catalog-names", "false"); + queryRunner.createCatalog("test0", "gravitino", properties); + } + + { + // Test failed to create catalog with different metalake + HashMap properties = new HashMap<>(); + properties.put("gravitino.metalake", "test1"); + properties.put("gravitino.uri", "http://127.0.0.1:8090"); + properties.put("gravitino.simplify-catalog-names", "false"); + queryRunner.createCatalog("test1", "gravitino", properties); + } + + server.close(); + } } diff --git a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestGravitinoConnector.java b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestGravitinoConnector.java index c6054909975..93fb75da8c7 100644 --- a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestGravitinoConnector.java +++ b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestGravitinoConnector.java @@ -9,7 +9,6 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertEquals; -import 
com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.client.GravitinoAdminClient; import com.datastrato.gravitino.trino.connector.catalog.CatalogConnectorManager; import io.trino.Session; @@ -19,7 +18,6 @@ import io.trino.testing.MaterializedResult; import io.trino.testing.MaterializedRow; import io.trino.testing.QueryRunner; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.concurrent.TimeUnit; @@ -37,7 +35,7 @@ public class TestGravitinoConnector extends AbstractTestQueryFramework { @Override protected QueryRunner createQueryRunner() throws Exception { - server = closeAfterClass(new GravitinoMockServer()); + server = closeAfterClass(new GravitinoMockServer(true)); GravitinoAdminClient gravitinoClient = server.createGravitinoClient(); Session session = testSessionBuilder().setCatalog("gravitino").build(); @@ -50,21 +48,11 @@ protected QueryRunner createQueryRunner() throws Exception { queryRunner.installPlugin(gravitinoPlugin); queryRunner.installPlugin(new MemoryPlugin()); - { - // create a gravitino connector named gravitino using metalake test - HashMap properties = new HashMap<>(); - properties.put("gravitino.metalake", "test"); - properties.put("gravitino.uri", "http://127.0.0.1:8090"); - queryRunner.createCatalog("gravitino", "gravitino", properties); - } - - { - // create a gravitino connector named test1 using metalake test1 - HashMap properties = new HashMap<>(); - properties.put("gravitino.metalake", "test1"); - properties.put("gravitino.uri", "http://127.0.0.1:8090"); - queryRunner.createCatalog("test1", "gravitino", properties); - } + // create a gravitino connector named gravitino using metalake test + HashMap properties = new HashMap<>(); + properties.put("gravitino.metalake", "test"); + properties.put("gravitino.uri", "http://127.0.0.1:8090"); + queryRunner.createCatalog("gravitino", "gravitino", properties); CatalogConnectorManager catalogConnectorManager = 
gravitinoPlugin.getCatalogConnectorManager(); @@ -82,14 +70,14 @@ protected QueryRunner createQueryRunner() throws Exception { @Test public void testCreateSchema() { - String catalogName = "test.memory"; + String catalogName = "memory"; String schemaName = "db_01"; - String fullSchemaName = String.format("\"%s\".%s", catalogName, schemaName); - assertThat(computeActual("show schemas from \"test.memory\"").getOnlyColumnAsSet()) + String fullSchemaName = String.format("%s.%s", catalogName, schemaName); + assertThat(computeActual("show schemas from " + catalogName).getOnlyColumnAsSet()) .doesNotContain(schemaName); assertUpdate("create schema " + fullSchemaName); - assertThat(computeActual("show schemas from \"test.memory\"").getOnlyColumnAsSet()) + assertThat(computeActual("show schemas from \"memory\"").getOnlyColumnAsSet()) .contains(schemaName); assertThat((String) computeScalar("show create schema " + fullSchemaName)) @@ -108,7 +96,7 @@ public void testCreateSchema() { @Test public void testCreateTable() { - String fullSchemaName = "\"test.memory\".db_01"; + String fullSchemaName = "memory.db_01"; String tableName = "tb_01"; String fullTableName = fullSchemaName + "." + tableName; @@ -127,13 +115,13 @@ public void testCreateTable() { .startsWith(format("CREATE TABLE %s", fullTableName)); // cleanup - assertUpdate("drop table" + fullTableName); + assertUpdate("drop table " + fullTableName); assertUpdate("drop schema " + fullSchemaName); } @Test public void testInsert() throws Exception { - String fullTableName = "\"test.memory\".db_01.tb_01"; + String fullTableName = "\"memory\".db_01.tb_01"; createTestTable(fullTableName); // insert some data. 
assertUpdate(String.format("insert into %s (a, b) values ('ice', 12)", fullTableName), 1); @@ -152,8 +140,8 @@ public void testInsert() throws Exception { @Test public void testInsertIntoSelect() throws Exception { - String fullTableName1 = "\"test.memory\".db_01.tb_01"; - String fullTableName2 = "\"test.memory\".db_01.tb_02"; + String fullTableName1 = "\"memory\".db_01.tb_01"; + String fullTableName2 = "\"memory\".db_01.tb_02"; createTestTable(fullTableName1); createTestTable(fullTableName2); @@ -171,8 +159,8 @@ public void testInsertIntoSelect() throws Exception { @Test public void testAlterTable() throws Exception { - String fullTableName1 = "\"test.memory\".db_01.tb_01"; - String fullTableName2 = "\"test.memory\".db_01.tb_02"; + String fullTableName1 = "\"memory\".db_01.tb_01"; + String fullTableName2 = "\"memory\".db_01.tb_02"; createTestTable(fullTableName1); // test rename table @@ -223,35 +211,25 @@ public void testAlterTable() throws Exception { public void testCreateCatalog() throws Exception { // testing the catalogs assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("gravitino"); - assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("test1"); - assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("test.memory"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("memory"); // testing the gravitino connector framework works. assertThat(computeActual("select * from system.jdbc.tables")); // test metalake named test. 
the connector name is gravitino assertUpdate("call gravitino.system.create_catalog('memory1', 'memory', Map())"); - assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("test.memory1"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("memory1"); assertUpdate("call gravitino.system.drop_catalog('memory1')"); - assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).doesNotContain("test.memory1"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).doesNotContain("memory1"); assertUpdate( "call gravitino.system.create_catalog(" + "catalog=>'memory1', provider=>'memory', properties => Map(array['max_ttl'], array['10']), ignore_exist => true)"); - assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("test.memory1"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("memory1"); assertUpdate( "call gravitino.system.drop_catalog(catalog => 'memory1', ignore_not_exist => true)"); - assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).doesNotContain("test.memory1"); - - // test metalake named test1. 
the connnector name is test1 - GravitinoAdminClient gravitinoClient = server.createGravitinoClient(); - gravitinoClient.createMetalake(NameIdentifier.ofMetalake("test1"), "", Collections.emptyMap()); - - assertUpdate("call test1.system.create_catalog('memory1', 'memory', Map())"); - assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("test1.memory1"); - assertUpdate("call test1.system.drop_catalog('memory1')"); - assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).doesNotContain("test1.memory1"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).doesNotContain("memory1"); } @Test @@ -260,7 +238,7 @@ public void testSystemTable() throws Exception { assertEquals(expectedResult.getRowCount(), 1); List expectedRows = expectedResult.getMaterializedRows(); MaterializedRow row = expectedRows.get(0); - assertEquals(row.getField(0), "test.memory"); + assertEquals(row.getField(0), "memory"); assertEquals(row.getField(1), "memory"); assertEquals(row.getField(2), "{\"max_ttl\":\"10\"}"); } diff --git a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestGravitinoConnectorWithMetalakeCatalogName.java b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestGravitinoConnectorWithMetalakeCatalogName.java new file mode 100644 index 00000000000..ed9d4457a0e --- /dev/null +++ b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestGravitinoConnectorWithMetalakeCatalogName.java @@ -0,0 +1,149 @@ +/* + * Copyright 2023 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ +package com.datastrato.gravitino.trino.connector; + +import static io.trino.testing.TestingSession.testSessionBuilder; +import static java.lang.String.format; +import static org.assertj.core.api.Assertions.assertThat; +import static org.testng.Assert.assertEquals; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.client.GravitinoAdminClient; +import com.datastrato.gravitino.trino.connector.catalog.CatalogConnectorManager; +import io.trino.Session; +import io.trino.plugin.memory.MemoryPlugin; +import io.trino.testing.AbstractTestQueryFramework; +import io.trino.testing.DistributedQueryRunner; +import io.trino.testing.MaterializedResult; +import io.trino.testing.MaterializedRow; +import io.trino.testing.QueryRunner; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.testcontainers.shaded.org.awaitility.Awaitility; +import org.testng.annotations.Test; + +public class TestGravitinoConnectorWithMetalakeCatalogName extends AbstractTestQueryFramework { + + GravitinoMockServer server; + + @Override + protected QueryRunner createQueryRunner() throws Exception { + server = closeAfterClass(new GravitinoMockServer(false)); + GravitinoAdminClient gravitinoClient = server.createGravitinoClient(); + + Session session = testSessionBuilder().setCatalog("gravitino").build(); + QueryRunner queryRunner = null; + try { + queryRunner = DistributedQueryRunner.builder(session).setNodeCount(1).build(); + + TestGravitinoPlugin gravitinoPlugin = new TestGravitinoPlugin(gravitinoClient); + queryRunner.installPlugin(gravitinoPlugin); + queryRunner.installPlugin(new MemoryPlugin()); + + { + // create a gravitino connector named gravitino using metalake test + HashMap properties = new HashMap<>(); + properties.put("gravitino.metalake", "test"); + properties.put("gravitino.uri", "http://127.0.0.1:8090"); + properties.put("gravitino.simplify-catalog-names", "false"); + 
queryRunner.createCatalog("gravitino", "gravitino", properties); + } + + { + // create a gravitino connector named test1 using metalake test1 + HashMap properties = new HashMap<>(); + properties.put("gravitino.metalake", "test1"); + properties.put("gravitino.uri", "http://127.0.0.1:8090"); + properties.put("gravitino.simplify-catalog-names", "false"); + queryRunner.createCatalog("test1", "gravitino", properties); + } + + CatalogConnectorManager catalogConnectorManager = + gravitinoPlugin.getCatalogConnectorManager(); + server.setCatalogConnectorManager(catalogConnectorManager); + // Wait for the catalog to be created. Wait for at least 30 seconds. + Awaitility.await() + .atMost(30, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .until(() -> !catalogConnectorManager.getCatalogs().isEmpty()); + } catch (Exception e) { + throw new RuntimeException("Create query runner failed", e); + } + return queryRunner; + } + + @Test + public void testSystemTable() throws Exception { + MaterializedResult expectedResult = computeActual("select * from gravitino.system.catalog"); + assertEquals(expectedResult.getRowCount(), 1); + List expectedRows = expectedResult.getMaterializedRows(); + MaterializedRow row = expectedRows.get(0); + assertEquals(row.getField(0), "memory"); + assertEquals(row.getField(1), "memory"); + assertEquals(row.getField(2), "{\"max_ttl\":\"10\"}"); + } + + @Test + public void testCreateCatalog() throws Exception { + // testing the catalogs + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("gravitino"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("test1"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("test.memory"); + + // testing the gravitino connector framework works. + assertThat(computeActual("select * from system.jdbc.tables")); + + // test metalake named test. 
the connector name is gravitino + assertUpdate("call gravitino.system.create_catalog('memory1', 'memory', Map())"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("test.memory1"); + assertUpdate("call gravitino.system.drop_catalog('memory1')"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).doesNotContain("test.memory1"); + + assertUpdate( + "call gravitino.system.create_catalog(" + + "catalog=>'memory1', provider=>'memory', properties => Map(array['max_ttl'], array['10']), ignore_exist => true)"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("test.memory1"); + + assertUpdate( + "call gravitino.system.drop_catalog(catalog => 'memory1', ignore_not_exist => true)"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).doesNotContain("test.memory1"); + + // test metalake named test1. the connnector name is test1 + GravitinoAdminClient gravitinoClient = server.createGravitinoClient(); + gravitinoClient.createMetalake(NameIdentifier.ofMetalake("test1"), "", Collections.emptyMap()); + + assertUpdate("call test1.system.create_catalog('memory1', 'memory', Map())"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("test1.memory1"); + assertUpdate("call test1.system.drop_catalog('memory1')"); + assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).doesNotContain("test1.memory1"); + } + + @Test + public void testCreateTable() { + String fullSchemaName = "\"test.memory\".db_01"; + String tableName = "tb_01"; + String fullTableName = fullSchemaName + "." 
+ tableName; + + assertUpdate("create schema " + fullSchemaName); + + // try to get table + assertThat(computeActual("show tables from " + fullSchemaName).getOnlyColumnAsSet()) + .doesNotContain(tableName); + + // try to create table + assertUpdate("create table " + fullTableName + " (a varchar, b int)"); + assertThat(computeActual("show tables from " + fullSchemaName).getOnlyColumnAsSet()) + .contains(tableName); + + assertThat((String) computeScalar("show create table " + fullTableName)) + .startsWith(format("CREATE TABLE %s", fullTableName)); + + // cleanup + assertUpdate("drop table " + fullTableName); + assertUpdate("drop schema " + fullSchemaName); + } +} diff --git a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestGravitinoConnectorWithSimpleCatalogName.java b/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestGravitinoConnectorWithSimpleCatalogName.java deleted file mode 100644 index 573816701de..00000000000 --- a/trino-connector/src/test/java/com/datastrato/gravitino/trino/connector/TestGravitinoConnectorWithSimpleCatalogName.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2023 Datastrato Pvt Ltd. - * This software is licensed under the Apache License version 2. 
- */ -package com.datastrato.gravitino.trino.connector; - -import static io.trino.testing.TestingSession.testSessionBuilder; -import static org.assertj.core.api.Assertions.assertThat; -import static org.testng.Assert.assertEquals; - -import com.datastrato.gravitino.client.GravitinoAdminClient; -import com.datastrato.gravitino.trino.connector.catalog.CatalogConnectorManager; -import io.trino.Session; -import io.trino.plugin.memory.MemoryPlugin; -import io.trino.testing.AbstractTestQueryFramework; -import io.trino.testing.DistributedQueryRunner; -import io.trino.testing.MaterializedResult; -import io.trino.testing.MaterializedRow; -import io.trino.testing.QueryRunner; -import java.util.HashMap; -import java.util.List; -import java.util.concurrent.TimeUnit; -import org.testcontainers.shaded.org.awaitility.Awaitility; -import org.testng.annotations.Test; - -public class TestGravitinoConnectorWithSimpleCatalogName extends AbstractTestQueryFramework { - - GravitinoMockServer server; - - @Override - protected QueryRunner createQueryRunner() throws Exception { - server = closeAfterClass(new GravitinoMockServer(true)); - GravitinoAdminClient gravitinoClient = server.createGravitinoClient(); - - Session session = testSessionBuilder().setCatalog("gravitino").build(); - QueryRunner queryRunner = null; - try { - queryRunner = DistributedQueryRunner.builder(session).setNodeCount(1).build(); - - TestGravitinoPlugin gravitinoPlugin = new TestGravitinoPlugin(gravitinoClient); - queryRunner.installPlugin(gravitinoPlugin); - queryRunner.installPlugin(new MemoryPlugin()); - - // create a gravitino connector named gravitino using metalake test - HashMap properties = new HashMap<>(); - properties.put("gravitino.metalake", "test"); - properties.put("gravitino.uri", "http://127.0.0.1:8090"); - properties.put("gravitino.simplify-catalog-names", "true"); - queryRunner.createCatalog("gravitino", "gravitino", properties); - - CatalogConnectorManager catalogConnectorManager = - 
gravitinoPlugin.getCatalogConnectorManager(); - server.setCatalogConnectorManager(catalogConnectorManager); - // Wait for the catalog to be created. Wait for at least 30 seconds. - Awaitility.await() - .atMost(30, TimeUnit.SECONDS) - .pollInterval(1, TimeUnit.SECONDS) - .until(() -> !catalogConnectorManager.getCatalogs().isEmpty()); - } catch (Exception e) { - throw new RuntimeException("Create query runner failed", e); - } - return queryRunner; - } - - @Test - public void testCatalogName() { - assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("gravitino"); - assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("memory"); - assertUpdate("call gravitino.system.create_catalog('memory1', 'memory', Map())"); - assertThat(computeActual("show catalogs").getOnlyColumnAsSet()).contains("memory1"); - - String schemaName = "db1"; - String fullSchemaName = String.format("\"%s\".%s", "memory", schemaName); - assertUpdate("create schema " + fullSchemaName); - assertThat(computeActual("show schemas from \"memory\"").getOnlyColumnAsSet()) - .contains(schemaName); - - assertUpdate("drop schema " + fullSchemaName); - assertUpdate("call gravitino.system.drop_catalog('memory1')"); - } - - @Test - public void testSystemTable() throws Exception { - MaterializedResult expectedResult = computeActual("select * from gravitino.system.catalog"); - assertEquals(expectedResult.getRowCount(), 1); - List expectedRows = expectedResult.getMaterializedRows(); - MaterializedRow row = expectedRows.get(0); - assertEquals(row.getField(0), "memory"); - assertEquals(row.getField(1), "memory"); - assertEquals(row.getField(2), "{\"max_ttl\":\"10\"}"); - } -} diff --git a/web/src/app/metalakes/metalake/MetalakeTree.js b/web/src/app/metalakes/metalake/MetalakeTree.js index 58141684290..9435bf7e648 100644 --- a/web/src/app/metalakes/metalake/MetalakeTree.js +++ b/web/src/app/metalakes/metalake/MetalakeTree.js @@ -79,7 +79,7 @@ const MetalakeTree = props => { case 
'fileset': { if (store.selectedNodes.includes(nodeProps.data.key)) { const pathArr = extractPlaceholder(nodeProps.data.key) - const [metalake, catalog, schema, fileset] = pathArr + const [metalake, catalog, type, schema, fileset] = pathArr dispatch(getFilesetDetails({ init: true, metalake, catalog, schema, fileset })) } break @@ -87,7 +87,7 @@ const MetalakeTree = props => { case 'topic': { if (store.selectedNodes.includes(nodeProps.data.key)) { const pathArr = extractPlaceholder(nodeProps.data.key) - const [metalake, catalog, schema, topic] = pathArr + const [metalake, catalog, type, schema, topic] = pathArr dispatch(getTopicDetails({ init: true, metalake, catalog, schema, topic })) } break diff --git a/web/src/app/metalakes/metalake/MetalakeView.js b/web/src/app/metalakes/metalake/MetalakeView.js index 44c3e2e2daf..992e6af46b4 100644 --- a/web/src/app/metalakes/metalake/MetalakeView.js +++ b/web/src/app/metalakes/metalake/MetalakeView.js @@ -9,7 +9,7 @@ import { useEffect } from 'react' import { Box } from '@mui/material' -import { useAppDispatch } from '@/lib/hooks/useStore' +import { useAppDispatch, useAppSelector } from '@/lib/hooks/useStore' import { useSearchParams } from 'next/navigation' import MetalakePageLeftBar from './MetalakePageLeftBar' import RightContent from './rightContent/RightContent' @@ -32,8 +32,8 @@ import { const MetalakeView = () => { const dispatch = useAppDispatch() const searchParams = useSearchParams() - const paramsSize = [...searchParams.keys()].length + const store = useAppSelector(state => state.metalakes) useEffect(() => { const routeParams = { @@ -54,11 +54,18 @@ const MetalakeView = () => { } if (paramsSize === 3 && catalog) { + if (!store.catalogs.length) { + dispatch(fetchCatalogs({ metalake })) + } dispatch(fetchSchemas({ init: true, page: 'catalogs', metalake, catalog, type })) dispatch(getCatalogDetails({ metalake, catalog, type })) } if (paramsSize === 4 && catalog && type && schema) { + if (!store.catalogs.length) { + 
dispatch(fetchCatalogs({ metalake })) + dispatch(fetchSchemas({ metalake, catalog, type })) + } switch (type) { case 'relational': dispatch(fetchTables({ init: true, page: 'schemas', metalake, catalog, schema })) @@ -75,16 +82,20 @@ const MetalakeView = () => { dispatch(getSchemaDetails({ metalake, catalog, schema })) } - if (paramsSize === 5 && catalog && schema && table) { - dispatch(getTableDetails({ init: true, metalake, catalog, schema, table })) - } - - if (paramsSize === 5 && catalog && schema && fileset) { - dispatch(getFilesetDetails({ init: true, metalake, catalog, schema, fileset })) - } - - if (paramsSize === 5 && catalog && schema && topic) { - dispatch(getTopicDetails({ init: true, metalake, catalog, schema, topic })) + if (paramsSize === 5 && catalog && schema) { + if (!store.catalogs.length) { + dispatch(fetchCatalogs({ metalake })) + dispatch(fetchSchemas({ metalake, catalog, type })) + } + if (table) { + dispatch(getTableDetails({ init: true, metalake, catalog, schema, table })) + } + if (fileset) { + dispatch(getFilesetDetails({ init: true, metalake, catalog, schema, fileset })) + } + if (topic) { + dispatch(getTopicDetails({ init: true, metalake, catalog, schema, topic })) + } } } diff --git a/web/src/app/metalakes/metalake/rightContent/CreateCatalogDialog.js b/web/src/app/metalakes/metalake/rightContent/CreateCatalogDialog.js index f3005fcd4f5..cfab9e650db 100644 --- a/web/src/app/metalakes/metalake/rightContent/CreateCatalogDialog.js +++ b/web/src/app/metalakes/metalake/rightContent/CreateCatalogDialog.js @@ -396,7 +396,7 @@ const CreateCatalogDialog = props => { setInnerProps(propsItems) setValue('propItems', propsItems) } - }, [open, data, setValue]) + }, [open, data, setValue, type]) return ( diff --git a/web/src/app/metalakes/metalake/rightContent/tabsContent/TabsContent.js b/web/src/app/metalakes/metalake/rightContent/tabsContent/TabsContent.js index 699d3bf069b..bdea15e7763 100644 --- 
a/web/src/app/metalakes/metalake/rightContent/tabsContent/TabsContent.js +++ b/web/src/app/metalakes/metalake/rightContent/tabsContent/TabsContent.js @@ -7,7 +7,7 @@ import { Inconsolata } from 'next/font/google' -import { useState, useEffect } from 'react' +import { useState, useEffect, Fragment } from 'react' import { styled, Box, Divider, List, ListItem, ListItemText, Stack, Tab, Typography } from '@mui/material' import Tooltip, { tooltipClasses } from '@mui/material/Tooltip' @@ -163,10 +163,16 @@ const TabsContent = () => { - {item.items.map(i => { + {item.items.map((it, idx) => { return ( - - {item.type === 'sortOrders' ? i.text : i.fields} + + {item.type === 'sortOrders' ? it.text : it.fields.join('.')} ) })} @@ -210,11 +216,16 @@ const TabsContent = () => { textOverflow: 'ellipsis' }} > - - {item.type === 'sortOrders' - ? item.items.map(i => i.text) - : item.items.map(i => i.fields)} - + {item.items.map((it, idx) => { + return ( + + + {it.fields.join('.')} + + {idx < item.items.length - 1 && , } + + ) + })} } /> diff --git a/web/src/app/metalakes/metalake/rightContent/tabsContent/detailsView/DetailsView.js b/web/src/app/metalakes/metalake/rightContent/tabsContent/detailsView/DetailsView.js index a15b287a259..84728a83cf5 100644 --- a/web/src/app/metalakes/metalake/rightContent/tabsContent/detailsView/DetailsView.js +++ b/web/src/app/metalakes/metalake/rightContent/tabsContent/detailsView/DetailsView.js @@ -23,7 +23,7 @@ const DetailsView = () => { const audit = activatedItem?.audit || {} - const properties = Object.keys(activatedItem?.properties || []) + let properties = Object.keys(activatedItem?.properties || []) .filter(key => !['partition-count', 'replication-factor'].includes(key)) .map(item => { return { @@ -32,14 +32,15 @@ const DetailsView = () => { } }) if (paramsSize === 5 && searchParams.get('topic')) { - properties.unshift({ - key: 'replication-factor', - value: JSON.stringify(activatedItem?.properties['replication-factor'])?.replace(/^"|"$/g, '') 
- }) - properties.unshift({ - key: 'partition-count', - value: JSON.stringify(activatedItem?.properties['partition-count'])?.replace(/^"|"$/g, '') - }) + const topicPros = Object.keys(activatedItem?.properties || []) + .filter(key => ['partition-count', 'replication-factor'].includes(key)) + .map(item => { + return { + key: item, + value: JSON.stringify(activatedItem?.properties[item]).replace(/^"|"$/g, '') + } + }) + properties = [...topicPros, ...properties] } const renderFieldText = ({ value, linkBreak = false, isDate = false }) => { diff --git a/web/src/app/metalakes/metalake/rightContent/tabsContent/tableView/TableView.js b/web/src/app/metalakes/metalake/rightContent/tabsContent/tableView/TableView.js index 1074e86aa43..71a2ea1464f 100644 --- a/web/src/app/metalakes/metalake/rightContent/tabsContent/tableView/TableView.js +++ b/web/src/app/metalakes/metalake/rightContent/tabsContent/tableView/TableView.js @@ -111,10 +111,16 @@ const TableView = () => { - {items.map(i => { + {items.map((it, idx) => { return ( - - {i.text || i.fields} + + {it.text || it.fields} ) })} diff --git a/web/src/lib/store/metalakes/index.js b/web/src/lib/store/metalakes/index.js index eeba3508a4c..30742dea554 100644 --- a/web/src/lib/store/metalakes/index.js +++ b/web/src/lib/store/metalakes/index.js @@ -492,10 +492,6 @@ export const fetchSchemas = createAsyncThunk( ) } - if (getState().metalakes.metalakeTree.length === 0) { - dispatch(fetchCatalogs({ metalake })) - } - dispatch(setExpandedNodes([`{{${metalake}}}`, `{{${metalake}}}{{${catalog}}}{{${type}}}`])) return { schemas, page, init } @@ -567,10 +563,6 @@ export const fetchTables = createAsyncThunk( ) } - if (getState().metalakes.metalakeTree.length === 0) { - dispatch(fetchCatalogs({ metalake })) - } - dispatch( setExpandedNodes([ `{{${metalake}}}`, @@ -609,7 +601,7 @@ export const getTableDetails = createAsyncThunk( items: partitioning.map(i => { let fields = i.fieldName || [] let sub = '' - let last = i.fieldName.join('.') + 
let last = i.fieldName switch (i.strategy) { case 'bucket': @@ -680,7 +672,7 @@ export const getTableDetails = createAsyncThunk( fields: i.fieldNames, name: i.name, indexType: i.indexType, - text: `${i.name}(${i.fieldNames.join(',')})` + text: `${i.name}(${i.fieldNames.join('.')})` } }) } @@ -688,10 +680,6 @@ export const getTableDetails = createAsyncThunk( dispatch(setTableProps(tableProps)) - if (getState().metalakes.metalakeTree.length === 0) { - dispatch(fetchCatalogs({ metalake })) - } - dispatch( setExpandedNodes([ `{{${metalake}}}`, @@ -753,10 +741,6 @@ export const fetchFilesets = createAsyncThunk( ) } - if (getState().metalakes.metalakeTree.length === 0) { - dispatch(fetchCatalogs({ metalake })) - } - dispatch( setExpandedNodes([ `{{${metalake}}}`, @@ -786,10 +770,6 @@ export const getFilesetDetails = createAsyncThunk( const { fileset: resFileset } = res - if (getState().metalakes.metalakeTree.length === 0) { - dispatch(fetchCatalogs({ metalake })) - } - dispatch( setExpandedNodes([ `{{${metalake}}}`, @@ -851,10 +831,6 @@ export const fetchTopics = createAsyncThunk( ) } - if (getState().metalakes.metalakeTree.length === 0) { - dispatch(fetchCatalogs({ metalake })) - } - dispatch( setExpandedNodes([ `{{${metalake}}}`, @@ -884,10 +860,6 @@ export const getTopicDetails = createAsyncThunk( const { topic: resTopic } = res - if (getState().metalakes.metalakeTree.length === 0) { - dispatch(fetchCatalogs({ metalake })) - } - dispatch( setExpandedNodes([ `{{${metalake}}}`,