From 8d7a2c90b99f5fc65b9c8b898640a99ae03aa55b Mon Sep 17 00:00:00 2001 From: charliecheng630 <74488612+charliecheng630@users.noreply.github.com> Date: Mon, 15 Apr 2024 21:48:21 +0800 Subject: [PATCH 1/7] [#2916] Improvement(trino): Improvement on Collections. EMPTY_LIST (#2951) ### What changes were proposed in this pull request? Use Collections.emptyList() rather than Collections.EMPTY_LIST ### Why are the changes needed? Use of Collections.EMPTY_LIST can cause ClassCastException exceptions at runtime, better to let the compiler catch these sort of issues. Fix: #2916 ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? UT --- .../trino/connector/catalog/hive/HiveMetadataAdapter.java | 8 ++++---- .../connector/catalog/iceberg/IcebergMetadataAdapter.java | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/hive/HiveMetadataAdapter.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/hive/HiveMetadataAdapter.java index c4c6824575e..5339b482cee 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/hive/HiveMetadataAdapter.java +++ b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/hive/HiveMetadataAdapter.java @@ -94,11 +94,11 @@ public GravitinoTable createTable(ConnectorTableMetadata tableMetadata) { List partitionColumns = propertyMap.containsKey(HIVE_PARTITION_KEY) ? (List) propertyMap.get(HIVE_PARTITION_KEY) - : Collections.EMPTY_LIST; + : Collections.emptyList(); List bucketColumns = propertyMap.containsKey(HIVE_BUCKET_KEY) ? (List) propertyMap.get(HIVE_BUCKET_KEY) - : Collections.EMPTY_LIST; + : Collections.emptyList(); int bucketCount = propertyMap.containsKey(HIVE_BUCKET_COUNT_KEY) ? 
(int) propertyMap.get(HIVE_BUCKET_COUNT_KEY) @@ -106,7 +106,7 @@ public GravitinoTable createTable(ConnectorTableMetadata tableMetadata) { List sortColumns = propertyMap.containsKey(HIVE_SORT_ORDER_KEY) ? (List) propertyMap.get(HIVE_SORT_ORDER_KEY) - : Collections.EMPTY_LIST; + : Collections.emptyList(); if (!sortColumns.isEmpty() && (bucketColumns.isEmpty() || bucketCount == 0)) { throw new TrinoException( @@ -186,7 +186,7 @@ public ConnectorTableMetadata getTableMetadata(GravitinoTable gravitinoTable) { ((Transform.SingleFieldTransform) ts) .fieldName()[0].toLowerCase(Locale.ENGLISH)) .collect(Collectors.toList()) - : Collections.EMPTY_LIST); + : Collections.emptyList()); } if (gravitinoTable.getDistribution() != null diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/iceberg/IcebergMetadataAdapter.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/iceberg/IcebergMetadataAdapter.java index 1eec034ac53..957460a1433 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/iceberg/IcebergMetadataAdapter.java +++ b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/catalog/iceberg/IcebergMetadataAdapter.java @@ -85,12 +85,12 @@ public GravitinoTable createTable(ConnectorTableMetadata tableMetadata) { List partitionColumns = propertyMap.containsKey(ICEBERG_PARTITIONING_PROPERTY) ? (List) propertyMap.get(ICEBERG_PARTITIONING_PROPERTY) - : Collections.EMPTY_LIST; + : Collections.emptyList(); List sortColumns = propertyMap.containsKey(ICEBERG_SORTED_BY_PROPERTY) ? (List) propertyMap.get(ICEBERG_SORTED_BY_PROPERTY) - : Collections.EMPTY_LIST; + : Collections.emptyList(); Map properties = toGravitinoTableProperties( @@ -153,7 +153,7 @@ public ConnectorTableMetadata getTableMetadata(GravitinoTable gravitinoTable) { ? 
Arrays.stream(gravitinoTable.getPartitioning()) .map(ts -> ((Transform.SingleFieldTransform) ts).fieldName()[0]) .collect(Collectors.toList()) - : Collections.EMPTY_LIST); + : Collections.emptyList()); } if (ArrayUtils.isNotEmpty(gravitinoTable.getSortOrders())) { From f2fb33558045c84f3f6f7d02222e09cfca598e9f Mon Sep 17 00:00:00 2001 From: FANNG Date: Mon, 15 Apr 2024 22:38:59 +0800 Subject: [PATCH 2/7] [#2767] feat(core): supports fileset event for event listener (#2882) ### What changes were proposed in this pull request? * `CreateFilesetEvent` * `AlterFilesetEvent` * `DropFilesetEvent` * `LoadFilesetEvent` * `ListFilesetEvent` * `CreateFilesetFailureEvent` * `AlterFilesetFailureEvent` * `DropFilesetFailureEvent` * `LoadFilesetFailureEvent` * `ListFilesetFailureEvent` ### Why are the changes needed? Fix: #2767 ### Does this PR introduce _any_ user-facing change? no ### How was this patch tested? existing tests --- .../datastrato/gravitino/GravitinoEnv.java | 15 ++- .../gravitino/catalog/FilesetDispatcher.java | 16 +++ .../catalog/FilesetEventDispatcher.java | 127 ++++++++++++++++++ .../catalog/FilesetOperationDispatcher.java | 3 +- .../listener/api/event/AlterFilesetEvent.java | 58 ++++++++ .../api/event/AlterFilesetFailureEvent.java | 46 +++++++ .../api/event/CreateFilesetEvent.java | 41 ++++++ .../api/event/CreateFilesetFailureEvent.java | 47 +++++++ .../listener/api/event/DropFilesetEvent.java | 38 ++++++ .../api/event/DropFilesetFailureEvent.java | 27 ++++ .../listener/api/event/FilesetEvent.java | 34 +++++ .../api/event/FilesetFailureEvent.java | 33 +++++ .../listener/api/event/ListFilesetEvent.java | 38 ++++++ .../api/event/ListFilesetFailureEvent.java | 41 ++++++ .../listener/api/event/LoadFilesetEvent.java | 37 +++++ .../api/event/LoadFilesetFailureEvent.java | 25 ++++ .../listener/api/info/FilesetInfo.java | 78 +++++++++++ .../gravitino/server/GravitinoServer.java | 6 +- .../server/web/rest/FilesetOperations.java | 6 +- 
.../web/rest/TestFilesetOperations.java | 3 +- 20 files changed, 703 insertions(+), 16 deletions(-) create mode 100644 core/src/main/java/com/datastrato/gravitino/catalog/FilesetDispatcher.java create mode 100644 core/src/main/java/com/datastrato/gravitino/catalog/FilesetEventDispatcher.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterFilesetEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterFilesetFailureEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateFilesetEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateFilesetFailureEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/DropFilesetEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/DropFilesetFailureEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/FilesetEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/FilesetFailureEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/ListFilesetEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/ListFilesetFailureEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadFilesetEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadFilesetFailureEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/info/FilesetInfo.java diff --git a/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java b/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java index c357c9bcc0c..61144fe3e4b 100644 --- a/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java +++ b/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java @@ -7,6 
+7,8 @@ import com.datastrato.gravitino.authorization.AccessControlManager; import com.datastrato.gravitino.auxiliary.AuxiliaryServiceManager; import com.datastrato.gravitino.catalog.CatalogManager; +import com.datastrato.gravitino.catalog.FilesetDispatcher; +import com.datastrato.gravitino.catalog.FilesetEventDispatcher; import com.datastrato.gravitino.catalog.FilesetOperationDispatcher; import com.datastrato.gravitino.catalog.SchemaOperationDispatcher; import com.datastrato.gravitino.catalog.TableDispatcher; @@ -43,7 +45,7 @@ public class GravitinoEnv { private TableDispatcher tableDispatcher; - private FilesetOperationDispatcher filesetOperationDispatcher; + private FilesetDispatcher filesetDispatcher; private TopicOperationDispatcher topicOperationDispatcher; @@ -132,8 +134,9 @@ public void initialize(Config config) { TableOperationDispatcher tableOperationDispatcher = new TableOperationDispatcher(catalogManager, entityStore, idGenerator); this.tableDispatcher = new TableEventDispatcher(eventBus, tableOperationDispatcher); - this.filesetOperationDispatcher = + FilesetOperationDispatcher filesetOperationDispatcher = new FilesetOperationDispatcher(catalogManager, entityStore, idGenerator); + this.filesetDispatcher = new FilesetEventDispatcher(eventBus, filesetOperationDispatcher); this.topicOperationDispatcher = new TopicOperationDispatcher(catalogManager, entityStore, idGenerator); @@ -201,12 +204,12 @@ public TableDispatcher tableDispatcher() { } /** - * Get the FilesetOperationDispatcher associated with the Gravitino environment. + * Get the FilesetDispatcher associated with the Gravitino environment. * - * @return The FilesetOperationDispatcher instance. + * @return The FilesetDispatcher instance. 
*/ - public FilesetOperationDispatcher filesetOperationDispatcher() { - return filesetOperationDispatcher; + public FilesetDispatcher filesetDispatcher() { + return filesetDispatcher; } /** diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/FilesetDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/FilesetDispatcher.java new file mode 100644 index 00000000000..f2d0daf280d --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/catalog/FilesetDispatcher.java @@ -0,0 +1,16 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.catalog; + +import com.datastrato.gravitino.file.FilesetCatalog; + +/** + * {@code FilesetDispatcher} interface acts as a specialization of the {@link FilesetCatalog} + * interface. This interface is designed to potentially add custom behaviors or operations related + * to dispatching or handling fileset-related events or actions that are not covered by the standard + * {@code FilesetCatalog} operations. + */ +public interface FilesetDispatcher extends FilesetCatalog {} diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/FilesetEventDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/FilesetEventDispatcher.java new file mode 100644 index 00000000000..edc98bb1660 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/catalog/FilesetEventDispatcher.java @@ -0,0 +1,127 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ + +package com.datastrato.gravitino.catalog; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.exceptions.FilesetAlreadyExistsException; +import com.datastrato.gravitino.exceptions.NoSuchFilesetException; +import com.datastrato.gravitino.exceptions.NoSuchSchemaException; +import com.datastrato.gravitino.file.Fileset; +import com.datastrato.gravitino.file.FilesetChange; +import com.datastrato.gravitino.listener.EventBus; +import com.datastrato.gravitino.listener.api.event.AlterFilesetEvent; +import com.datastrato.gravitino.listener.api.event.AlterFilesetFailureEvent; +import com.datastrato.gravitino.listener.api.event.CreateFilesetEvent; +import com.datastrato.gravitino.listener.api.event.CreateFilesetFailureEvent; +import com.datastrato.gravitino.listener.api.event.DropFilesetEvent; +import com.datastrato.gravitino.listener.api.event.DropFilesetFailureEvent; +import com.datastrato.gravitino.listener.api.event.ListFilesetEvent; +import com.datastrato.gravitino.listener.api.event.ListFilesetFailureEvent; +import com.datastrato.gravitino.listener.api.event.LoadFilesetEvent; +import com.datastrato.gravitino.listener.api.event.LoadFilesetFailureEvent; +import com.datastrato.gravitino.listener.api.info.FilesetInfo; +import com.datastrato.gravitino.utils.PrincipalUtils; +import java.util.Map; + +/** + * {@code FilesetEventDispatcher} is a decorator for {@link FilesetDispatcher} that not only + * delegates fileset operations to the underlying catalog dispatcher but also dispatches + * corresponding events to an {@link EventBus} after each operation is completed. This allows for + * event-driven workflows or monitoring of fileset operations. 
+ */ +public class FilesetEventDispatcher implements FilesetDispatcher { + private final EventBus eventBus; + private final FilesetDispatcher dispatcher; + + public FilesetEventDispatcher(EventBus eventBus, FilesetDispatcher dispatcher) { + this.eventBus = eventBus; + this.dispatcher = dispatcher; + } + + @Override + public NameIdentifier[] listFilesets(Namespace namespace) throws NoSuchSchemaException { + try { + NameIdentifier[] nameIdentifiers = dispatcher.listFilesets(namespace); + eventBus.dispatchEvent(new ListFilesetEvent(PrincipalUtils.getCurrentUserName(), namespace)); + return nameIdentifiers; + } catch (Exception e) { + eventBus.dispatchEvent( + new ListFilesetFailureEvent(PrincipalUtils.getCurrentUserName(), namespace, e)); + throw e; + } + } + + @Override + public Fileset loadFileset(NameIdentifier ident) throws NoSuchFilesetException { + try { + Fileset fileset = dispatcher.loadFileset(ident); + eventBus.dispatchEvent( + new LoadFilesetEvent( + PrincipalUtils.getCurrentUserName(), ident, new FilesetInfo(fileset))); + return fileset; + } catch (Exception e) { + eventBus.dispatchEvent( + new LoadFilesetFailureEvent(PrincipalUtils.getCurrentUserName(), ident, e)); + throw e; + } + } + + @Override + public Fileset createFileset( + NameIdentifier ident, + String comment, + Fileset.Type type, + String storageLocation, + Map properties) + throws NoSuchSchemaException, FilesetAlreadyExistsException { + try { + Fileset fileset = dispatcher.createFileset(ident, comment, type, storageLocation, properties); + eventBus.dispatchEvent( + new CreateFilesetEvent( + PrincipalUtils.getCurrentUserName(), ident, new FilesetInfo(fileset))); + return fileset; + } catch (Exception e) { + eventBus.dispatchEvent( + new CreateFilesetFailureEvent( + PrincipalUtils.getCurrentUserName(), + ident, + e, + new FilesetInfo(ident.name(), comment, type, storageLocation, properties, null))); + throw e; + } + } + + @Override + public Fileset alterFileset(NameIdentifier ident, 
FilesetChange... changes) + throws NoSuchFilesetException, IllegalArgumentException { + try { + Fileset fileset = dispatcher.alterFileset(ident, changes); + eventBus.dispatchEvent( + new AlterFilesetEvent( + PrincipalUtils.getCurrentUserName(), ident, changes, new FilesetInfo(fileset))); + return fileset; + } catch (Exception e) { + eventBus.dispatchEvent( + new AlterFilesetFailureEvent(PrincipalUtils.getCurrentUserName(), ident, e, changes)); + throw e; + } + } + + @Override + public boolean dropFileset(NameIdentifier ident) { + try { + boolean isExists = dispatcher.dropFileset(ident); + eventBus.dispatchEvent( + new DropFilesetEvent(PrincipalUtils.getCurrentUserName(), ident, isExists)); + return isExists; + } catch (Exception e) { + eventBus.dispatchEvent( + new DropFilesetFailureEvent(PrincipalUtils.getCurrentUserName(), ident, e)); + throw e; + } + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/FilesetOperationDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/FilesetOperationDispatcher.java index 892b444e1fc..d462e6ac26a 100644 --- a/core/src/main/java/com/datastrato/gravitino/catalog/FilesetOperationDispatcher.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/FilesetOperationDispatcher.java @@ -17,12 +17,11 @@ import com.datastrato.gravitino.exceptions.NoSuchSchemaException; import com.datastrato.gravitino.exceptions.NonEmptyEntityException; import com.datastrato.gravitino.file.Fileset; -import com.datastrato.gravitino.file.FilesetCatalog; import com.datastrato.gravitino.file.FilesetChange; import com.datastrato.gravitino.storage.IdGenerator; import java.util.Map; -public class FilesetOperationDispatcher extends OperationDispatcher implements FilesetCatalog { +public class FilesetOperationDispatcher extends OperationDispatcher implements FilesetDispatcher { /** * Creates a new FilesetOperationDispatcher instance. 
* diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterFilesetEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterFilesetEvent.java new file mode 100644 index 00000000000..57fed2e8db6 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterFilesetEvent.java @@ -0,0 +1,58 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.file.FilesetChange; +import com.datastrato.gravitino.listener.api.info.FilesetInfo; + +/** Represents an event that occurs when a fileset is altered. */ +@DeveloperApi +public final class AlterFilesetEvent extends FilesetEvent { + private final FilesetInfo updatedFilesetInfo; + private final FilesetChange[] filesetChanges; + + /** + * Constructs a new {@code AlterFilesetEvent} instance. + * + * @param user The username of the individual who initiated the fileset alteration. + * @param identifier The unique identifier of the fileset that was altered. + * @param filesetChanges An array of {@link FilesetChange} objects representing the specific + * changes applied to the fileset. + * @param updatedFilesetInfo The {@link FilesetInfo} object representing the state of the fileset + * after the changes were applied. + */ + public AlterFilesetEvent( + String user, + NameIdentifier identifier, + FilesetChange[] filesetChanges, + FilesetInfo updatedFilesetInfo) { + super(user, identifier); + this.filesetChanges = filesetChanges.clone(); + this.updatedFilesetInfo = updatedFilesetInfo; + } + + /** + * Retrieves the array of changes made to the fileset. + * + * @return An array of {@link FilesetChange} objects detailing the modifications applied to the + * fileset. 
+ */ + public FilesetChange[] filesetChanges() { + return filesetChanges; + } + + /** + * Retrieves the information about the fileset after the alterations. + * + * @return A {@link FilesetInfo} object representing the current state of the fileset + * post-alteration. + */ + public FilesetInfo updatedFilesetInfo() { + return updatedFilesetInfo; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterFilesetFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterFilesetFailureEvent.java new file mode 100644 index 00000000000..515f8cdce92 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterFilesetFailureEvent.java @@ -0,0 +1,46 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.file.FilesetChange; + +/** + * Represents an event that is generated when an attempt to alter a fileset fails due to an + * exception. + */ +@DeveloperApi +public final class AlterFilesetFailureEvent extends FilesetFailureEvent { + private final FilesetChange[] filesetChanges; + + /** + * Constructs a new {@code AlterFilesetFailureEvent}, capturing detailed information about the + * failed attempt to alter a fileset. + * + * @param user The user who initiated the fileset alteration operation. + * @param identifier The identifier of the fileset that was attempted to be altered. + * @param exception The exception that was encountered during the alteration attempt, providing + * insight into the cause of the failure. + * @param filesetChanges An array of {@link FilesetChange} objects representing the changes that + * were attempted on the fileset. 
+ */ + public AlterFilesetFailureEvent( + String user, NameIdentifier identifier, Exception exception, FilesetChange[] filesetChanges) { + super(user, identifier, exception); + this.filesetChanges = filesetChanges.clone(); + } + + /** + * Retrieves the changes that were attempted on the fileset, leading to the failure. + * + * @return An array of {@link FilesetChange} objects detailing the modifications that were + * attempted on the fileset. + */ + public FilesetChange[] filesetChanges() { + return filesetChanges; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateFilesetEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateFilesetEvent.java new file mode 100644 index 00000000000..d5e091dd725 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateFilesetEvent.java @@ -0,0 +1,41 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.listener.api.info.FilesetInfo; + +/** Represents an event that is triggered following the successful creation of a fileset. */ +@DeveloperApi +public final class CreateFilesetEvent extends FilesetEvent { + private final FilesetInfo createdFilesetInfo; + + /** + * Constructs a new {@code CreateFilesetEvent}, capturing the essential details surrounding the + * successful creation of a fileset. + * + * @param user The username of the person who initiated the creation of the fileset. + * @param identifier The unique identifier of the newly created fileset. + * @param createdFilesetInfo The state of the fileset immediately following its creation, + * including details such as its location, structure, and access permissions. 
+ */ + public CreateFilesetEvent( + String user, NameIdentifier identifier, FilesetInfo createdFilesetInfo) { + super(user, identifier); + this.createdFilesetInfo = createdFilesetInfo; + } + + /** + * Provides information about the fileset as it was configured at the moment of creation. + * + * @return A {@link FilesetInfo} object encapsulating the state of the fileset immediately after + * its creation. + */ + public FilesetInfo createdFilesetInfo() { + return createdFilesetInfo; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateFilesetFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateFilesetFailureEvent.java new file mode 100644 index 00000000000..6e2a49c0a01 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateFilesetFailureEvent.java @@ -0,0 +1,47 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.listener.api.info.FilesetInfo; + +/** Represents an event triggered upon the unsuccessful attempt to create a fileset. */ +@DeveloperApi +public final class CreateFilesetFailureEvent extends FilesetFailureEvent { + private final FilesetInfo createFilesetRequest; + + /** + * Constructs a new {@code CreateFilesetFailureEvent}, capturing the specifics of the failed + * fileset creation attempt. + * + * @param user The user who initiated the attempt to create the fileset. + * @param identifier The identifier of the fileset intended for creation. + * @param exception The exception encountered during the fileset creation process, shedding light + * on the potential reasons behind the failure. + * @param createFilesetRequest The original request information used to attempt to create the + * fileset. 
+ */ + public CreateFilesetFailureEvent( + String user, + NameIdentifier identifier, + Exception exception, + FilesetInfo createFilesetRequest) { + super(user, identifier, exception); + this.createFilesetRequest = createFilesetRequest; + } + + /** + * Provides insight into the intended configuration of the fileset at the time of the failed + * creation attempt. + * + * @return The {@link FilesetInfo} instance representing the request information for the failed + * fileset creation attempt. + */ + public FilesetInfo createFilesetRequest() { + return createFilesetRequest; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropFilesetEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropFilesetEvent.java new file mode 100644 index 00000000000..dd65eac931c --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropFilesetEvent.java @@ -0,0 +1,38 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** Represents an event that occurs when a fileset is dropped from the system. */ +@DeveloperApi +public final class DropFilesetEvent extends FilesetEvent { + private final boolean isExists; + + /** + * Constructs a new {@code DropFilesetEvent}, recording the attempt to drop a fileset. + * + * @param user The user who initiated the drop fileset operation. + * @param identifier The identifier of the fileset that was attempted to be dropped. + * @param isExists A boolean flag indicating whether the fileset existed at the time of the + * operation. 
+ */ + public DropFilesetEvent(String user, NameIdentifier identifier, boolean isExists) { + super(user, identifier); + this.isExists = isExists; + } + + /** + * Retrieves the existence status of the fileset at the time of the drop operation. + * + * @return {@code true} if the fileset existed at the time of the operation, otherwise {@code + * false}. + */ + public boolean isExists() { + return isExists; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropFilesetFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropFilesetFailureEvent.java new file mode 100644 index 00000000000..dcfb8b733ca --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropFilesetFailureEvent.java @@ -0,0 +1,27 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** + * Represents an event that is generated when an attempt to drop a fileset from the system fails. + */ +@DeveloperApi +public final class DropFilesetFailureEvent extends FilesetFailureEvent { + /** + * Constructs a new {@code DropFilesetFailureEvent}. + * + * @param user The user who initiated the drop fileset operation. + * @param identifier The identifier of the fileset that was attempted to be dropped. + * @param exception The exception that was thrown during the drop operation. This exception is key + * to diagnosing the failure, providing insights into what went wrong during the operation. 
+ */ + public DropFilesetFailureEvent(String user, NameIdentifier identifier, Exception exception) { + super(user, identifier, exception); + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/FilesetEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/FilesetEvent.java new file mode 100644 index 00000000000..2e9e0722c1a --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/FilesetEvent.java @@ -0,0 +1,34 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** + * Represents an abstract base class for events related to fileset operations. Extending {@link + * com.datastrato.gravitino.listener.api.event.Event}, this class narrows the focus to operations + * performed on filesets, such as creation, deletion, or modification. It captures vital information + * including the user performing the operation and the identifier of the fileset being manipulated. + * + *

Concrete implementations of this class are expected to provide additional specifics relevant + * to the particular type of fileset operation being represented, enriching the contextual + * understanding of each event. + */ +@DeveloperApi +public abstract class FilesetEvent extends Event { + /** + * Constructs a new {@code FilesetEvent} with the specified user and fileset identifier. + * + * @param user The user responsible for initiating the fileset operation. This information is + * critical for auditing and tracking the origin of actions. + * @param identifier The identifier of the fileset involved in the operation. This includes + * details essential for pinpointing the specific fileset affected by the operation. + */ + protected FilesetEvent(String user, NameIdentifier identifier) { + super(user, identifier); + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/FilesetFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/FilesetFailureEvent.java new file mode 100644 index 00000000000..010d25c36c3 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/FilesetFailureEvent.java @@ -0,0 +1,33 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** + * An abstract class representing events that are triggered when a fileset operation fails due to an + * exception. + * + *

Implementations of this class can be used to convey detailed information about failures in + * operations such as creating, updating, deleting, or querying filesets, making it easier to + * diagnose and respond to issues. + */ +@DeveloperApi +public abstract class FilesetFailureEvent extends FailureEvent { + /** + * Constructs a new {@code FilesetFailureEvent} instance, capturing information about the failed + * fileset operation. + * + * @param user The user associated with the failed fileset operation. + * @param identifier The identifier of the fileset that was involved in the failed operation. + * @param exception The exception that was thrown during the fileset operation, indicating the + * cause of the failure. + */ + protected FilesetFailureEvent(String user, NameIdentifier identifier, Exception exception) { + super(user, identifier, exception); + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListFilesetEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListFilesetEvent.java new file mode 100644 index 00000000000..c5e99d6a70f --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListFilesetEvent.java @@ -0,0 +1,38 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** + * Represents an event that is triggered upon the successful listing of filesets within a system. + */ +@DeveloperApi +public final class ListFilesetEvent extends FilesetEvent { + private final Namespace namespace; + /** + * Constructs a new {@code ListFilesetEvent}. + * + * @param user The user who initiated the listing of filesets. + * @param namespace The namespace within which the filesets are listed. 
The namespace provides + * contextual information, identifying the scope and boundaries of the listing operation. + */ + public ListFilesetEvent(String user, Namespace namespace) { + super(user, NameIdentifier.of(namespace.toString())); + this.namespace = namespace; + } + + /** + * Retrieves the namespace associated with the failed listing event. + * + * @return The {@link Namespace} that was targeted during the failed listing operation. + */ + public Namespace namespace() { + return namespace; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListFilesetFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListFilesetFailureEvent.java new file mode 100644 index 00000000000..bf063fd891e --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListFilesetFailureEvent.java @@ -0,0 +1,41 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** + * Represents an event that is triggered when an attempt to list filesets within a namespace fails. + */ +@DeveloperApi +public final class ListFilesetFailureEvent extends FilesetFailureEvent { + private final Namespace namespace; + + /** + * Constructs a new {@code ListFilesetFailureEvent}. + * + * @param user The username of the individual associated with the failed fileset listing + * operation. + * @param namespace The namespace in which the fileset listing was attempted. + * @param exception The exception encountered during the fileset listing attempt, which serves as + * an indicator of the issues that caused the failure. 
+ */ + public ListFilesetFailureEvent(String user, Namespace namespace, Exception exception) { + super(user, NameIdentifier.of(namespace.toString()), exception); + this.namespace = namespace; + } + + /** + * Retrieves the namespace associated with the failed listing event. + * + * @return The {@link Namespace} that was targeted during the failed listing operation. + */ + public Namespace namespace() { + return namespace; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadFilesetEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadFilesetEvent.java new file mode 100644 index 00000000000..8d7d238ff9d --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadFilesetEvent.java @@ -0,0 +1,37 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.listener.api.info.FilesetInfo; + +/** Represents an event that occurs when a fileset is loaded into the system. */ +@DeveloperApi +public final class LoadFilesetEvent extends FilesetEvent { + private final FilesetInfo loadedFilesetInfo; + /** + * Constructs a new {@code LoadFilesetEvent}. + * + * @param user The user who initiated the loading of the fileset. + * @param identifier The unique identifier of the fileset being loaded. + * @param loadedFilesetInfo The state of the fileset post-loading. + */ + public LoadFilesetEvent(String user, NameIdentifier identifier, FilesetInfo loadedFilesetInfo) { + super(user, identifier); + this.loadedFilesetInfo = loadedFilesetInfo; + } + + /** + * Retrieves the state of the fileset as it was made available to the user after successful + * loading. + * + * @return A {@link FilesetInfo} instance encapsulating the details of the fileset as loaded. 
+ */ + public FilesetInfo loadedFilesetInfo() { + return loadedFilesetInfo; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadFilesetFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadFilesetFailureEvent.java new file mode 100644 index 00000000000..724bd384b88 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadFilesetFailureEvent.java @@ -0,0 +1,25 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** Represents an event that occurs when an attempt to load a fileset into the system fails. */ +@DeveloperApi +public final class LoadFilesetFailureEvent extends FilesetFailureEvent { + /** + * Constructs a new {@code FilesetFailureEvent} instance. + * + * @param user The user associated with the failed fileset operation. + * @param identifier The identifier of the fileset that was involved in the failed operation. + * @param exception The exception that was thrown during the fileset operation, indicating the + * cause of the failure. + */ + public LoadFilesetFailureEvent(String user, NameIdentifier identifier, Exception exception) { + super(user, identifier, exception); + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/info/FilesetInfo.java b/core/src/main/java/com/datastrato/gravitino/listener/api/info/FilesetInfo.java new file mode 100644 index 00000000000..eb81f86510f --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/info/FilesetInfo.java @@ -0,0 +1,78 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ + +package com.datastrato.gravitino.listener.api.info; + +import com.datastrato.gravitino.Audit; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.file.Fileset; +import com.google.common.collect.ImmutableMap; +import java.util.Map; +import javax.annotation.Nullable; + +@DeveloperApi +public final class FilesetInfo { + private final String name; + @Nullable private final String comment; + private final Fileset.Type type; + private final String storageLocation; + private final Map properties; + @Nullable private final Audit audit; + + public FilesetInfo(Fileset fileset) { + this( + fileset.name(), + fileset.comment(), + fileset.type(), + fileset.storageLocation(), + fileset.properties(), + fileset.auditInfo()); + } + + public FilesetInfo( + String name, + String comment, + Fileset.Type type, + String storageLocation, + Map properties, + Audit audit) { + this.name = name; + this.comment = comment; + this.type = type; + this.storageLocation = storageLocation; + if (properties == null) { + this.properties = ImmutableMap.of(); + } else { + this.properties = ImmutableMap.builder().putAll(properties).build(); + } + this.audit = audit; + } + + @Nullable + public Audit auditInfo() { + return audit; + } + + public String name() { + return name; + } + + public Fileset.Type getType() { + return type; + } + + public String getStorageLocation() { + return storageLocation; + } + + @Nullable + public String comment() { + return comment; + } + + public Map properties() { + return properties; + } +} diff --git a/server/src/main/java/com/datastrato/gravitino/server/GravitinoServer.java b/server/src/main/java/com/datastrato/gravitino/server/GravitinoServer.java index 67756319a65..9c7ba40a3a6 100644 --- a/server/src/main/java/com/datastrato/gravitino/server/GravitinoServer.java +++ b/server/src/main/java/com/datastrato/gravitino/server/GravitinoServer.java @@ -7,7 +7,7 @@ import com.datastrato.gravitino.Configs; import 
com.datastrato.gravitino.GravitinoEnv; import com.datastrato.gravitino.catalog.CatalogManager; -import com.datastrato.gravitino.catalog.FilesetOperationDispatcher; +import com.datastrato.gravitino.catalog.FilesetDispatcher; import com.datastrato.gravitino.catalog.SchemaOperationDispatcher; import com.datastrato.gravitino.catalog.TableDispatcher; import com.datastrato.gravitino.metalake.MetalakeManager; @@ -83,9 +83,7 @@ protected void configure() { .to(SchemaOperationDispatcher.class) .ranked(1); bind(gravitinoEnv.tableDispatcher()).to(TableDispatcher.class).ranked(1); - bind(gravitinoEnv.filesetOperationDispatcher()) - .to(FilesetOperationDispatcher.class) - .ranked(1); + bind(gravitinoEnv.filesetDispatcher()).to(FilesetDispatcher.class).ranked(1); bind(gravitinoEnv.topicOperationDispatcher()) .to(com.datastrato.gravitino.catalog.TopicOperationDispatcher.class) .ranked(1); diff --git a/server/src/main/java/com/datastrato/gravitino/server/web/rest/FilesetOperations.java b/server/src/main/java/com/datastrato/gravitino/server/web/rest/FilesetOperations.java index 49f245b0726..1611422ae9a 100644 --- a/server/src/main/java/com/datastrato/gravitino/server/web/rest/FilesetOperations.java +++ b/server/src/main/java/com/datastrato/gravitino/server/web/rest/FilesetOperations.java @@ -8,7 +8,7 @@ import com.codahale.metrics.annotation.Timed; import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.Namespace; -import com.datastrato.gravitino.catalog.FilesetOperationDispatcher; +import com.datastrato.gravitino.catalog.FilesetDispatcher; import com.datastrato.gravitino.dto.requests.FilesetCreateRequest; import com.datastrato.gravitino.dto.requests.FilesetUpdateRequest; import com.datastrato.gravitino.dto.requests.FilesetUpdatesRequest; @@ -42,12 +42,12 @@ public class FilesetOperations { private static final Logger LOG = LoggerFactory.getLogger(FilesetOperations.class); - private final FilesetOperationDispatcher dispatcher; + private final 
FilesetDispatcher dispatcher; @Context private HttpServletRequest httpRequest; @Inject - public FilesetOperations(FilesetOperationDispatcher dispatcher) { + public FilesetOperations(FilesetDispatcher dispatcher) { this.dispatcher = dispatcher; } diff --git a/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestFilesetOperations.java b/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestFilesetOperations.java index 0d1112cda09..f998b4ce47c 100644 --- a/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestFilesetOperations.java +++ b/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestFilesetOperations.java @@ -16,6 +16,7 @@ import com.datastrato.gravitino.Config; import com.datastrato.gravitino.GravitinoEnv; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.catalog.FilesetDispatcher; import com.datastrato.gravitino.catalog.FilesetOperationDispatcher; import com.datastrato.gravitino.dto.file.FilesetDTO; import com.datastrato.gravitino.dto.requests.FilesetCreateRequest; @@ -94,7 +95,7 @@ protected Application configure() { new AbstractBinder() { @Override protected void configure() { - bind(dispatcher).to(FilesetOperationDispatcher.class).ranked(2); + bind(dispatcher).to(FilesetDispatcher.class).ranked(2); bindFactory(MockServletRequestFactory.class).to(HttpServletRequest.class); } }); From 86e7f46768b6e18d82e91a7a1220a73cb4a71673 Mon Sep 17 00:00:00 2001 From: qqqttt123 <148952220+qqqttt123@users.noreply.github.com> Date: Mon, 15 Apr 2024 23:05:26 +0800 Subject: [PATCH 3/7] [#2239] feat(server): Add the role operation (#2956) ### What changes were proposed in this pull request? Add the operations for the role. ### Why are the changes needed? Fix: #2239 ### Does this PR introduce _any_ user-facing change? Yes, I will add the open api and the document in the later pr. ### How was this patch tested? Add a new UT. 
Co-authored-by: Heng Qin --- .../authorization/SecurableObjects.java | 24 ++ .../gravitino/dto/authorization/RoleDTO.java | 231 ++++++++++++++ .../dto/requests/RoleCreateRequest.java | 82 +++++ .../gravitino/dto/responses/RoleResponse.java | 59 ++++ .../gravitino/dto/util/DTOConverters.java | 22 ++ .../dto/responses/TestResponses.java | 26 ++ .../gravitino/authorization/RoleManager.java | 2 +- .../datastrato/gravitino/meta/RoleEntity.java | 2 +- .../gravitino/proto/RoleEntitySerDe.java | 21 +- .../datastrato/gravitino/meta/TestEntity.java | 4 +- .../gravitino/proto/TestEntityProtoSerDe.java | 4 +- .../gravitino/storage/TestEntityStorage.java | 2 +- .../server/web/rest/ExceptionHandlers.java | 38 +++ .../server/web/rest/RoleOperations.java | 108 +++++++ .../server/web/rest/TestRoleOperations.java | 298 ++++++++++++++++++ 15 files changed, 896 insertions(+), 27 deletions(-) create mode 100644 common/src/main/java/com/datastrato/gravitino/dto/authorization/RoleDTO.java create mode 100644 common/src/main/java/com/datastrato/gravitino/dto/requests/RoleCreateRequest.java create mode 100644 common/src/main/java/com/datastrato/gravitino/dto/responses/RoleResponse.java create mode 100644 server/src/main/java/com/datastrato/gravitino/server/web/rest/RoleOperations.java create mode 100644 server/src/test/java/com/datastrato/gravitino/server/web/rest/TestRoleOperations.java diff --git a/api/src/main/java/com/datastrato/gravitino/authorization/SecurableObjects.java b/api/src/main/java/com/datastrato/gravitino/authorization/SecurableObjects.java index 48b05f6ef6f..80d68836b84 100644 --- a/api/src/main/java/com/datastrato/gravitino/authorization/SecurableObjects.java +++ b/api/src/main/java/com/datastrato/gravitino/authorization/SecurableObjects.java @@ -4,11 +4,16 @@ */ package com.datastrato.gravitino.authorization; +import com.google.common.base.Splitter; +import com.google.common.collect.Iterables; import java.util.Objects; +import org.apache.commons.lang3.StringUtils; /** 
The helper class for {@link SecurableObject}. */ public class SecurableObjects { + private static final Splitter DOT = Splitter.on('.'); + /** * Create the {@link SecurableObject} with the given names. * @@ -186,4 +191,23 @@ public boolean equals(Object other) { && Objects.equals(name, otherSecurableObject.name()); } } + + /** + * Create a {@link SecurableObject} from the given identifier string. + * + * @param securableObjectIdentifier The identifier string + * @return The created {@link SecurableObject} + */ + public static SecurableObject parse(String securableObjectIdentifier) { + if ("*".equals(securableObjectIdentifier)) { + return SecurableObjects.ofAllCatalogs(); + } + + if (StringUtils.isBlank(securableObjectIdentifier)) { + throw new IllegalArgumentException("securable object identifier can't be blank"); + } + + Iterable parts = DOT.split(securableObjectIdentifier); + return SecurableObjects.of(Iterables.toArray(parts, String.class)); + } } diff --git a/common/src/main/java/com/datastrato/gravitino/dto/authorization/RoleDTO.java b/common/src/main/java/com/datastrato/gravitino/dto/authorization/RoleDTO.java new file mode 100644 index 00000000000..2f50da3a353 --- /dev/null +++ b/common/src/main/java/com/datastrato/gravitino/dto/authorization/RoleDTO.java @@ -0,0 +1,231 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ +package com.datastrato.gravitino.dto.authorization; + +import com.datastrato.gravitino.Audit; +import com.datastrato.gravitino.authorization.Privilege; +import com.datastrato.gravitino.authorization.Privileges; +import com.datastrato.gravitino.authorization.Role; +import com.datastrato.gravitino.authorization.SecurableObject; +import com.datastrato.gravitino.authorization.SecurableObjects; +import com.datastrato.gravitino.dto.AuditDTO; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; + +/** Represents a Role Data Transfer Object (DTO). */ +public class RoleDTO implements Role { + + @JsonProperty("name") + private String name; + + @JsonProperty("audit") + private AuditDTO audit; + + @Nullable + @JsonProperty("properties") + private Map properties; + + @JsonProperty("privileges") + private List privileges; + + @JsonProperty("securableObject") + private String securableObject; + + /** Default constructor for Jackson deserialization. */ + protected RoleDTO() {} + + /** + * Creates a new instance of RoleDTO. + * + * @param name The name of the Role DTO. + * @param properties The properties of the Role DTO. + * @param privileges The privileges of the Role DTO. + * @param securableObject The securable object of the Role DTO. + * @param audit The audit information of the Role DTO. + */ + protected RoleDTO( + String name, + Map properties, + List privileges, + String securableObject, + AuditDTO audit) { + this.name = name; + this.audit = audit; + this.properties = properties; + this.privileges = privileges; + this.securableObject = securableObject; + } + + /** @return The name of the Role DTO. 
*/ + @Override + public String name() { + return name; + } + + /** + * The properties of the role. Note, this method will return null if the properties are not set. + * + * @return The properties of the role. + */ + @Override + public Map properties() { + return properties; + } + + /** + * The privileges of the role. All privileges belong to one resource. For example: If the resource + * is a table, the privileges could be `READ TABLE`, `WRITE TABLE`, etc. If a schema has the + * privilege of `LOAD TABLE`. It means the role can all tables of the schema. + * + * @return The privileges of the role. + */ + @Override + public List privileges() { + return privileges.stream().map(Privileges::fromString).collect(Collectors.toList()); + } + + /** + * The resource represents a special kind of entity with a unique identifier. All resources are + * organized by tree structure. For example: If the resource is a table, the identifier may be + * `catalog1.schema1.table1`. + * + * @return The securable object of the role. + */ + @Override + public SecurableObject securableObject() { + return SecurableObjects.parse(securableObject); + } + + /** @return The audit information of the Role DTO. */ + @Override + public Audit auditInfo() { + return audit; + } + + /** + * Creates a new Builder for constructing a Role DTO. + * + * @return A new Builder instance. + */ + public static Builder builder() { + return new Builder(); + } + + /** + * Builder class for constructing a RoleDTO instance. + * + * @param The type of the builder instance. + */ + public static class Builder { + + /** The name of the role. */ + protected String name; + + /** The privileges of the role. */ + protected List privileges = Collections.emptyList(); + + /** The audit information of the role. */ + protected AuditDTO audit; + + /** The properties of the role. */ + protected Map properties; + + /** The securable object of the role. 
*/ + protected SecurableObject securableObject; + + /** + * Sets the name of the role. + * + * @param name The name of the role. + * @return The builder instance. + */ + public S withName(String name) { + this.name = name; + return (S) this; + } + + /** + * Sets the privileges of the role. + * + * @param privileges The privileges of the role. + * @return The builder instance. + */ + public S withPrivileges(List privileges) { + if (privileges != null) { + this.privileges = privileges; + } + + return (S) this; + } + + /** + * Sets the properties of the role. + * + * @param properties The properties of the role. + * @return The builder instance. + */ + public S withProperties(Map properties) { + if (properties != null) { + this.properties = properties; + } + + return (S) this; + } + + /** + * Sets the securable object of the role. + * + * @param securableObject The securableObject of the role. + * @return The builder instance. + */ + public S withSecurableObject(SecurableObject securableObject) { + this.securableObject = securableObject; + return (S) this; + } + + /** + * Sets the audit information of the role. + * + * @param audit The audit information of the role. + * @return The builder instance. + */ + public S withAudit(AuditDTO audit) { + this.audit = audit; + return (S) this; + } + + /** + * Builds an instance of RoleDTO using the builder's properties. + * + * @return An instance of RoleDTO. + * @throws IllegalArgumentException If the name or audit are not set. 
+ */ + public RoleDTO build() { + Preconditions.checkArgument(StringUtils.isNotBlank(name), "name cannot be null or empty"); + Preconditions.checkArgument(audit != null, "audit cannot be null"); + Preconditions.checkArgument( + !CollectionUtils.isEmpty(privileges), "privileges can't be empty"); + Preconditions.checkArgument(securableObject != null, "securable object can't null"); + + return new RoleDTO( + name, + properties, + privileges.stream() + .map(Privilege::name) + .map(Objects::toString) + .collect(Collectors.toList()), + securableObject.toString(), + audit); + } + } +} diff --git a/common/src/main/java/com/datastrato/gravitino/dto/requests/RoleCreateRequest.java b/common/src/main/java/com/datastrato/gravitino/dto/requests/RoleCreateRequest.java new file mode 100644 index 00000000000..f2deaf72f9a --- /dev/null +++ b/common/src/main/java/com/datastrato/gravitino/dto/requests/RoleCreateRequest.java @@ -0,0 +1,82 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ +package com.datastrato.gravitino.dto.requests; + +import com.datastrato.gravitino.rest.RESTRequest; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import java.util.List; +import java.util.Map; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.ToString; +import lombok.extern.jackson.Jacksonized; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; + +/** Represents a request to create a role. 
*/ +@Getter +@EqualsAndHashCode +@ToString +@Builder +@Jacksonized +public class RoleCreateRequest implements RESTRequest { + + @JsonProperty("name") + private final String name; + + @Nullable + @JsonProperty("properties") + private Map properties; + + @JsonProperty("privileges") + private List privileges; + + @JsonProperty("securableObject") + private String securableObject; + + /** Default constructor for RoleCreateRequest. (Used for Jackson deserialization.) */ + public RoleCreateRequest() { + this(null, null, null, null); + } + + /** + * Creates a new RoleCreateRequest. + * + * @param name The name of the role. + * @param properties The properties of the role. + * @param securableObject The securable object of the role. + * @param privileges The privileges of the role. + */ + public RoleCreateRequest( + String name, + Map properties, + List privileges, + String securableObject) { + super(); + this.name = name; + this.properties = properties; + this.privileges = privileges; + this.securableObject = securableObject; + } + + /** + * Validates the {@link RoleCreateRequest} request. + * + * @throws IllegalArgumentException If the request is invalid, this exception is thrown. + */ + @Override + public void validate() throws IllegalArgumentException { + Preconditions.checkArgument( + StringUtils.isNotBlank(name), "\"name\" field is required and cannot be empty"); + + Preconditions.checkArgument( + !CollectionUtils.isEmpty(privileges), "\"privileges\" can't be empty"); + + Preconditions.checkArgument(securableObject != null, "\"securableObject\" can't null"); + } +} diff --git a/common/src/main/java/com/datastrato/gravitino/dto/responses/RoleResponse.java b/common/src/main/java/com/datastrato/gravitino/dto/responses/RoleResponse.java new file mode 100644 index 00000000000..5c1086bb43c --- /dev/null +++ b/common/src/main/java/com/datastrato/gravitino/dto/responses/RoleResponse.java @@ -0,0 +1,59 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. 
+ * This software is licensed under the Apache License version 2. + */ +package com.datastrato.gravitino.dto.responses; + +import com.datastrato.gravitino.dto.authorization.RoleDTO; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.ToString; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; + +/** Represents a response for a role. */ +@Getter +@ToString +@EqualsAndHashCode(callSuper = true) +public class RoleResponse extends BaseResponse { + + @JsonProperty("role") + private final RoleDTO role; + + /** + * Constructor for RoleResponse. + * + * @param role The role data transfer object. + */ + public RoleResponse(RoleDTO role) { + super(0); + this.role = role; + } + + /** Default constructor for RoleResponse. (Used for Jackson deserialization.) */ + public RoleResponse() { + super(); + this.role = null; + } + + /** + * Validates the response data. + * + * @throws IllegalArgumentException if the name or audit is not set. 
+ */ + @Override + public void validate() throws IllegalArgumentException { + super.validate(); + + Preconditions.checkArgument(role != null, "role must not be null"); + Preconditions.checkArgument( + StringUtils.isNotBlank(role.name()), "role 'name' must not be null and empty"); + Preconditions.checkArgument(role.auditInfo() != null, "role 'auditInfo' must not be null"); + Preconditions.checkArgument( + !CollectionUtils.isEmpty(role.privileges()), "role 'privileges' can't be empty"); + Preconditions.checkArgument( + role.securableObject() != null, "role 'securableObject' can't null"); + } +} diff --git a/common/src/main/java/com/datastrato/gravitino/dto/util/DTOConverters.java b/common/src/main/java/com/datastrato/gravitino/dto/util/DTOConverters.java index a2050e53548..ad93bfbf566 100644 --- a/common/src/main/java/com/datastrato/gravitino/dto/util/DTOConverters.java +++ b/common/src/main/java/com/datastrato/gravitino/dto/util/DTOConverters.java @@ -10,11 +10,13 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.Metalake; import com.datastrato.gravitino.authorization.Group; +import com.datastrato.gravitino.authorization.Role; import com.datastrato.gravitino.authorization.User; import com.datastrato.gravitino.dto.AuditDTO; import com.datastrato.gravitino.dto.CatalogDTO; import com.datastrato.gravitino.dto.MetalakeDTO; import com.datastrato.gravitino.dto.authorization.GroupDTO; +import com.datastrato.gravitino.dto.authorization.RoleDTO; import com.datastrato.gravitino.dto.authorization.UserDTO; import com.datastrato.gravitino.dto.file.FilesetDTO; import com.datastrato.gravitino.dto.messaging.TopicDTO; @@ -369,6 +371,26 @@ public static GroupDTO toDTO(Group group) { .build(); } + /** + * Converts a role implementation to a RoleDTO. + * + * @param role The role implementation. + * @return The role DTO. 
+ */ + public static RoleDTO toDTO(Role role) { + if (role instanceof RoleDTO) { + return (RoleDTO) role; + } + + return RoleDTO.builder() + .withName(role.name()) + .withSecurableObject(role.securableObject()) + .withPrivileges(role.privileges()) + .withProperties(role.properties()) + .withAudit(toDTO(role.auditInfo())) + .build(); + } + /** * Converts a Expression to an FunctionArg DTO. * diff --git a/common/src/test/java/com/datastrato/gravitino/dto/responses/TestResponses.java b/common/src/test/java/com/datastrato/gravitino/dto/responses/TestResponses.java index 4465cc34f11..96b27b0e851 100644 --- a/common/src/test/java/com/datastrato/gravitino/dto/responses/TestResponses.java +++ b/common/src/test/java/com/datastrato/gravitino/dto/responses/TestResponses.java @@ -11,16 +11,20 @@ import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.authorization.Privileges; +import com.datastrato.gravitino.authorization.SecurableObjects; import com.datastrato.gravitino.dto.AuditDTO; import com.datastrato.gravitino.dto.CatalogDTO; import com.datastrato.gravitino.dto.MetalakeDTO; import com.datastrato.gravitino.dto.authorization.GroupDTO; +import com.datastrato.gravitino.dto.authorization.RoleDTO; import com.datastrato.gravitino.dto.authorization.UserDTO; import com.datastrato.gravitino.dto.rel.ColumnDTO; import com.datastrato.gravitino.dto.rel.SchemaDTO; import com.datastrato.gravitino.dto.rel.TableDTO; import com.datastrato.gravitino.dto.rel.partitioning.Partitioning; import com.datastrato.gravitino.rel.types.Types; +import com.google.common.collect.Lists; import java.time.Instant; import org.junit.jupiter.api.Test; @@ -241,6 +245,7 @@ void testUserResponseException() throws IllegalArgumentException { assertThrows(IllegalArgumentException.class, () -> user.validate()); } + @Test void testGroupResponse() throws IllegalArgumentException { AuditDTO audit = 
AuditDTO.builder().withCreator("creator").withCreateTime(Instant.now()).build(); @@ -254,4 +259,25 @@ void testGroupResponseException() throws IllegalArgumentException { GroupResponse group = new GroupResponse(); assertThrows(IllegalArgumentException.class, () -> group.validate()); } + + @Test + void testRoleResponse() throws IllegalArgumentException { + AuditDTO audit = + AuditDTO.builder().withCreator("creator").withCreateTime(Instant.now()).build(); + RoleDTO role = + RoleDTO.builder() + .withName("role1") + .withPrivileges(Lists.newArrayList(Privileges.LoadCatalog.get())) + .withSecurableObject(SecurableObjects.ofCatalog("catalog")) + .withAudit(audit) + .build(); + RoleResponse response = new RoleResponse(role); + response.validate(); // No exception thrown + } + + @Test + void testRoleResponseException() throws IllegalArgumentException { + RoleResponse role = new RoleResponse(); + assertThrows(IllegalArgumentException.class, () -> role.validate()); + } } diff --git a/core/src/main/java/com/datastrato/gravitino/authorization/RoleManager.java b/core/src/main/java/com/datastrato/gravitino/authorization/RoleManager.java index 5813744af18..35639e47266 100644 --- a/core/src/main/java/com/datastrato/gravitino/authorization/RoleManager.java +++ b/core/src/main/java/com/datastrato/gravitino/authorization/RoleManager.java @@ -66,7 +66,7 @@ public Role createRole( .withId(idGenerator.nextId()) .withName(role) .withProperties(properties) - .securableObject(securableObject) + .withSecurableObject(securableObject) .withPrivileges(privileges) .withNamespace( Namespace.of( diff --git a/core/src/main/java/com/datastrato/gravitino/meta/RoleEntity.java b/core/src/main/java/com/datastrato/gravitino/meta/RoleEntity.java index 5e4948fa0b8..b4642f3e5db 100644 --- a/core/src/main/java/com/datastrato/gravitino/meta/RoleEntity.java +++ b/core/src/main/java/com/datastrato/gravitino/meta/RoleEntity.java @@ -234,7 +234,7 @@ public Builder withAuditInfo(AuditInfo auditInfo) { * @param 
securableObject The securable object of the role entity. * @return The builder instance. */ - public Builder securableObject(SecurableObject securableObject) { + public Builder withSecurableObject(SecurableObject securableObject) { roleEntity.securableObject = securableObject; return this; } diff --git a/core/src/main/java/com/datastrato/gravitino/proto/RoleEntitySerDe.java b/core/src/main/java/com/datastrato/gravitino/proto/RoleEntitySerDe.java index ae65c3a117c..e4a96ae7e63 100644 --- a/core/src/main/java/com/datastrato/gravitino/proto/RoleEntitySerDe.java +++ b/core/src/main/java/com/datastrato/gravitino/proto/RoleEntitySerDe.java @@ -5,18 +5,12 @@ package com.datastrato.gravitino.proto; import com.datastrato.gravitino.authorization.Privileges; -import com.datastrato.gravitino.authorization.SecurableObject; import com.datastrato.gravitino.authorization.SecurableObjects; import com.datastrato.gravitino.meta.RoleEntity; -import com.google.common.base.Splitter; -import com.google.common.collect.Iterables; import java.util.stream.Collectors; -import org.apache.commons.lang3.StringUtils; public class RoleEntitySerDe implements ProtoSerDe { - private static final Splitter DOT = Splitter.on('.'); - /** * Serializes the provided entity into its corresponding Protocol Buffer message representation. 
* @@ -59,7 +53,7 @@ public RoleEntity deserialize(Role role) { role.getPrivilegesList().stream() .map(Privileges::fromString) .collect(Collectors.toList())) - .securableObject(parseSecurableObject(role.getSecurableObject())) + .withSecurableObject(SecurableObjects.parse(role.getSecurableObject())) .withAuditInfo(new AuditInfoSerDe().deserialize(role.getAuditInfo())); if (!role.getPropertiesMap().isEmpty()) { @@ -68,17 +62,4 @@ public RoleEntity deserialize(Role role) { return builder.build(); } - - private static SecurableObject parseSecurableObject(String securableObjectIdentifier) { - if ("*".equals(securableObjectIdentifier)) { - return SecurableObjects.ofAllCatalogs(); - } - - if (StringUtils.isBlank(securableObjectIdentifier)) { - throw new IllegalArgumentException("securable object identifier can't be blank"); - } - - Iterable parts = DOT.split(securableObjectIdentifier); - return SecurableObjects.of(Iterables.toArray(parts, String.class)); - } } diff --git a/core/src/test/java/com/datastrato/gravitino/meta/TestEntity.java b/core/src/test/java/com/datastrato/gravitino/meta/TestEntity.java index 5d1f689a6ae..b852d340101 100644 --- a/core/src/test/java/com/datastrato/gravitino/meta/TestEntity.java +++ b/core/src/test/java/com/datastrato/gravitino/meta/TestEntity.java @@ -266,7 +266,7 @@ public void testRole() { .withId(1L) .withName(roleName) .withAuditInfo(auditInfo) - .securableObject(SecurableObjects.of(catalogName)) + .withSecurableObject(SecurableObjects.of(catalogName)) .withPrivileges(Lists.newArrayList(Privileges.LoadCatalog.get())) .withProperties(map) .build(); @@ -286,7 +286,7 @@ public void testRole() { .withId(1L) .withName(roleName) .withAuditInfo(auditInfo) - .securableObject(SecurableObjects.of(catalogName)) + .withSecurableObject(SecurableObjects.of(catalogName)) .withPrivileges(Lists.newArrayList(Privileges.LoadCatalog.get())) .build(); Assertions.assertNull(roleWithoutFields.properties()); diff --git 
a/core/src/test/java/com/datastrato/gravitino/proto/TestEntityProtoSerDe.java b/core/src/test/java/com/datastrato/gravitino/proto/TestEntityProtoSerDe.java index 9b63d32167e..8223ac239c9 100644 --- a/core/src/test/java/com/datastrato/gravitino/proto/TestEntityProtoSerDe.java +++ b/core/src/test/java/com/datastrato/gravitino/proto/TestEntityProtoSerDe.java @@ -332,7 +332,7 @@ public void testEntitiesSerDe() throws IOException { .withId(roleId) .withName(roleName) .withAuditInfo(auditInfo) - .securableObject(SecurableObjects.of(catalogName)) + .withSecurableObject(SecurableObjects.of(catalogName)) .withPrivileges(Lists.newArrayList(Privileges.LoadCatalog.get())) .withProperties(props) .build(); @@ -345,7 +345,7 @@ public void testEntitiesSerDe() throws IOException { .withId(1L) .withName(roleName) .withAuditInfo(auditInfo) - .securableObject(SecurableObjects.of(catalogName)) + .withSecurableObject(SecurableObjects.of(catalogName)) .withPrivileges(Lists.newArrayList(Privileges.LoadCatalog.get())) .build(); roleBytes = protoEntitySerDe.serialize(roleWithoutFields); diff --git a/core/src/test/java/com/datastrato/gravitino/storage/TestEntityStorage.java b/core/src/test/java/com/datastrato/gravitino/storage/TestEntityStorage.java index b7272c413a0..4df9a63f7df 100644 --- a/core/src/test/java/com/datastrato/gravitino/storage/TestEntityStorage.java +++ b/core/src/test/java/com/datastrato/gravitino/storage/TestEntityStorage.java @@ -1182,7 +1182,7 @@ private static RoleEntity createRole(String metalake, String name, AuditInfo aud metalake, CatalogEntity.SYSTEM_CATALOG_RESERVED_NAME, Entity.ROLE_SCHEMA_NAME)) .withName(name) .withAuditInfo(auditInfo) - .securableObject(SecurableObjects.of("catalog")) + .withSecurableObject(SecurableObjects.of("catalog")) .withPrivileges(Lists.newArrayList(Privileges.LoadCatalog.get())) .withProperties(Collections.emptyMap()) .build(); diff --git a/server/src/main/java/com/datastrato/gravitino/server/web/rest/ExceptionHandlers.java 
b/server/src/main/java/com/datastrato/gravitino/server/web/rest/ExceptionHandlers.java index 0921a8690f6..130c6f2fa82 100644 --- a/server/src/main/java/com/datastrato/gravitino/server/web/rest/ExceptionHandlers.java +++ b/server/src/main/java/com/datastrato/gravitino/server/web/rest/ExceptionHandlers.java @@ -12,6 +12,7 @@ import com.datastrato.gravitino.exceptions.NonEmptySchemaException; import com.datastrato.gravitino.exceptions.NotFoundException; import com.datastrato.gravitino.exceptions.PartitionAlreadyExistsException; +import com.datastrato.gravitino.exceptions.RoleAlreadyExistsException; import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; import com.datastrato.gravitino.exceptions.TableAlreadyExistsException; import com.datastrato.gravitino.exceptions.TopicAlreadyExistsException; @@ -68,6 +69,11 @@ public static Response handleGroupException( return GroupExceptionHandler.INSTANCE.handle(op, group, metalake, e); } + public static Response handleRoleException( + OperationType op, String role, String metalake, Exception e) { + return RoleExceptionHandler.INSTANCE.handle(op, role, metalake, e); + } + public static Response handleTopicException( OperationType op, String topic, String schema, Exception e) { return TopicExceptionHandler.INSTANCE.handle(op, topic, schema, e); @@ -345,6 +351,38 @@ public Response handle(OperationType op, String group, String metalake, Exceptio } } + private static class RoleExceptionHandler extends BaseExceptionHandler { + + private static final ExceptionHandler INSTANCE = new RoleExceptionHandler(); + + private static String getRoleErrorMsg( + String role, String operation, String metalake, String reason) { + return String.format( + "Failed to operate role %s operation [%s] under metalake [%s], reason [%s]", + role, operation, metalake, reason); + } + + @Override + public Response handle(OperationType op, String role, String metalake, Exception e) { + String formatted = StringUtil.isBlank(role) ? 
"" : " [" + role + "]"; + String errorMsg = getRoleErrorMsg(formatted, op.name(), metalake, getErrorMsg(e)); + LOG.warn(errorMsg, e); + + if (e instanceof IllegalArgumentException) { + return Utils.illegalArguments(errorMsg, e); + + } else if (e instanceof NotFoundException) { + return Utils.notFound(errorMsg, e); + + } else if (e instanceof RoleAlreadyExistsException) { + return Utils.alreadyExists(errorMsg, e); + + } else { + return super.handle(op, role, metalake, e); + } + } + } + private static class TopicExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new TopicExceptionHandler(); diff --git a/server/src/main/java/com/datastrato/gravitino/server/web/rest/RoleOperations.java b/server/src/main/java/com/datastrato/gravitino/server/web/rest/RoleOperations.java new file mode 100644 index 00000000000..c89f27d0a7c --- /dev/null +++ b/server/src/main/java/com/datastrato/gravitino/server/web/rest/RoleOperations.java @@ -0,0 +1,108 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ +package com.datastrato.gravitino.server.web.rest; + +import com.codahale.metrics.annotation.ResponseMetered; +import com.codahale.metrics.annotation.Timed; +import com.datastrato.gravitino.GravitinoEnv; +import com.datastrato.gravitino.authorization.AccessControlManager; +import com.datastrato.gravitino.authorization.Privileges; +import com.datastrato.gravitino.authorization.SecurableObjects; +import com.datastrato.gravitino.dto.requests.RoleCreateRequest; +import com.datastrato.gravitino.dto.responses.DropResponse; +import com.datastrato.gravitino.dto.responses.RoleResponse; +import com.datastrato.gravitino.dto.util.DTOConverters; +import com.datastrato.gravitino.metrics.MetricNames; +import com.datastrato.gravitino.server.web.Utils; +import java.util.stream.Collectors; +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.DELETE; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.Response; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Path("/metalakes/{metalake}/roles") +public class RoleOperations { + private static final Logger LOG = LoggerFactory.getLogger(RoleOperations.class); + + private final AccessControlManager accessControlManager; + + @Context private HttpServletRequest httpRequest; + + public RoleOperations() { + this.accessControlManager = GravitinoEnv.getInstance().accessControlManager(); + } + + @GET + @Path("{role}") + @Produces("application/vnd.gravitino.v1+json") + @Timed(name = "load-role." 
+ MetricNames.HTTP_PROCESS_DURATION, absolute = true) + @ResponseMetered(name = "load-role", absolute = true) + public Response loadRole(@PathParam("metalake") String metalake, @PathParam("role") String role) { + try { + return Utils.doAs( + httpRequest, + () -> + Utils.ok( + new RoleResponse( + DTOConverters.toDTO(accessControlManager.loadRole(metalake, role))))); + } catch (Exception e) { + return ExceptionHandlers.handleRoleException(OperationType.LOAD, role, metalake, e); + } + } + + @POST + @Produces("application/vnd.gravitino.v1+json") + @Timed(name = "create-role." + MetricNames.HTTP_PROCESS_DURATION, absolute = true) + @ResponseMetered(name = "create-role", absolute = true) + public Response creatRole(@PathParam("metalake") String metalake, RoleCreateRequest request) { + try { + return Utils.doAs( + httpRequest, + () -> + Utils.ok( + new RoleResponse( + DTOConverters.toDTO( + accessControlManager.createRole( + metalake, + request.getName(), + request.getProperties(), + SecurableObjects.parse(request.getSecurableObject()), + request.getPrivileges().stream() + .map(Privileges::fromString) + .collect(Collectors.toList())))))); + } catch (Exception e) { + return ExceptionHandlers.handleRoleException( + OperationType.CREATE, request.getName(), metalake, e); + } + } + + @DELETE + @Path("{role}") + @Produces("application/vnd.gravitino.v1+json") + @Timed(name = "drop-role." 
+ MetricNames.HTTP_PROCESS_DURATION, absolute = true) + @ResponseMetered(name = "drop-role", absolute = true) + public Response dropRole(@PathParam("metalake") String metalake, @PathParam("role") String role) { + try { + return Utils.doAs( + httpRequest, + () -> { + boolean dropped = accessControlManager.dropRole(metalake, role); + if (!dropped) { + LOG.warn("Failed to drop role {} under metalake {}", role, metalake); + } + return Utils.ok(new DropResponse(dropped)); + }); + } catch (Exception e) { + return ExceptionHandlers.handleRoleException(OperationType.DROP, role, metalake, e); + } + } +} diff --git a/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestRoleOperations.java b/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestRoleOperations.java new file mode 100644 index 00000000000..17d74cc02bb --- /dev/null +++ b/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestRoleOperations.java @@ -0,0 +1,298 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ +package com.datastrato.gravitino.server.web.rest; + +import static com.datastrato.gravitino.Configs.TREE_LOCK_CLEAN_INTERVAL; +import static com.datastrato.gravitino.Configs.TREE_LOCK_MAX_NODE_IN_MEMORY; +import static com.datastrato.gravitino.Configs.TREE_LOCK_MIN_NODE_IN_MEMORY; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastrato.gravitino.Config; +import com.datastrato.gravitino.GravitinoEnv; +import com.datastrato.gravitino.authorization.AccessControlManager; +import com.datastrato.gravitino.authorization.Privileges; +import com.datastrato.gravitino.authorization.Role; +import com.datastrato.gravitino.authorization.SecurableObjects; +import com.datastrato.gravitino.dto.authorization.RoleDTO; +import com.datastrato.gravitino.dto.requests.RoleCreateRequest; +import com.datastrato.gravitino.dto.responses.DropResponse; +import com.datastrato.gravitino.dto.responses.ErrorConstants; +import com.datastrato.gravitino.dto.responses.ErrorResponse; +import com.datastrato.gravitino.dto.responses.RoleResponse; +import com.datastrato.gravitino.exceptions.NoSuchMetalakeException; +import com.datastrato.gravitino.exceptions.NoSuchRoleException; +import com.datastrato.gravitino.exceptions.RoleAlreadyExistsException; +import com.datastrato.gravitino.lock.LockManager; +import com.datastrato.gravitino.meta.AuditInfo; +import com.datastrato.gravitino.meta.RoleEntity; +import com.datastrato.gravitino.rest.RESTUtils; +import com.google.common.collect.Lists; +import java.io.IOException; +import java.time.Instant; +import java.util.Collections; +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.client.Entity; +import javax.ws.rs.core.Application; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import org.glassfish.hk2.utilities.binding.AbstractBinder; +import 
org.glassfish.jersey.server.ResourceConfig; +import org.glassfish.jersey.test.JerseyTest; +import org.glassfish.jersey.test.TestProperties; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +public class TestRoleOperations extends JerseyTest { + + private static final AccessControlManager manager = mock(AccessControlManager.class); + + private static class MockServletRequestFactory extends ServletRequestFactoryBase { + @Override + public HttpServletRequest get() { + HttpServletRequest request = mock(HttpServletRequest.class); + when(request.getRemoteUser()).thenReturn(null); + return request; + } + } + + @BeforeAll + public static void setup() { + Config config = mock(Config.class); + Mockito.doReturn(100000L).when(config).get(TREE_LOCK_MAX_NODE_IN_MEMORY); + Mockito.doReturn(1000L).when(config).get(TREE_LOCK_MIN_NODE_IN_MEMORY); + Mockito.doReturn(36000L).when(config).get(TREE_LOCK_CLEAN_INTERVAL); + GravitinoEnv.getInstance().setLockManager(new LockManager(config)); + GravitinoEnv.getInstance().setAccessControlManager(manager); + } + + @Override + protected Application configure() { + try { + forceSet( + TestProperties.CONTAINER_PORT, String.valueOf(RESTUtils.findAvailablePort(2000, 3000))); + } catch (IOException e) { + throw new RuntimeException(e); + } + + ResourceConfig resourceConfig = new ResourceConfig(); + resourceConfig.register(RoleOperations.class); + resourceConfig.register( + new AbstractBinder() { + @Override + protected void configure() { + bindFactory(MockServletRequestFactory.class).to(HttpServletRequest.class); + } + }); + + return resourceConfig; + } + + @Test + public void testCreateRole() { + RoleCreateRequest req = + new RoleCreateRequest( + "role", + Collections.emptyMap(), + Lists.newArrayList(Privileges.LoadCatalog.get().name().toString()), + SecurableObjects.of("catalog").toString()); + Role role = buildRole("role1"); + + 
when(manager.createRole(any(), any(), any(), any(), any())).thenReturn(role); + + Response resp = + target("/metalakes/metalake1/roles") + .request(MediaType.APPLICATION_JSON_TYPE) + .accept("application/vnd.gravitino.v1+json") + .post(Entity.entity(req, MediaType.APPLICATION_JSON_TYPE)); + + Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp.getStatus()); + Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getMediaType()); + + RoleResponse roleResponse = resp.readEntity(RoleResponse.class); + Assertions.assertEquals(0, roleResponse.getCode()); + + RoleDTO roleDTO = roleResponse.getRole(); + Assertions.assertEquals("role1", roleDTO.name()); + Assertions.assertEquals(SecurableObjects.of("catalog"), roleDTO.securableObject()); + Assertions.assertEquals(Lists.newArrayList(Privileges.LoadCatalog.get()), roleDTO.privileges()); + + // Test to throw NoSuchMetalakeException + doThrow(new NoSuchMetalakeException("mock error")) + .when(manager) + .createRole(any(), any(), any(), any(), any()); + Response resp1 = + target("/metalakes/metalake1/roles") + .request(MediaType.APPLICATION_JSON_TYPE) + .accept("application/vnd.gravitino.v1+json") + .post(Entity.entity(req, MediaType.APPLICATION_JSON_TYPE)); + + Assertions.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), resp1.getStatus()); + Assertions.assertEquals(MediaType.APPLICATION_JSON_TYPE, resp1.getMediaType()); + + ErrorResponse errorResponse = resp1.readEntity(ErrorResponse.class); + Assertions.assertEquals(ErrorConstants.NOT_FOUND_CODE, errorResponse.getCode()); + Assertions.assertEquals(NoSuchMetalakeException.class.getSimpleName(), errorResponse.getType()); + + // Test to throw RoleAlreadyExistsException + doThrow(new RoleAlreadyExistsException("mock error")) + .when(manager) + .createRole(any(), any(), any(), any(), any()); + Response resp2 = + target("/metalakes/metalake1/roles") + .request(MediaType.APPLICATION_JSON_TYPE) + .accept("application/vnd.gravitino.v1+json") + 
.post(Entity.entity(req, MediaType.APPLICATION_JSON_TYPE)); + + Assertions.assertEquals(Response.Status.CONFLICT.getStatusCode(), resp2.getStatus()); + + ErrorResponse errorResponse1 = resp2.readEntity(ErrorResponse.class); + Assertions.assertEquals(ErrorConstants.ALREADY_EXISTS_CODE, errorResponse1.getCode()); + Assertions.assertEquals( + RoleAlreadyExistsException.class.getSimpleName(), errorResponse1.getType()); + + // Test to throw internal RuntimeException + doThrow(new RuntimeException("mock error")) + .when(manager) + .createRole(any(), any(), any(), any(), any()); + Response resp3 = + target("/metalakes/metalake1/roles") + .request(MediaType.APPLICATION_JSON_TYPE) + .accept("application/vnd.gravitino.v1+json") + .post(Entity.entity(req, MediaType.APPLICATION_JSON_TYPE)); + + Assertions.assertEquals( + Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), resp3.getStatus()); + + ErrorResponse errorResponse2 = resp3.readEntity(ErrorResponse.class); + Assertions.assertEquals(ErrorConstants.INTERNAL_ERROR_CODE, errorResponse2.getCode()); + Assertions.assertEquals(RuntimeException.class.getSimpleName(), errorResponse2.getType()); + } + + @Test + public void testLoadRole() { + Role role = buildRole("role1"); + + when(manager.loadRole(any(), any())).thenReturn(role); + + Response resp = + target("/metalakes/metalake1/roles/role1") + .request(MediaType.APPLICATION_JSON_TYPE) + .accept("application/vnd.gravitino.v1+json") + .get(); + + Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp.getStatus()); + + RoleResponse roleResponse = resp.readEntity(RoleResponse.class); + Assertions.assertEquals(0, roleResponse.getCode()); + RoleDTO roleDTO = roleResponse.getRole(); + Assertions.assertEquals("role1", roleDTO.name()); + Assertions.assertTrue(role.properties().isEmpty()); + Assertions.assertEquals(SecurableObjects.of("catalog"), roleDTO.securableObject()); + Assertions.assertEquals(Lists.newArrayList(Privileges.LoadCatalog.get()), roleDTO.privileges()); + 
+ // Test to throw NoSuchMetalakeException + doThrow(new NoSuchMetalakeException("mock error")).when(manager).loadRole(any(), any()); + Response resp1 = + target("/metalakes/metalake1/roles/role1") + .request(MediaType.APPLICATION_JSON_TYPE) + .accept("application/vnd.gravitino.v1+json") + .get(); + + Assertions.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), resp1.getStatus()); + + ErrorResponse errorResponse = resp1.readEntity(ErrorResponse.class); + Assertions.assertEquals(ErrorConstants.NOT_FOUND_CODE, errorResponse.getCode()); + Assertions.assertEquals(NoSuchMetalakeException.class.getSimpleName(), errorResponse.getType()); + + // Test to throw NoSuchRoleException + doThrow(new NoSuchRoleException("mock error")).when(manager).loadRole(any(), any()); + Response resp2 = + target("/metalakes/metalake1/roles/role1") + .request(MediaType.APPLICATION_JSON_TYPE) + .accept("application/vnd.gravitino.v1+json") + .get(); + + Assertions.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), resp2.getStatus()); + + ErrorResponse errorResponse1 = resp2.readEntity(ErrorResponse.class); + Assertions.assertEquals(ErrorConstants.NOT_FOUND_CODE, errorResponse1.getCode()); + Assertions.assertEquals(NoSuchRoleException.class.getSimpleName(), errorResponse1.getType()); + + // Test to throw internal RuntimeException + doThrow(new RuntimeException("mock error")).when(manager).loadRole(any(), any()); + Response resp3 = + target("/metalakes/metalake1/roles/role1") + .request(MediaType.APPLICATION_JSON_TYPE) + .accept("application/vnd.gravitino.v1+json") + .get(); + + Assertions.assertEquals( + Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), resp3.getStatus()); + + ErrorResponse errorResponse2 = resp3.readEntity(ErrorResponse.class); + Assertions.assertEquals(ErrorConstants.INTERNAL_ERROR_CODE, errorResponse2.getCode()); + Assertions.assertEquals(RuntimeException.class.getSimpleName(), errorResponse2.getType()); + } + + private Role buildRole(String role) { + return 
RoleEntity.builder() + .withId(1L) + .withName(role) + .withPrivileges(Lists.newArrayList(Privileges.LoadCatalog.get())) + .withProperties(Collections.emptyMap()) + .withSecurableObject(SecurableObjects.of("catalog")) + .withAuditInfo( + AuditInfo.builder().withCreator("creator").withCreateTime(Instant.now()).build()) + .build(); + } + + @Test + public void testDropRole() { + when(manager.dropRole(any(), any())).thenReturn(true); + + Response resp = + target("/metalakes/metalake1/roles/role1") + .request(MediaType.APPLICATION_JSON_TYPE) + .accept("application/vnd.gravitino.v1+json") + .delete(); + + Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp.getStatus()); + DropResponse dropResponse = resp.readEntity(DropResponse.class); + Assertions.assertEquals(0, dropResponse.getCode()); + Assertions.assertTrue(dropResponse.dropped()); + + // Test when failed to drop role + when(manager.dropRole(any(), any())).thenReturn(false); + Response resp2 = + target("/metalakes/metalake1/roles/role1") + .request(MediaType.APPLICATION_JSON_TYPE) + .accept("application/vnd.gravitino.v1+json") + .delete(); + + Assertions.assertEquals(Response.Status.OK.getStatusCode(), resp2.getStatus()); + DropResponse dropResponse2 = resp2.readEntity(DropResponse.class); + Assertions.assertEquals(0, dropResponse2.getCode()); + Assertions.assertFalse(dropResponse2.dropped()); + + doThrow(new RuntimeException("mock error")).when(manager).dropRole(any(), any()); + Response resp3 = + target("/metalakes/metalake1/roles/role1") + .request(MediaType.APPLICATION_JSON_TYPE) + .accept("application/vnd.gravitino.v1+json") + .delete(); + + Assertions.assertEquals( + Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), resp3.getStatus()); + + ErrorResponse errorResponse = resp3.readEntity(ErrorResponse.class); + Assertions.assertEquals(ErrorConstants.INTERNAL_ERROR_CODE, errorResponse.getCode()); + Assertions.assertEquals(RuntimeException.class.getSimpleName(), errorResponse.getType()); + } +} 
From d1d99954c0ab0651fb634be0f0b0b74bcbd25d97 Mon Sep 17 00:00:00 2001 From: FANNG Date: Tue, 16 Apr 2024 13:03:07 +0800 Subject: [PATCH 4/7] [#2769] feat(core): supports schema event for event listener (#2880) ### What changes were proposed in this pull request? supports schema events for event listener system ### Why are the changes needed? Fix: #2769 ### Does this PR introduce _any_ user-facing change? no ### How was this patch tested? existing tests --- .../datastrato/gravitino/GravitinoEnv.java | 15 +- .../gravitino/catalog/SchemaDispatcher.java | 16 +++ .../catalog/SchemaEventDispatcher.java | 132 ++++++++++++++++++ .../catalog/SchemaOperationDispatcher.java | 3 +- .../listener/api/event/AlterSchemaEvent.java | 47 +++++++ .../api/event/AlterSchemaFailureEvent.java | 35 +++++ .../listener/api/event/CreateSchemaEvent.java | 32 +++++ .../api/event/CreateSchemaFailureEvent.java | 35 +++++ .../listener/api/event/DropSchemaEvent.java | 44 ++++++ .../api/event/DropSchemaFailureEvent.java | 34 +++++ .../listener/api/event/ListSchemaEvent.java | 30 ++++ .../api/event/ListSchemaFailureEvent.java | 33 +++++ .../listener/api/event/LoadSchemaEvent.java | 31 ++++ .../api/event/LoadSchemaFailureEvent.java | 17 +++ .../listener/api/event/SchemaEvent.java | 17 +++ .../api/event/SchemaFailureEvent.java | 20 +++ .../listener/api/info/SchemaInfo.java | 84 +++++++++++ .../gravitino/server/GravitinoServer.java | 6 +- .../server/web/rest/SchemaOperations.java | 6 +- .../server/web/rest/TestSchemaOperations.java | 3 +- 20 files changed, 624 insertions(+), 16 deletions(-) create mode 100644 core/src/main/java/com/datastrato/gravitino/catalog/SchemaDispatcher.java create mode 100644 core/src/main/java/com/datastrato/gravitino/catalog/SchemaEventDispatcher.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaFailureEvent.java create mode 
100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateSchemaEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateSchemaFailureEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/DropSchemaEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/DropSchemaFailureEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/ListSchemaEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/ListSchemaFailureEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadSchemaEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadSchemaFailureEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/SchemaEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/event/SchemaFailureEvent.java create mode 100644 core/src/main/java/com/datastrato/gravitino/listener/api/info/SchemaInfo.java diff --git a/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java b/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java index 61144fe3e4b..e224fa48b2a 100644 --- a/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java +++ b/core/src/main/java/com/datastrato/gravitino/GravitinoEnv.java @@ -10,6 +10,8 @@ import com.datastrato.gravitino.catalog.FilesetDispatcher; import com.datastrato.gravitino.catalog.FilesetEventDispatcher; import com.datastrato.gravitino.catalog.FilesetOperationDispatcher; +import com.datastrato.gravitino.catalog.SchemaDispatcher; +import com.datastrato.gravitino.catalog.SchemaEventDispatcher; import com.datastrato.gravitino.catalog.SchemaOperationDispatcher; import com.datastrato.gravitino.catalog.TableDispatcher; import com.datastrato.gravitino.catalog.TableEventDispatcher; @@ -41,7 +43,7 @@ 
public class GravitinoEnv { private CatalogManager catalogManager; - private SchemaOperationDispatcher schemaOperationDispatcher; + private SchemaDispatcher schemaDispatcher; private TableDispatcher tableDispatcher; @@ -129,8 +131,9 @@ public void initialize(Config config) { // Create and initialize Catalog related modules this.catalogManager = new CatalogManager(config, entityStore, idGenerator); - this.schemaOperationDispatcher = + SchemaOperationDispatcher schemaOperationDispatcher = new SchemaOperationDispatcher(catalogManager, entityStore, idGenerator); + this.schemaDispatcher = new SchemaEventDispatcher(eventBus, schemaOperationDispatcher); TableOperationDispatcher tableOperationDispatcher = new TableOperationDispatcher(catalogManager, entityStore, idGenerator); this.tableDispatcher = new TableEventDispatcher(eventBus, tableOperationDispatcher); @@ -186,12 +189,12 @@ public CatalogManager catalogManager() { } /** - * Get the SchemaOperationDispatcher associated with the Gravitino environment. + * Get the SchemaDispatcher associated with the Gravitino environment. * - * @return The SchemaOperationDispatcher instance. + * @return The SchemaDispatcher instance. */ - public SchemaOperationDispatcher schemaOperationDispatcher() { - return schemaOperationDispatcher; + public SchemaDispatcher schemaDispatcher() { + return schemaDispatcher; } /** diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/SchemaDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaDispatcher.java new file mode 100644 index 00000000000..0667da3df91 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaDispatcher.java @@ -0,0 +1,16 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ + +package com.datastrato.gravitino.catalog; + +import com.datastrato.gravitino.rel.SupportsSchemas; + +/** + * {@code SchemaDispatcher} interface acts as a specialization of the {@link SupportsSchemas} + * interface. This interface is designed to potentially add custom behaviors or operations related + * to dispatching or handling schema-related events or actions that are not covered by the standard + * {@code SupportsSchemas} operations. + */ +public interface SchemaDispatcher extends SupportsSchemas {} diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/SchemaEventDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaEventDispatcher.java new file mode 100644 index 00000000000..7ca34093942 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaEventDispatcher.java @@ -0,0 +1,132 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.catalog; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.exceptions.NoSuchCatalogException; +import com.datastrato.gravitino.exceptions.NoSuchSchemaException; +import com.datastrato.gravitino.exceptions.NonEmptySchemaException; +import com.datastrato.gravitino.exceptions.SchemaAlreadyExistsException; +import com.datastrato.gravitino.listener.EventBus; +import com.datastrato.gravitino.listener.api.event.AlterSchemaEvent; +import com.datastrato.gravitino.listener.api.event.AlterSchemaFailureEvent; +import com.datastrato.gravitino.listener.api.event.CreateSchemaEvent; +import com.datastrato.gravitino.listener.api.event.CreateSchemaFailureEvent; +import com.datastrato.gravitino.listener.api.event.DropSchemaEvent; +import com.datastrato.gravitino.listener.api.event.DropSchemaFailureEvent; +import com.datastrato.gravitino.listener.api.event.ListSchemaEvent; +import 
com.datastrato.gravitino.listener.api.event.ListSchemaFailureEvent; +import com.datastrato.gravitino.listener.api.event.LoadSchemaEvent; +import com.datastrato.gravitino.listener.api.event.LoadSchemaFailureEvent; +import com.datastrato.gravitino.listener.api.info.SchemaInfo; +import com.datastrato.gravitino.rel.Schema; +import com.datastrato.gravitino.rel.SchemaChange; +import com.datastrato.gravitino.utils.PrincipalUtils; +import java.util.Map; + +/** + * {@code SchemaEventDispatcher} is a decorator for {@link SchemaDispatcher} that not only delegates + * schema operations to the underlying schema dispatcher but also dispatches corresponding events to + * an {@link EventBus} after each operation is completed. This allows for event-driven workflows or + * monitoring of schema operations. + */ +public class SchemaEventDispatcher implements SchemaDispatcher { + private final EventBus eventBus; + private final SchemaDispatcher dispatcher; + + /** + * Constructs a SchemaEventDispatcher with a specified EventBus and SchemaDispatcher. + * + * @param eventBus The EventBus to which events will be dispatched. + * @param dispatcher The underlying {@link SchemaOperationDispatcher} that will perform the actual + * schema operations. 
+ */ + public SchemaEventDispatcher(EventBus eventBus, SchemaDispatcher dispatcher) { + this.eventBus = eventBus; + this.dispatcher = dispatcher; + } + + @Override + public NameIdentifier[] listSchemas(Namespace namespace) throws NoSuchCatalogException { + try { + NameIdentifier[] nameIdentifiers = dispatcher.listSchemas(namespace); + eventBus.dispatchEvent(new ListSchemaEvent(PrincipalUtils.getCurrentUserName(), namespace)); + return nameIdentifiers; + } catch (Exception e) { + eventBus.dispatchEvent( + new ListSchemaFailureEvent(PrincipalUtils.getCurrentUserName(), namespace, e)); + throw e; + } + } + + @Override + public boolean schemaExists(NameIdentifier ident) { + return dispatcher.schemaExists(ident); + } + + @Override + public Schema createSchema(NameIdentifier ident, String comment, Map properties) + throws NoSuchCatalogException, SchemaAlreadyExistsException { + try { + Schema schema = dispatcher.createSchema(ident, comment, properties); + eventBus.dispatchEvent( + new CreateSchemaEvent( + PrincipalUtils.getCurrentUserName(), ident, new SchemaInfo(schema))); + return schema; + } catch (Exception e) { + SchemaInfo createSchemaRequest = new SchemaInfo(ident.name(), comment, properties, null); + eventBus.dispatchEvent( + new CreateSchemaFailureEvent( + PrincipalUtils.getCurrentUserName(), ident, e, createSchemaRequest)); + throw e; + } + } + + @Override + public Schema loadSchema(NameIdentifier ident) throws NoSuchSchemaException { + try { + Schema schema = dispatcher.loadSchema(ident); + eventBus.dispatchEvent( + new LoadSchemaEvent(PrincipalUtils.getCurrentUserName(), ident, new SchemaInfo(schema))); + return schema; + } catch (Exception e) { + eventBus.dispatchEvent( + new LoadSchemaFailureEvent(PrincipalUtils.getCurrentUserName(), ident, e)); + throw e; + } + } + + @Override + public Schema alterSchema(NameIdentifier ident, SchemaChange... 
changes) + throws NoSuchSchemaException { + try { + Schema schema = dispatcher.alterSchema(ident, changes); + eventBus.dispatchEvent( + new AlterSchemaEvent( + PrincipalUtils.getCurrentUserName(), ident, changes, new SchemaInfo(schema))); + return schema; + } catch (Exception e) { + eventBus.dispatchEvent( + new AlterSchemaFailureEvent(PrincipalUtils.getCurrentUserName(), ident, e, changes)); + throw e; + } + } + + @Override + public boolean dropSchema(NameIdentifier ident, boolean cascade) throws NonEmptySchemaException { + try { + boolean isExists = dispatcher.dropSchema(ident, cascade); + eventBus.dispatchEvent( + new DropSchemaEvent(PrincipalUtils.getCurrentUserName(), ident, isExists, cascade)); + return isExists; + } catch (Exception e) { + eventBus.dispatchEvent( + new DropSchemaFailureEvent(PrincipalUtils.getCurrentUserName(), ident, e, cascade)); + throw e; + } + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/SchemaOperationDispatcher.java b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaOperationDispatcher.java index 20a952f0ae4..b13124c3af0 100644 --- a/core/src/main/java/com/datastrato/gravitino/catalog/SchemaOperationDispatcher.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/SchemaOperationDispatcher.java @@ -21,7 +21,6 @@ import com.datastrato.gravitino.meta.SchemaEntity; import com.datastrato.gravitino.rel.Schema; import com.datastrato.gravitino.rel.SchemaChange; -import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.storage.IdGenerator; import com.datastrato.gravitino.utils.PrincipalUtils; import java.time.Instant; @@ -29,7 +28,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class SchemaOperationDispatcher extends OperationDispatcher implements SupportsSchemas { +public class SchemaOperationDispatcher extends OperationDispatcher implements SchemaDispatcher { private static final Logger LOG = LoggerFactory.getLogger(SchemaOperationDispatcher.class); 
diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaEvent.java new file mode 100644 index 00000000000..de32ec82067 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaEvent.java @@ -0,0 +1,47 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.listener.api.info.SchemaInfo; +import com.datastrato.gravitino.rel.SchemaChange; + +/** Represents an event fired when a schema is successfully altered. */ +@DeveloperApi +public final class AlterSchemaEvent extends SchemaEvent { + private final SchemaChange[] schemaChanges; + private final SchemaInfo updatedSchemaInfo; + + public AlterSchemaEvent( + String user, + NameIdentifier identifier, + SchemaChange[] schemaChanges, + SchemaInfo updatedSchemaInfo) { + super(user, identifier); + this.schemaChanges = schemaChanges.clone(); + this.updatedSchemaInfo = updatedSchemaInfo; + } + + /** + * Retrieves the updated state of the schema after the successful alteration. + * + * @return A {@link SchemaInfo} instance encapsulating the details of the altered schema. + */ + public SchemaInfo updatedSchemaInfo() { + return updatedSchemaInfo; + } + + /** + * Retrieves the specific changes that were made to the schema during the alteration process. + * + * @return An array of {@link SchemaChange} objects detailing each modification applied to the + * schema. 
+ */ + public SchemaChange[] schemaChanges() { + return schemaChanges; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaFailureEvent.java new file mode 100644 index 00000000000..a5b0643e9ef --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/AlterSchemaFailureEvent.java @@ -0,0 +1,35 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.rel.SchemaChange; + +/** + * Represents an event that is triggered when an attempt to alter a schema fails due to an + * exception. + */ +@DeveloperApi +public final class AlterSchemaFailureEvent extends SchemaFailureEvent { + private final SchemaChange[] schemaChanges; + + public AlterSchemaFailureEvent( + String user, NameIdentifier identifier, Exception exception, SchemaChange[] schemaChanges) { + super(user, identifier, exception); + this.schemaChanges = schemaChanges.clone(); + } + + /** + * Retrieves the specific changes that were made to the schema during the alteration process. + * + * @return An array of {@link SchemaChange} objects detailing each modification applied to the + * schema. + */ + public SchemaChange[] schemaChanges() { + return schemaChanges; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateSchemaEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateSchemaEvent.java new file mode 100644 index 00000000000..4c5f08e475b --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateSchemaEvent.java @@ -0,0 +1,32 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. 
+ * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.listener.api.info.SchemaInfo; + +/** Represents an event triggered upon the successful creation of a schema. */ +@DeveloperApi +public final class CreateSchemaEvent extends SchemaEvent { + private final SchemaInfo createdSchemaInfo; + + public CreateSchemaEvent(String user, NameIdentifier identifier, SchemaInfo schemaInfo) { + super(user, identifier); + this.createdSchemaInfo = schemaInfo; + } + + /** + * Retrieves the final state of the schema as it was returned to the user after successful + * creation. + * + * @return A {@link SchemaInfo} instance encapsulating the comprehensive details of the newly + * created schema. + */ + public SchemaInfo createdSchemaInfo() { + return createdSchemaInfo; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateSchemaFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateSchemaFailureEvent.java new file mode 100644 index 00000000000..5215d1b623d --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/CreateSchemaFailureEvent.java @@ -0,0 +1,35 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.listener.api.info.SchemaInfo; + +/** + * Represents an event that is generated when an attempt to create a schema fails due to an + * exception. 
+ */ +@DeveloperApi +public final class CreateSchemaFailureEvent extends SchemaFailureEvent { + private final SchemaInfo createSchemaRequest; + + public CreateSchemaFailureEvent( + String user, NameIdentifier identifier, Exception exception, SchemaInfo createSchemaRequest) { + super(user, identifier, exception); + this.createSchemaRequest = createSchemaRequest; + } + + /** + * Retrieves the original request information for the attempted schema creation. + * + * @return The {@link SchemaInfo} instance representing the request information for the failed + * schema creation attempt. + */ + public SchemaInfo createSchemaRequest() { + return createSchemaRequest; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropSchemaEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropSchemaEvent.java new file mode 100644 index 00000000000..6c1dbbda917 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropSchemaEvent.java @@ -0,0 +1,44 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** Represents an event that is generated after a schema is successfully dropped. */ +@DeveloperApi +public final class DropSchemaEvent extends SchemaEvent { + private final boolean isExists; + private final boolean cascade; + + public DropSchemaEvent( + String user, NameIdentifier identifier, boolean isExists, boolean cascade) { + super(user, identifier); + this.isExists = isExists; + this.cascade = cascade; + } + + /** + * Retrieves the existence status of the schema at the time of the drop operation. + * + * @return A boolean value indicating whether the schema existed. {@code true} if the schema + * existed, otherwise {@code false}. 
+ */ + public boolean isExists() { + return isExists; + } + + /** + * Indicates whether the drop operation was performed with a cascade option. + * + * @return A boolean value indicating whether the drop operation was set to cascade. If {@code + * true}, dependent objects such as tables and views within the schema were also dropped. + * Otherwise, the operation would fail if the schema contained any dependent objects. + */ + public boolean cascade() { + return cascade; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropSchemaFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropSchemaFailureEvent.java new file mode 100644 index 00000000000..5fa43362e8d --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/DropSchemaFailureEvent.java @@ -0,0 +1,34 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** + * Represents an event that is generated when an attempt to drop a schema fails due to an exception. + */ +@DeveloperApi +public final class DropSchemaFailureEvent extends SchemaFailureEvent { + private final boolean cascade; + + public DropSchemaFailureEvent( + String user, NameIdentifier identifier, Exception exception, boolean cascade) { + super(user, identifier, exception); + this.cascade = cascade; + } + + /** + * Indicates whether the drop operation was performed with a cascade option. + * + * @return A boolean value indicating whether the drop operation was set to cascade. If {@code + * true}, dependent objects such as tables and views within the schema were also dropped. + * Otherwise, the operation would fail if the schema contained any dependent objects. 
+ */ + public boolean cascade() { + return cascade; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListSchemaEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListSchemaEvent.java new file mode 100644 index 00000000000..dd13768aabb --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListSchemaEvent.java @@ -0,0 +1,30 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** Represents an event that is triggered upon the successful list of schemas. */ +@DeveloperApi +public final class ListSchemaEvent extends SchemaEvent { + private final Namespace namespace; + + public ListSchemaEvent(String user, Namespace namespace) { + super(user, NameIdentifier.of(namespace.toString())); + this.namespace = namespace; + } + + /** + * Provides the namespace associated with this event. + * + * @return A {@link Namespace} instance from which schema were listed. + */ + public Namespace namespace() { + return namespace; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListSchemaFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListSchemaFailureEvent.java new file mode 100644 index 00000000000..cb233503db1 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/ListSchemaFailureEvent.java @@ -0,0 +1,33 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.Namespace; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** + * Represents an event that is triggered when an attempt to list schemas within a namespace fails + * due to an exception. + */ +@DeveloperApi +public final class ListSchemaFailureEvent extends SchemaFailureEvent { + private final Namespace namespace; + + public ListSchemaFailureEvent(String user, Namespace namespace, Exception exception) { + super(user, NameIdentifier.of(namespace.toString()), exception); + this.namespace = namespace; + } + + /** + * Retrieves the namespace associated with this failure event. + * + * @return A {@link Namespace} instance for which the schema listing was attempted + */ + public Namespace namespace() { + return namespace; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadSchemaEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadSchemaEvent.java new file mode 100644 index 00000000000..c9418923020 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadSchemaEvent.java @@ -0,0 +1,31 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.listener.api.info.SchemaInfo; + +/** Represents an event triggered upon the successful loading of a schema. 
*/ +@DeveloperApi +public final class LoadSchemaEvent extends SchemaEvent { + private final SchemaInfo loadedSchemaInfo; + + public LoadSchemaEvent(String user, NameIdentifier identifier, SchemaInfo loadedSchemaInfo) { + super(user, identifier); + this.loadedSchemaInfo = loadedSchemaInfo; + } + + /** + * Retrieves the state of the schema as it was made available to the user after successful + * loading. + * + * @return A {@link SchemaInfo} instance encapsulating the details of the schema as loaded. + */ + public SchemaInfo loadedSchemaInfo() { + return loadedSchemaInfo; + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadSchemaFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadSchemaFailureEvent.java new file mode 100644 index 00000000000..c552568233a --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/LoadSchemaFailureEvent.java @@ -0,0 +1,17 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** Represents an event that occurs when an attempt to load a schema fails due to an exception. */ +@DeveloperApi +public final class LoadSchemaFailureEvent extends SchemaFailureEvent { + public LoadSchemaFailureEvent(String user, NameIdentifier identifier, Exception exception) { + super(user, identifier, exception); + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/SchemaEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/SchemaEvent.java new file mode 100644 index 00000000000..60b667afb38 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/SchemaEvent.java @@ -0,0 +1,17 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. 
+ * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** Represents an abstract base class for events related to schema operations. */ +@DeveloperApi +public abstract class SchemaEvent extends Event { + protected SchemaEvent(String user, NameIdentifier identifier) { + super(user, identifier); + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/event/SchemaFailureEvent.java b/core/src/main/java/com/datastrato/gravitino/listener/api/event/SchemaFailureEvent.java new file mode 100644 index 00000000000..3c0cd1e4af8 --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/event/SchemaFailureEvent.java @@ -0,0 +1,20 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.listener.api.event; + +import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.annotation.DeveloperApi; + +/** + * An abstract class representing events that are triggered when a schema operation fails due to an + * exception. + */ +@DeveloperApi +public abstract class SchemaFailureEvent extends FailureEvent { + protected SchemaFailureEvent(String user, NameIdentifier identifier, Exception exception) { + super(user, identifier, exception); + } +} diff --git a/core/src/main/java/com/datastrato/gravitino/listener/api/info/SchemaInfo.java b/core/src/main/java/com/datastrato/gravitino/listener/api/info/SchemaInfo.java new file mode 100644 index 00000000000..b16192f22ce --- /dev/null +++ b/core/src/main/java/com/datastrato/gravitino/listener/api/info/SchemaInfo.java @@ -0,0 +1,84 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ + +package com.datastrato.gravitino.listener.api.info; + +import com.datastrato.gravitino.Audit; +import com.datastrato.gravitino.annotation.DeveloperApi; +import com.datastrato.gravitino.rel.Schema; +import com.google.common.collect.ImmutableMap; +import java.util.Map; +import javax.annotation.Nullable; + +/** Provides read-only access to schema information for event listeners. */ +@DeveloperApi +public final class SchemaInfo { + private final String name; + @Nullable private final String comment; + private final Map properties; + @Nullable private final Audit audit; + + /** + * Constructs schema information based on a given schema. + * + * @param schema The schema to extract information from. + */ + public SchemaInfo(Schema schema) { + this(schema.name(), schema.comment(), schema.properties(), schema.auditInfo()); + } + + /** + * Constructs schema information with detailed parameters. + * + * @param name The name of the schema. + * @param comment An optional description of the schema. + * @param properties A map of schema properties. + * @param audit Optional audit information. + */ + public SchemaInfo(String name, String comment, Map properties, Audit audit) { + this.name = name; + this.comment = comment; + this.properties = properties == null ? ImmutableMap.of() : ImmutableMap.copyOf(properties); + this.audit = audit; + } + + /** + * Gets the schema name. + * + * @return The schema name. + */ + public String name() { + return name; + } + + /** + * Gets the optional schema comment. + * + * @return The schema comment, or null if not provided. + */ + @Nullable + public String comment() { + return comment; + } + + /** + * Gets the schema properties. + * + * @return An immutable map of schema properties. + */ + public Map properties() { + return properties; + } + + /** + * Gets the optional audit information. + * + * @return The audit information, or null if not provided. 
+ */ + @Nullable + public Audit audit() { + return audit; + } +} diff --git a/server/src/main/java/com/datastrato/gravitino/server/GravitinoServer.java b/server/src/main/java/com/datastrato/gravitino/server/GravitinoServer.java index 9c7ba40a3a6..9dc90458ba0 100644 --- a/server/src/main/java/com/datastrato/gravitino/server/GravitinoServer.java +++ b/server/src/main/java/com/datastrato/gravitino/server/GravitinoServer.java @@ -8,7 +8,7 @@ import com.datastrato.gravitino.GravitinoEnv; import com.datastrato.gravitino.catalog.CatalogManager; import com.datastrato.gravitino.catalog.FilesetDispatcher; -import com.datastrato.gravitino.catalog.SchemaOperationDispatcher; +import com.datastrato.gravitino.catalog.SchemaDispatcher; import com.datastrato.gravitino.catalog.TableDispatcher; import com.datastrato.gravitino.metalake.MetalakeManager; import com.datastrato.gravitino.metrics.MetricsSystem; @@ -79,9 +79,7 @@ protected void configure() { bind(gravitinoEnv.metalakesManager()).to(MetalakeManager.class).ranked(1); bind(gravitinoEnv.catalogManager()).to(CatalogManager.class).ranked(1); - bind(gravitinoEnv.schemaOperationDispatcher()) - .to(SchemaOperationDispatcher.class) - .ranked(1); + bind(gravitinoEnv.schemaDispatcher()).to(SchemaDispatcher.class).ranked(1); bind(gravitinoEnv.tableDispatcher()).to(TableDispatcher.class).ranked(1); bind(gravitinoEnv.filesetDispatcher()).to(FilesetDispatcher.class).ranked(1); bind(gravitinoEnv.topicOperationDispatcher()) diff --git a/server/src/main/java/com/datastrato/gravitino/server/web/rest/SchemaOperations.java b/server/src/main/java/com/datastrato/gravitino/server/web/rest/SchemaOperations.java index 00b4ff6b13a..78e2fa6acfe 100644 --- a/server/src/main/java/com/datastrato/gravitino/server/web/rest/SchemaOperations.java +++ b/server/src/main/java/com/datastrato/gravitino/server/web/rest/SchemaOperations.java @@ -8,7 +8,7 @@ import com.codahale.metrics.annotation.Timed; import com.datastrato.gravitino.NameIdentifier; import 
com.datastrato.gravitino.Namespace; -import com.datastrato.gravitino.catalog.SchemaOperationDispatcher; +import com.datastrato.gravitino.catalog.SchemaDispatcher; import com.datastrato.gravitino.dto.requests.SchemaCreateRequest; import com.datastrato.gravitino.dto.requests.SchemaUpdateRequest; import com.datastrato.gravitino.dto.requests.SchemaUpdatesRequest; @@ -47,12 +47,12 @@ public class SchemaOperations { private static final Logger LOG = LoggerFactory.getLogger(SchemaOperations.class); - private final SchemaOperationDispatcher dispatcher; + private final SchemaDispatcher dispatcher; @Context private HttpServletRequest httpRequest; @Inject - public SchemaOperations(SchemaOperationDispatcher dispatcher) { + public SchemaOperations(SchemaDispatcher dispatcher) { this.dispatcher = dispatcher; } diff --git a/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestSchemaOperations.java b/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestSchemaOperations.java index 90dec8989ff..7b36cac1384 100644 --- a/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestSchemaOperations.java +++ b/server/src/test/java/com/datastrato/gravitino/server/web/rest/TestSchemaOperations.java @@ -17,6 +17,7 @@ import com.datastrato.gravitino.Config; import com.datastrato.gravitino.GravitinoEnv; import com.datastrato.gravitino.NameIdentifier; +import com.datastrato.gravitino.catalog.SchemaDispatcher; import com.datastrato.gravitino.catalog.SchemaOperationDispatcher; import com.datastrato.gravitino.dto.rel.SchemaDTO; import com.datastrato.gravitino.dto.requests.SchemaCreateRequest; @@ -93,7 +94,7 @@ protected Application configure() { new AbstractBinder() { @Override protected void configure() { - bind(dispatcher).to(SchemaOperationDispatcher.class).ranked(2); + bind(dispatcher).to(SchemaDispatcher.class).ranked(2); bindFactory(MockServletRequestFactory.class).to(HttpServletRequest.class); } }); From 447733a710fadd49ca7faa0f65c9ead203ae355a Mon Sep 17 
00:00:00 2001 From: charliecheng630 <74488612+charliecheng630@users.noreply.github.com> Date: Tue, 16 Apr 2024 13:03:44 +0800 Subject: [PATCH 5/7] [#2914] Improvement(trino-connector) Add missing override annotation to overriding GravitinoPlugin method. (#2959) ### What changes were proposed in this pull request? Add missing override annotation to overriding GravitinoPlugin method. ### Why are the changes needed? Methods that override Trino plugin should use the override annotation. Fix: #2914 ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? ITs and UTs --------- Co-authored-by: Charlie Cheng --- .../datastrato/gravitino/trino/connector/GravitinoPlugin.java | 1 + 1 file changed, 1 insertion(+) diff --git a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoPlugin.java b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoPlugin.java index f2da88362c2..e4bbae0467d 100644 --- a/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoPlugin.java +++ b/trino-connector/src/main/java/com/datastrato/gravitino/trino/connector/GravitinoPlugin.java @@ -11,6 +11,7 @@ /** Trino plugin endpoint, using java spi mechanism */ public class GravitinoPlugin implements Plugin { + @Override public Iterable getConnectorFactories() { return ImmutableList.of(new GravitinoConnectorFactory()); } From 2bb3e66632660ca5af2608520ca2e1e956e8b638 Mon Sep 17 00:00:00 2001 From: mchades Date: Tue, 16 Apr 2024 14:18:06 +0800 Subject: [PATCH 6/7] refactor(core): use capability of column default value (#2859) ### What changes were proposed in this pull request? - remove the column default value validation in Hive catalog and Iceberg catalog - available column default capability in the framework ### Why are the changes needed? Fix: #2953 ### Does this PR introduce _any_ user-facing change? no ### How was this patch tested? 
tests added --- .../catalog/hive/HiveCatalogCapability.java | 9 +++ .../catalog/hive/HiveCatalogOperations.java | 29 ---------- .../gravitino/catalog/hive/TestHiveTable.java | 56 ------------------- .../hive/integration/test/CatalogHiveIT.java | 38 +++++++++++++ .../lakehouse/iceberg/IcebergCatalog.java | 6 ++ .../iceberg/IcebergCatalogCapability.java | 17 ++++++ .../iceberg/IcebergCatalogOperations.java | 17 +++--- .../iceberg/ops/IcebergTableOpsHelper.java | 15 ----- .../lakehouse/iceberg/TestIcebergTable.java | 42 -------------- .../integration/test/CatalogIcebergIT.java | 3 +- .../gravitino/catalog/CapabilityHelpers.java | 29 ++++++++-- .../gravitino/connector/BaseCatalog.java | 2 +- .../connector/capability/Capability.java | 42 ++++++++++++-- 13 files changed, 140 insertions(+), 165 deletions(-) create mode 100644 catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogCapability.java diff --git a/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogCapability.java b/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogCapability.java index 3a162b22bb7..d98f6e12d7b 100644 --- a/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogCapability.java +++ b/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogCapability.java @@ -16,4 +16,13 @@ public CapabilityResult columnNotNull() { "The NOT NULL constraint for column is only supported since Hive 3.0, " + "but the current Gravitino Hive catalog only supports Hive 2.x."); } + + @Override + public CapabilityResult columnDefaultValue() { + // The DEFAULT constraint for column is supported since Hive3.0, see + // https://issues.apache.org/jira/browse/HIVE-18726 + return CapabilityResult.unsupported( + "The DEFAULT constraint for column is only supported since Hive 3.0, " + + "but the current Gravitino Hive catalog only supports Hive 
2.x."); + } } diff --git a/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogOperations.java b/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogOperations.java index e820376d00c..5f5d7c836d0 100644 --- a/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogOperations.java +++ b/catalogs/catalog-hive/src/main/java/com/datastrato/gravitino/catalog/hive/HiveCatalogOperations.java @@ -35,7 +35,6 @@ import com.datastrato.gravitino.rel.Table; import com.datastrato.gravitino.rel.TableCatalog; import com.datastrato.gravitino.rel.TableChange; -import com.datastrato.gravitino.rel.expressions.Expression; import com.datastrato.gravitino.rel.expressions.NamedReference; import com.datastrato.gravitino.rel.expressions.distributions.Distribution; import com.datastrato.gravitino.rel.expressions.distributions.Distributions; @@ -591,11 +590,6 @@ private void validateColumnChangeForAlter( || !partitionFields.contains(fieldToAdd), "Cannot alter partition column: " + fieldToAdd); - if (c instanceof TableChange.UpdateColumnDefaultValue) { - throw new IllegalArgumentException( - "Hive does not support altering column default value"); - } - if (c instanceof TableChange.UpdateColumnPosition && afterPartitionColumn( partitionFields, ((TableChange.UpdateColumnPosition) c).getPosition())) { @@ -682,12 +676,6 @@ public Table createTable( validatePartitionForCreate(columns, partitioning); validateDistributionAndSort(distribution, sortOrders); - Arrays.stream(columns) - .forEach( - c -> { - validateColumnDefaultValue(c.name(), c.defaultValue()); - }); - TableType tableType = (TableType) tablePropertiesMetadata.getOrDefault(properties, TABLE_TYPE); Preconditions.checkArgument( SUPPORT_TABLE_TYPES.contains(tableType.name()), @@ -784,8 +772,6 @@ public Table alterTable(NameIdentifier tableIdent, TableChange... 
changes) if (change instanceof TableChange.AddColumn) { TableChange.AddColumn addColumn = (TableChange.AddColumn) change; - String fieldName = String.join(".", addColumn.fieldName()); - validateColumnDefaultValue(fieldName, addColumn.getDefaultValue()); doAddColumn(cols, addColumn); } else if (change instanceof TableChange.DeleteColumn) { @@ -803,10 +789,6 @@ public Table alterTable(NameIdentifier tableIdent, TableChange... changes) } else if (change instanceof TableChange.UpdateColumnType) { doUpdateColumnType(cols, (TableChange.UpdateColumnType) change); - } else if (change instanceof TableChange.UpdateColumnDefaultValue) { - throw new IllegalArgumentException( - "Hive does not support altering column default value"); - } else if (change instanceof TableChange.UpdateColumnAutoIncrement) { throw new IllegalArgumentException( "Hive does not support altering column auto increment"); @@ -854,17 +836,6 @@ public Table alterTable(NameIdentifier tableIdent, TableChange... changes) } } - private void validateColumnDefaultValue(String fieldName, Expression defaultValue) { - // The DEFAULT constraint for column is supported since Hive3.0, see - // https://issues.apache.org/jira/browse/HIVE-18726 - if (!defaultValue.equals(Column.DEFAULT_VALUE_NOT_SET)) { - throw new IllegalArgumentException( - "The DEFAULT constraint for column is only supported since Hive 3.0, " - + "but the current Gravitino Hive catalog only supports Hive 2.x. 
Illegal column: " - + fieldName); - } - } - private int columnPosition(List columns, TableChange.ColumnPosition position) { Preconditions.checkArgument(position != null, "Column position cannot be null"); if (position instanceof TableChange.After) { diff --git a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTable.java b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTable.java index c5a98cf5a78..d54889de6a6 100644 --- a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTable.java +++ b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/TestHiveTable.java @@ -25,13 +25,11 @@ import com.datastrato.gravitino.rel.expressions.NamedReference; import com.datastrato.gravitino.rel.expressions.distributions.Distribution; import com.datastrato.gravitino.rel.expressions.distributions.Distributions; -import com.datastrato.gravitino.rel.expressions.literals.Literals; import com.datastrato.gravitino.rel.expressions.sorts.NullOrdering; import com.datastrato.gravitino.rel.expressions.sorts.SortDirection; import com.datastrato.gravitino.rel.expressions.sorts.SortOrder; import com.datastrato.gravitino.rel.expressions.sorts.SortOrders; import com.datastrato.gravitino.rel.expressions.transforms.Transform; -import com.datastrato.gravitino.rel.expressions.transforms.Transforms; import com.datastrato.gravitino.rel.types.Types; import com.google.common.collect.Maps; import java.time.Instant; @@ -201,34 +199,6 @@ public void testCreateHiveTable() { distribution, sortOrders)); Assertions.assertTrue(exception.getMessage().contains("Table already exists")); - - HiveColumn withDefault = - HiveColumn.builder() - .withName("col_3") - .withType(Types.ByteType.get()) - .withComment(HIVE_COMMENT) - .withNullable(true) - .withDefaultValue(Literals.NULL) - .build(); - exception = - Assertions.assertThrows( - IllegalArgumentException.class, - () -> - tableCatalog.createTable( - 
tableIdentifier, - new Column[] {withDefault}, - HIVE_COMMENT, - properties, - Transforms.EMPTY_TRANSFORM, - distribution, - sortOrders)); - Assertions.assertTrue( - exception - .getMessage() - .contains( - "The DEFAULT constraint for column is only supported since Hive 3.0, " - + "but the current Gravitino Hive catalog only supports Hive 2.x"), - "The exception message is: " + exception.getMessage()); } @Test @@ -447,32 +417,6 @@ public void testAlterHiveTable() { () -> tableCatalog.alterTable(tableIdentifier, tableChange6)); Assertions.assertTrue(exception.getMessage().contains("Cannot add column with duplicate name")); - TableChange tableChange8 = - TableChange.addColumn( - new String[] {"col_3"}, Types.ByteType.get(), "comment", Literals.NULL); - exception = - Assertions.assertThrows( - IllegalArgumentException.class, - () -> tableCatalog.alterTable(tableIdentifier, tableChange8)); - Assertions.assertTrue( - exception - .getMessage() - .contains( - "The DEFAULT constraint for column is only supported since Hive 3.0, " - + "but the current Gravitino Hive catalog only supports Hive 2.x"), - "The exception message is: " + exception.getMessage()); - - TableChange tableChange9 = - TableChange.updateColumnDefaultValue( - new String[] {"col_1"}, Literals.of("0", Types.ByteType.get())); - exception = - Assertions.assertThrows( - IllegalArgumentException.class, - () -> tableCatalog.alterTable(tableIdentifier, tableChange9)); - - Assertions.assertEquals( - "Hive does not support altering column default value", exception.getMessage()); - // test alter tableCatalog.alterTable( tableIdentifier, diff --git a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java index 8207b3bebae..c660d8185d3 100644 --- 
a/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java +++ b/catalogs/catalog-hive/src/test/java/com/datastrato/gravitino/catalog/hive/integration/test/CatalogHiveIT.java @@ -507,6 +507,30 @@ public void testCreateHiveTable() throws TException, InterruptedException { .contains( "The NOT NULL constraint for column is only supported since Hive 3.0, " + "but the current Gravitino Hive catalog only supports Hive 2.x")); + + // test column default value + Column withDefault = + Column.of( + "default_column", Types.StringType.get(), "default column", true, false, Literals.NULL); + exception = + assertThrows( + IllegalArgumentException.class, + () -> + catalog + .asTableCatalog() + .createTable( + nameIdentifier, + new Column[] {withDefault}, + TABLE_COMMENT, + properties, + Transforms.EMPTY_TRANSFORM)); + Assertions.assertTrue( + exception + .getMessage() + .contains( + "The DEFAULT constraint for column is only supported since Hive 3.0, " + + "but the current Gravitino Hive catalog only supports Hive 2.x"), + "The exception message is: " + exception.getMessage()); } @Test @@ -1134,6 +1158,20 @@ public void testAlterHiveTable() throws TException, InterruptedException { "The NOT NULL constraint for column is only supported since Hive 3.0," + " but the current Gravitino Hive catalog only supports Hive 2.x. 
Illegal column: hive_col_name1")); + // test update column default value exception + TableChange updateDefaultValue = + TableChange.updateColumnDefaultValue(new String[] {HIVE_COL_NAME1}, Literals.NULL); + exception = + assertThrows( + IllegalArgumentException.class, () -> tableCatalog.alterTable(id, updateDefaultValue)); + Assertions.assertTrue( + exception + .getMessage() + .contains( + "The DEFAULT constraint for column is only supported since Hive 3.0, " + + "but the current Gravitino Hive catalog only supports Hive 2.x"), + "The exception message is: " + exception.getMessage()); + // test updateColumnPosition exception Column col1 = Column.of("name", Types.StringType.get(), "comment"); Column col2 = Column.of("address", Types.StringType.get(), "comment"); diff --git a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalog.java b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalog.java index 8ffc98491f6..ffa8c2a0cdd 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalog.java +++ b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalog.java @@ -6,6 +6,7 @@ import com.datastrato.gravitino.connector.BaseCatalog; import com.datastrato.gravitino.connector.CatalogOperations; +import com.datastrato.gravitino.connector.capability.Capability; import com.datastrato.gravitino.rel.SupportsSchemas; import com.datastrato.gravitino.rel.TableCatalog; import java.util.Map; @@ -31,6 +32,11 @@ protected CatalogOperations newOps(Map config) { return ops; } + @Override + public Capability newCapability() { + return new IcebergCatalogCapability(); + } + /** @return The Iceberg catalog operations as {@link IcebergCatalogOperations}. 
*/ @Override public SupportsSchemas asSchemas() { diff --git a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogCapability.java b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogCapability.java new file mode 100644 index 00000000000..dbaa85b09d6 --- /dev/null +++ b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogCapability.java @@ -0,0 +1,17 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ +package com.datastrato.gravitino.catalog.lakehouse.iceberg; + +import com.datastrato.gravitino.connector.capability.Capability; +import com.datastrato.gravitino.connector.capability.CapabilityResult; + +public class IcebergCatalogCapability implements Capability { + @Override + public CapabilityResult columnDefaultValue() { + // Iceberg column default value is WIP, see + // https://github.com/apache/iceberg/pull/4525 + return CapabilityResult.unsupported("Iceberg does not support column default value."); + } +} diff --git a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java index e3f7a0a1ad3..b3fc85e5883 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java +++ b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/IcebergCatalogOperations.java @@ -481,16 +481,13 @@ public Table createTable( IcebergColumn[] icebergColumns = Arrays.stream(columns) .map( - column -> { - IcebergTableOpsHelper.validateColumnDefaultValue( - column.name(), column.defaultValue()); - return 
IcebergColumn.builder() - .withName(column.name()) - .withType(column.dataType()) - .withComment(column.comment()) - .withNullable(column.nullable()) - .build(); - }) + column -> + IcebergColumn.builder() + .withName(column.name()) + .withType(column.dataType()) + .withComment(column.comment()) + .withNullable(column.nullable()) + .build()) .toArray(IcebergColumn[]::new); IcebergTable createdTable = diff --git a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/ops/IcebergTableOpsHelper.java b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/ops/IcebergTableOpsHelper.java index 29f4acc64cb..6c87ee98a49 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/ops/IcebergTableOpsHelper.java +++ b/catalogs/catalog-lakehouse-iceberg/src/main/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/ops/IcebergTableOpsHelper.java @@ -7,7 +7,6 @@ import com.datastrato.gravitino.NameIdentifier; import com.datastrato.gravitino.catalog.lakehouse.iceberg.converter.ConvertUtil; -import com.datastrato.gravitino.rel.Column; import com.datastrato.gravitino.rel.TableChange; import com.datastrato.gravitino.rel.TableChange.AddColumn; import com.datastrato.gravitino.rel.TableChange.After; @@ -22,7 +21,6 @@ import com.datastrato.gravitino.rel.TableChange.UpdateColumnPosition; import com.datastrato.gravitino.rel.TableChange.UpdateColumnType; import com.datastrato.gravitino.rel.TableChange.UpdateComment; -import com.datastrato.gravitino.rel.expressions.Expression; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; @@ -193,9 +191,6 @@ private void doAddColumn( parentStruct = icebergTableSchema.asStruct(); } - validateColumnDefaultValue( - String.join(".", addColumn.fieldName()), addColumn.getDefaultValue()); - if (addColumn.isAutoIncrement()) { 
throw new IllegalArgumentException("Iceberg doesn't support auto increment column"); } @@ -259,8 +254,6 @@ private void alterTableColumn( icebergUpdateSchema, (TableChange.UpdateColumnNullability) change); } else if (change instanceof TableChange.UpdateColumnAutoIncrement) { throw new IllegalArgumentException("Iceberg doesn't support auto increment column"); - } else if (change instanceof TableChange.UpdateColumnDefaultValue) { - throw new IllegalArgumentException("Iceberg doesn't support update column default value"); } else { throw new NotSupportedException( "Iceberg doesn't support " + change.getClass().getSimpleName() + " for now"); @@ -269,14 +262,6 @@ private void alterTableColumn( icebergUpdateSchema.commit(); } - public static void validateColumnDefaultValue(String fieldName, Expression defaultValue) { - // Iceberg column default value is WIP, see - // https://github.com/apache/iceberg/pull/4525 - Preconditions.checkArgument( - defaultValue.equals(Column.DEFAULT_VALUE_NOT_SET), - "Iceberg does not support column default value. Illegal column: " + fieldName); - } - public IcebergTableChange buildIcebergTableChanges( NameIdentifier gravitinoNameIdentifier, TableChange... 
tableChanges) { diff --git a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java index 859093d9b88..c5367f86a34 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java +++ b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/TestIcebergTable.java @@ -4,7 +4,6 @@ */ package com.datastrato.gravitino.catalog.lakehouse.iceberg; -import static com.datastrato.gravitino.rel.expressions.transforms.Transforms.EMPTY_TRANSFORM; import static com.datastrato.gravitino.rel.expressions.transforms.Transforms.bucket; import static com.datastrato.gravitino.rel.expressions.transforms.Transforms.day; import static com.datastrato.gravitino.rel.expressions.transforms.Transforms.identity; @@ -25,7 +24,6 @@ import com.datastrato.gravitino.rel.expressions.NamedReference; import com.datastrato.gravitino.rel.expressions.distributions.Distribution; import com.datastrato.gravitino.rel.expressions.distributions.Distributions; -import com.datastrato.gravitino.rel.expressions.literals.Literals; import com.datastrato.gravitino.rel.expressions.sorts.NullOrdering; import com.datastrato.gravitino.rel.expressions.sorts.SortDirection; import com.datastrato.gravitino.rel.expressions.sorts.SortOrder; @@ -216,31 +214,6 @@ public void testCreateIcebergTable() { Distributions.NONE, sortOrders)); Assertions.assertTrue(exception.getMessage().contains("Table already exists")); - - IcebergColumn withDefaultValue = - IcebergColumn.builder() - .withName("col") - .withType(Types.DateType.get()) - .withComment(ICEBERG_COMMENT) - .withNullable(false) - .withDefaultValue(Literals.NULL) - .build(); - - exception = - Assertions.assertThrows( - IllegalArgumentException.class, - () -> - 
tableCatalog.createTable( - tableIdentifier, - new Column[] {withDefaultValue}, - ICEBERG_COMMENT, - properties, - EMPTY_TRANSFORM, - Distributions.NONE, - null)); - Assertions.assertTrue( - exception.getMessage().contains("Iceberg does not support column default value"), - "The exception message is: " + exception.getMessage()); } @Test @@ -481,21 +454,6 @@ public void testAlterIcebergTable() { }; Assertions.assertArrayEquals(expected, alteredTable.columns()); - // test add column with default value exception - TableChange withDefaultValue = - TableChange.addColumn( - new String[] {"col_3"}, Types.StringType.get(), "comment", Literals.NULL); - exception = - Assertions.assertThrows( - IllegalArgumentException.class, - () -> - tableCatalog.alterTable( - NameIdentifier.of(tableIdentifier.namespace(), "test_iceberg_table_new"), - withDefaultValue)); - Assertions.assertTrue( - exception.getMessage().contains("Iceberg does not support column default value"), - "The exception message is: " + exception.getMessage()); - // test delete column change icebergCatalog .asTableCatalog() diff --git a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/CatalogIcebergIT.java b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/CatalogIcebergIT.java index e2a330b94c4..95e54dbf2ae 100644 --- a/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/CatalogIcebergIT.java +++ b/catalogs/catalog-lakehouse-iceberg/src/test/java/com/datastrato/gravitino/catalog/lakehouse/iceberg/integration/test/CatalogIcebergIT.java @@ -701,7 +701,8 @@ public void testAlterIcebergTable() { Assertions.assertTrue( illegalArgumentException .getMessage() - .contains("Iceberg doesn't support update column default value")); + .contains("Iceberg does not support column default value. 
Illegal column: name"), + "The exception is: " + illegalArgumentException.getMessage()); catalog .asTableCatalog() diff --git a/core/src/main/java/com/datastrato/gravitino/catalog/CapabilityHelpers.java b/core/src/main/java/com/datastrato/gravitino/catalog/CapabilityHelpers.java index c53de1003bf..d08587987ba 100644 --- a/core/src/main/java/com/datastrato/gravitino/catalog/CapabilityHelpers.java +++ b/core/src/main/java/com/datastrato/gravitino/catalog/CapabilityHelpers.java @@ -9,6 +9,7 @@ import com.datastrato.gravitino.connector.capability.Capability; import com.datastrato.gravitino.rel.Column; import com.datastrato.gravitino.rel.TableChange; +import com.datastrato.gravitino.rel.expressions.Expression; import com.google.common.base.Preconditions; import java.util.Arrays; @@ -30,6 +31,10 @@ public static TableChange[] applyCapabilities(Capability capabilities, TableChan } else if (change instanceof TableChange.UpdateColumnNullability) { return applyCapabilities( (TableChange.UpdateColumnNullability) change, capabilities); + + } else if (change instanceof TableChange.UpdateColumnDefaultValue) { + return applyCapabilities( + ((TableChange.UpdateColumnDefaultValue) change), capabilities); } return change; }) @@ -72,6 +77,18 @@ private static TableChange applyCapabilities( updateColumnNullability.nullable()); } + private static TableChange applyCapabilities( + TableChange.UpdateColumnDefaultValue updateColumnDefaultValue, Capability capabilities) { + applyColumnDefaultValue( + String.join(".", updateColumnDefaultValue.fieldName()), + updateColumnDefaultValue.getNewDefaultValue(), + capabilities); + + return TableChange.updateColumnDefaultValue( + applyCaseSensitiveOnColumnName(updateColumnDefaultValue.fieldName(), capabilities), + updateColumnDefaultValue.getNewDefaultValue()); + } + private static Column applyCapabilities(Column column, Capability capabilities) { applyColumnNotNull(column, capabilities); applyColumnDefaultValue(column, capabilities); @@ -112,12 
+129,14 @@ private static void applyColumnNotNull( } private static void applyColumnDefaultValue(Column column, Capability capabilities) { + applyColumnDefaultValue(column.name(), column.defaultValue(), capabilities); + } + + private static void applyColumnDefaultValue( + String columnName, Expression defaultValue, Capability capabilities) { Preconditions.checkArgument( - capabilities.columnDefaultValue().supported() - || DEFAULT_VALUE_NOT_SET.equals(column.defaultValue()), - capabilities.columnDefaultValue().unsupportedMessage() - + " Illegal column: " - + column.name()); + capabilities.columnDefaultValue().supported() || DEFAULT_VALUE_NOT_SET.equals(defaultValue), + capabilities.columnDefaultValue().unsupportedMessage() + " Illegal column: " + columnName); } private static void applyNameSpecification( diff --git a/core/src/main/java/com/datastrato/gravitino/connector/BaseCatalog.java b/core/src/main/java/com/datastrato/gravitino/connector/BaseCatalog.java index dd0cf713ea1..83ec0b4cd45 100644 --- a/core/src/main/java/com/datastrato/gravitino/connector/BaseCatalog.java +++ b/core/src/main/java/com/datastrato/gravitino/connector/BaseCatalog.java @@ -77,7 +77,7 @@ public abstract class BaseCatalog */ @Evolving protected Capability newCapability() { - return new Capability() {}; + return Capability.DEFAULT; } /** diff --git a/core/src/main/java/com/datastrato/gravitino/connector/capability/Capability.java b/core/src/main/java/com/datastrato/gravitino/connector/capability/Capability.java index 326f96867f2..9c6dde58dc3 100644 --- a/core/src/main/java/com/datastrato/gravitino/connector/capability/Capability.java +++ b/core/src/main/java/com/datastrato/gravitino/connector/capability/Capability.java @@ -13,6 +13,8 @@ @Evolving public interface Capability { + Capability DEFAULT = new DefaultCapability(); + /** The scope of the capability. */ enum Scope { CATALOG, @@ -30,7 +32,7 @@ enum Scope { * @return The check result of the not null constraint. 
*/ default CapabilityResult columnNotNull() { - return CapabilityResult.SUPPORTED; + return DEFAULT.columnNotNull(); } /** @@ -39,7 +41,7 @@ default CapabilityResult columnNotNull() { * @return The check result of the default value. */ default CapabilityResult columnDefaultValue() { - return CapabilityResult.SUPPORTED; + return DEFAULT.columnDefaultValue(); } /** @@ -49,7 +51,7 @@ default CapabilityResult columnDefaultValue() { * @return The capability of the case-sensitive on name. */ default CapabilityResult caseSensitiveOnName(Scope scope) { - return CapabilityResult.SUPPORTED; + return DEFAULT.caseSensitiveOnName(scope); } /** @@ -60,7 +62,7 @@ default CapabilityResult caseSensitiveOnName(Scope scope) { * @return The capability of the specification on name. */ default CapabilityResult specificationOnName(Scope scope, String name) { - return CapabilityResult.SUPPORTED; + return DEFAULT.specificationOnName(scope, name); } /** @@ -70,7 +72,35 @@ default CapabilityResult specificationOnName(Scope scope, String name) { * @return The capability of the managed storage. */ default CapabilityResult managedStorage(Scope scope) { - return CapabilityResult.unsupported( - String.format("The %s entity is not fully managed by Gravitino.", scope)); + return DEFAULT.managedStorage(scope); + } + + /** The default implementation of the capability. 
*/ + class DefaultCapability implements Capability { + @Override + public CapabilityResult columnNotNull() { + return CapabilityResult.SUPPORTED; + } + + @Override + public CapabilityResult columnDefaultValue() { + return CapabilityResult.SUPPORTED; + } + + @Override + public CapabilityResult caseSensitiveOnName(Scope scope) { + return CapabilityResult.SUPPORTED; + } + + @Override + public CapabilityResult specificationOnName(Scope scope, String name) { + return CapabilityResult.SUPPORTED; + } + + @Override + public CapabilityResult managedStorage(Scope scope) { + return CapabilityResult.unsupported( + String.format("The %s entity is not fully managed by Gravitino.", scope)); + } } } From 49b07a2a4d71b4d5e7492fe504582d311fb10da5 Mon Sep 17 00:00:00 2001 From: FANNG Date: Tue, 16 Apr 2024 14:42:36 +0800 Subject: [PATCH 7/7] [#2903]feat(spark-connector): register specific catalog to Spark catalog manager (#2906) ### What changes were proposed in this pull request? register specific catalog like `GravitinoHiveCatalog` to Spark catalog manager, no the general `GravitinoCatalog` ### Why are the changes needed? The specific catalog could implement different interfaces like `FunctionCatalog`, to support Iceberg partition. Fix: #2903 ### Does this PR introduce _any_ user-facing change? no ### How was this patch tested? 
existing tests --- .../connector/GravitinoCatalogAdaptor.java | 54 --------- .../GravitinoCatalogAdaptorFactory.java | 27 ----- ...GravitinoCatalog.java => BaseCatalog.java} | 70 +++++++++--- .../catalog/GravitinoCatalogManager.java | 7 +- ...Adaptor.java => GravitinoHiveCatalog.java} | 42 ++++--- ...ptor.java => GravitinoIcebergCatalog.java} | 104 +++++++++--------- .../plugin/GravitinoDriverPlugin.java | 61 +++++++--- .../catalog/TestTransformTableChange.java | 26 ++--- 8 files changed, 191 insertions(+), 200 deletions(-) delete mode 100644 spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/GravitinoCatalogAdaptor.java delete mode 100644 spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/GravitinoCatalogAdaptorFactory.java rename spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/{GravitinoCatalog.java => BaseCatalog.java} (87%) rename spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/{HiveAdaptor.java => GravitinoHiveCatalog.java} (73%) rename spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/iceberg/{IcebergAdaptor.java => GravitinoIcebergCatalog.java} (89%) diff --git a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/GravitinoCatalogAdaptor.java b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/GravitinoCatalogAdaptor.java deleted file mode 100644 index a1a9ab90e94..00000000000 --- a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/GravitinoCatalogAdaptor.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2024 Datastrato Pvt Ltd. - * This software is licensed under the Apache License version 2. 
- */ - -package com.datastrato.gravitino.spark.connector; - -import com.datastrato.gravitino.rel.Table; -import com.datastrato.gravitino.spark.connector.table.SparkBaseTable; -import java.util.Map; -import org.apache.spark.sql.connector.catalog.Identifier; -import org.apache.spark.sql.connector.catalog.TableCatalog; -import org.apache.spark.sql.util.CaseInsensitiveStringMap; - -/** - * GravitinoCatalogAdaptor provides a unified interface for different catalogs to adapt to - * GravitinoCatalog. - */ -public interface GravitinoCatalogAdaptor { - - /** - * Get a PropertiesConverter to transform properties between Gravitino and Spark. - * - * @return an PropertiesConverter - */ - PropertiesConverter getPropertiesConverter(); - - /** - * Create a specific Spark table, combined with gravitinoTable to do DML operations and - * sparkCatalog to do IO operations. - * - * @param identifier Spark's table identifier - * @param gravitinoTable Gravitino table to do DDL operations - * @param sparkCatalog specific Spark catalog to do IO operations - * @param propertiesConverter transform properties between Gravitino and Spark - * @return a specific Spark table - */ - SparkBaseTable createSparkTable( - Identifier identifier, - Table gravitinoTable, - TableCatalog sparkCatalog, - PropertiesConverter propertiesConverter); - - /** - * Create a specific Spark catalog, mainly used to create Spark table. 
- * - * @param name catalog name - * @param options catalog options from configuration - * @param properties catalog properties from Gravitino - * @return a specific Spark catalog - */ - TableCatalog createAndInitSparkCatalog( - String name, CaseInsensitiveStringMap options, Map properties); -} diff --git a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/GravitinoCatalogAdaptorFactory.java b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/GravitinoCatalogAdaptorFactory.java deleted file mode 100644 index 0599f5cad1b..00000000000 --- a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/GravitinoCatalogAdaptorFactory.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2024 Datastrato Pvt Ltd. - * This software is licensed under the Apache License version 2. - */ - -package com.datastrato.gravitino.spark.connector; - -import com.datastrato.gravitino.spark.connector.hive.HiveAdaptor; -import com.datastrato.gravitino.spark.connector.iceberg.IcebergAdaptor; -import java.util.Locale; - -/** - * GravitinoCatalogAdaptorFactory creates a specific GravitinoCatalogAdaptor according to the - * catalog provider. 
- */ -public class GravitinoCatalogAdaptorFactory { - public static GravitinoCatalogAdaptor createGravitinoAdaptor(String provider) { - switch (provider.toLowerCase(Locale.ROOT)) { - case "hive": - return new HiveAdaptor(); - case "lakehouse-iceberg": - return new IcebergAdaptor(); - default: - throw new RuntimeException(String.format("Provider:%s is not supported yet", provider)); - } - } -} diff --git a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/GravitinoCatalog.java b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/BaseCatalog.java similarity index 87% rename from spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/GravitinoCatalog.java rename to spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/BaseCatalog.java index e76f7f39939..bd6c26f9241 100644 --- a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/GravitinoCatalog.java +++ b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/BaseCatalog.java @@ -15,12 +15,11 @@ import com.datastrato.gravitino.rel.SchemaChange; import com.datastrato.gravitino.rel.expressions.literals.Literals; import com.datastrato.gravitino.spark.connector.ConnectorConstants; -import com.datastrato.gravitino.spark.connector.GravitinoCatalogAdaptor; -import com.datastrato.gravitino.spark.connector.GravitinoCatalogAdaptorFactory; import com.datastrato.gravitino.spark.connector.PropertiesConverter; import com.datastrato.gravitino.spark.connector.SparkTransformConverter; import com.datastrato.gravitino.spark.connector.SparkTransformConverter.DistributionAndSortOrdersInfo; import com.datastrato.gravitino.spark.connector.SparkTypeConverter; +import com.datastrato.gravitino.spark.connector.table.SparkBaseTable; import com.google.common.annotations.VisibleForTesting; import 
com.google.common.base.Preconditions; import java.util.Arrays; @@ -47,11 +46,18 @@ import org.apache.spark.sql.util.CaseInsensitiveStringMap; /** - * GravitinoCatalog is the class registered to Spark CatalogManager, it's lazy loaded which means - * Spark connector loads GravitinoCatalog when it's used. It will create different Spark Tables from - * different Gravitino catalogs. + * BaseCatalog acts as the foundational class for Spark CatalogManager registration, enabling + * seamless integration of various data source catalogs within Spark's ecosystem. This class is + * pivotal in bridging Spark with diverse data sources, ensuring a unified approach to data + * management and manipulation across the platform. + * + *

This class implements essential interfaces for the table and namespace management. Subclasses + * can extend BaseCatalog to implement more specific interfaces tailored to the needs of different + * data sources. Its lazy loading design ensures that instances of BaseCatalog are created only when + * needed, optimizing resource utilization and minimizing the overhead associated with + * initialization. */ -public class GravitinoCatalog implements TableCatalog, SupportsNamespaces { +public abstract class BaseCatalog implements TableCatalog, SupportsNamespaces { // The specific Spark catalog to do IO operations, different catalogs have different spark catalog // implementations, like HiveTableCatalog for Hive, JDBCTableCatalog for JDBC, SparkCatalog for // Iceberg. @@ -63,14 +69,46 @@ public class GravitinoCatalog implements TableCatalog, SupportsNamespaces { private final String metalakeName; private String catalogName; private final GravitinoCatalogManager gravitinoCatalogManager; - // Different catalog use GravitinoCatalogAdaptor to adapt to GravitinoCatalog - private GravitinoCatalogAdaptor gravitinoAdaptor; - public GravitinoCatalog() { + protected BaseCatalog() { gravitinoCatalogManager = GravitinoCatalogManager.get(); metalakeName = gravitinoCatalogManager.getMetalakeName(); } + /** + * Create a specific Spark catalog, mainly used to create Spark table. + * + * @param name catalog name + * @param options catalog options from configuration + * @param properties catalog properties from Gravitino + * @return a specific Spark catalog + */ + protected abstract TableCatalog createAndInitSparkCatalog( + String name, CaseInsensitiveStringMap options, Map properties); + + /** + * Create a specific Spark table, combined with gravitinoTable to do DML operations and + * sparkCatalog to do IO operations. 
+ * + * @param identifier Spark's table identifier + * @param gravitinoTable Gravitino table to do DDL operations + * @param sparkCatalog specific Spark catalog to do IO operations + * @param propertiesConverter transform properties between Gravitino and Spark + * @return a specific Spark table + */ + protected abstract SparkBaseTable createSparkTable( + Identifier identifier, + com.datastrato.gravitino.rel.Table gravitinoTable, + TableCatalog sparkCatalog, + PropertiesConverter propertiesConverter); + + /** + * Get a PropertiesConverter to transform properties between Gravitino and Spark. + * + * @return an PropertiesConverter + */ + protected abstract PropertiesConverter getPropertiesConverter(); + @Override public void initialize(String name, CaseInsensitiveStringMap options) { this.catalogName = name; @@ -78,11 +116,9 @@ public void initialize(String name, CaseInsensitiveStringMap options) { String provider = gravitinoCatalogClient.provider(); Preconditions.checkArgument( StringUtils.isNotBlank(provider), name + " catalog provider is empty"); - this.gravitinoAdaptor = GravitinoCatalogAdaptorFactory.createGravitinoAdaptor(provider); this.sparkCatalog = - gravitinoAdaptor.createAndInitSparkCatalog( - name, options, gravitinoCatalogClient.properties()); - this.propertiesConverter = gravitinoAdaptor.getPropertiesConverter(); + createAndInitSparkCatalog(name, options, gravitinoCatalogClient.properties()); + this.propertiesConverter = getPropertiesConverter(); } @Override @@ -147,7 +183,7 @@ public Table createTable( partitionings, distributionAndSortOrdersInfo.getDistribution(), distributionAndSortOrdersInfo.getSortOrders()); - return gravitinoAdaptor.createSparkTable(ident, table, sparkCatalog, propertiesConverter); + return createSparkTable(ident, table, sparkCatalog, propertiesConverter); } catch (NoSuchSchemaException e) { throw new NoSuchNamespaceException(ident.namespace()); } catch (com.datastrato.gravitino.exceptions.TableAlreadyExistsException e) { @@ 
-164,7 +200,7 @@ public Table loadTable(Identifier ident) throws NoSuchTableException { .asTableCatalog() .loadTable(NameIdentifier.of(metalakeName, catalogName, database, ident.name())); // Will create a catalog specific table - return gravitinoAdaptor.createSparkTable(ident, table, sparkCatalog, propertiesConverter); + return createSparkTable(ident, table, sparkCatalog, propertiesConverter); } catch (com.datastrato.gravitino.exceptions.NoSuchTableException e) { throw new NoSuchTableException(ident); } @@ -182,7 +218,7 @@ public Table createTable( public Table alterTable(Identifier ident, TableChange... changes) throws NoSuchTableException { com.datastrato.gravitino.rel.TableChange[] gravitinoTableChanges = Arrays.stream(changes) - .map(GravitinoCatalog::transformTableChange) + .map(BaseCatalog::transformTableChange) .toArray(com.datastrato.gravitino.rel.TableChange[]::new); try { com.datastrato.gravitino.rel.Table table = @@ -191,7 +227,7 @@ public Table alterTable(Identifier ident, TableChange... 
changes) throws NoSuchT .alterTable( NameIdentifier.of(metalakeName, catalogName, getDatabase(ident), ident.name()), gravitinoTableChanges); - return gravitinoAdaptor.createSparkTable(ident, table, sparkCatalog, propertiesConverter); + return createSparkTable(ident, table, sparkCatalog, propertiesConverter); } catch (com.datastrato.gravitino.exceptions.NoSuchTableException e) { throw new NoSuchTableException(ident); } diff --git a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/GravitinoCatalogManager.java b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/GravitinoCatalogManager.java index e00f63b6203..9884be2d098 100644 --- a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/GravitinoCatalogManager.java +++ b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/catalog/GravitinoCatalogManager.java @@ -13,9 +13,8 @@ import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.util.Arrays; -import java.util.Set; +import java.util.Map; import java.util.concurrent.ExecutionException; -import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -80,8 +79,8 @@ public void loadRelationalCatalogs() { .forEach(catalog -> gravitinoCatalogs.put(catalog.name(), catalog)); } - public Set getCatalogNames() { - return gravitinoCatalogs.asMap().keySet().stream().collect(Collectors.toSet()); + public Map getCatalogs() { + return gravitinoCatalogs.asMap(); } private Catalog loadCatalog(String catalogName) { diff --git a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HiveAdaptor.java b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/GravitinoHiveCatalog.java similarity index 73% rename from 
spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HiveAdaptor.java rename to spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/GravitinoHiveCatalog.java index 795c6311aef..64b61754a2e 100644 --- a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HiveAdaptor.java +++ b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/GravitinoHiveCatalog.java @@ -6,9 +6,9 @@ package com.datastrato.gravitino.spark.connector.hive; import com.datastrato.gravitino.rel.Table; -import com.datastrato.gravitino.spark.connector.GravitinoCatalogAdaptor; import com.datastrato.gravitino.spark.connector.GravitinoSparkConfig; import com.datastrato.gravitino.spark.connector.PropertiesConverter; +import com.datastrato.gravitino.spark.connector.catalog.BaseCatalog; import com.datastrato.gravitino.spark.connector.table.SparkBaseTable; import com.google.common.base.Preconditions; import java.util.HashMap; @@ -19,29 +19,13 @@ import org.apache.spark.sql.connector.catalog.TableCatalog; import org.apache.spark.sql.util.CaseInsensitiveStringMap; -/** HiveAdaptor provides specific operations for Hive Catalog to adapt to GravitinoCatalog. 
*/ -public class HiveAdaptor implements GravitinoCatalogAdaptor { +public class GravitinoHiveCatalog extends BaseCatalog { @Override - public PropertiesConverter getPropertiesConverter() { - return new HivePropertiesConverter(); - } - - @Override - public SparkBaseTable createSparkTable( - Identifier identifier, - Table gravitinoTable, - TableCatalog sparkCatalog, - PropertiesConverter propertiesConverter) { - return new SparkHiveTable(identifier, gravitinoTable, sparkCatalog, propertiesConverter); - } - - @Override - public TableCatalog createAndInitSparkCatalog( - String name, CaseInsensitiveStringMap options, Map catalogProperties) { - Preconditions.checkArgument( - catalogProperties != null, "Hive Catalog properties should not be null"); - String metastoreUri = catalogProperties.get(GravitinoSparkConfig.GRAVITINO_HIVE_METASTORE_URI); + protected TableCatalog createAndInitSparkCatalog( + String name, CaseInsensitiveStringMap options, Map properties) { + Preconditions.checkArgument(properties != null, "Hive Catalog properties should not be null"); + String metastoreUri = properties.get(GravitinoSparkConfig.GRAVITINO_HIVE_METASTORE_URI); Preconditions.checkArgument( StringUtils.isNotBlank(metastoreUri), "Couldn't get " @@ -55,4 +39,18 @@ public TableCatalog createAndInitSparkCatalog( return hiveCatalog; } + + @Override + protected SparkBaseTable createSparkTable( + Identifier identifier, + Table gravitinoTable, + TableCatalog sparkCatalog, + PropertiesConverter propertiesConverter) { + return new SparkHiveTable(identifier, gravitinoTable, sparkCatalog, propertiesConverter); + } + + @Override + protected PropertiesConverter getPropertiesConverter() { + return new HivePropertiesConverter(); + } } diff --git a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/iceberg/IcebergAdaptor.java b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java similarity index 89% 
rename from spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/iceberg/IcebergAdaptor.java rename to spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java index cf73dfb0427..e3b9783d41e 100644 --- a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/iceberg/IcebergAdaptor.java +++ b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java @@ -6,8 +6,8 @@ package com.datastrato.gravitino.spark.connector.iceberg; import com.datastrato.gravitino.rel.Table; -import com.datastrato.gravitino.spark.connector.GravitinoCatalogAdaptor; import com.datastrato.gravitino.spark.connector.PropertiesConverter; +import com.datastrato.gravitino.spark.connector.catalog.BaseCatalog; import com.datastrato.gravitino.spark.connector.table.SparkBaseTable; import com.google.common.base.Preconditions; import java.util.HashMap; @@ -19,8 +19,60 @@ import org.apache.spark.sql.connector.catalog.TableCatalog; import org.apache.spark.sql.util.CaseInsensitiveStringMap; -/** IcebergAdaptor provides specific operations for Iceberg Catalog to adapt to GravitinoCatalog. */ -public class IcebergAdaptor implements GravitinoCatalogAdaptor { +/** + * The GravitinoIcebergCatalog class extends the BaseCatalog to integrate with the Iceberg table + * format, providing specialized support for Iceberg-specific functionalities within Spark's + * ecosystem. This implementation can further adapt to specific interfaces such as + * StagingTableCatalog and FunctionCatalog, allowing for advanced operations like table staging and + * function management tailored to the needs of Iceberg tables. 
+ */ +public class GravitinoIcebergCatalog extends BaseCatalog { + + @Override + protected TableCatalog createAndInitSparkCatalog( + String name, CaseInsensitiveStringMap options, Map properties) { + Preconditions.checkArgument( + properties != null, "Iceberg Catalog properties should not be null"); + + String catalogBackend = + properties.get(IcebergPropertiesConstants.GRAVITINO_ICEBERG_CATALOG_BACKEND); + Preconditions.checkArgument( + StringUtils.isNotBlank(catalogBackend), "Iceberg Catalog backend should not be empty."); + + HashMap all = new HashMap<>(options); + + switch (catalogBackend.toLowerCase(Locale.ENGLISH)) { + case IcebergPropertiesConstants.GRAVITINO_ICEBERG_CATALOG_BACKEND_HIVE: + initHiveProperties(catalogBackend, properties, all); + break; + case IcebergPropertiesConstants.GRAVITINO_ICEBERG_CATALOG_BACKEND_JDBC: + initJdbcProperties(catalogBackend, properties, all); + break; + default: + // SparkCatalog does not support Memory type catalog + throw new IllegalArgumentException( + "Unsupported Iceberg Catalog backend: " + catalogBackend); + } + + TableCatalog icebergCatalog = new SparkCatalog(); + icebergCatalog.initialize(name, new CaseInsensitiveStringMap(all)); + + return icebergCatalog; + } + + @Override + protected SparkBaseTable createSparkTable( + Identifier identifier, + Table gravitinoTable, + TableCatalog sparkCatalog, + PropertiesConverter propertiesConverter) { + return new SparkIcebergTable(identifier, gravitinoTable, sparkCatalog, propertiesConverter); + } + + @Override + protected PropertiesConverter getPropertiesConverter() { + return new IcebergPropertiesConverter(); + } private void initHiveProperties( String catalogBackend, @@ -96,50 +148,4 @@ private void initJdbcProperties( icebergProperties.put(IcebergPropertiesConstants.GRAVITINO_ICEBERG_JDBC_PASSWORD, jdbcPassword); icebergProperties.put(IcebergPropertiesConstants.GRAVITINO_ICEBERG_JDBC_DRIVER, jdbcDriver); } - - @Override - public PropertiesConverter 
getPropertiesConverter() { - return new IcebergPropertiesConverter(); - } - - @Override - public SparkBaseTable createSparkTable( - Identifier identifier, - Table gravitinoTable, - TableCatalog sparkCatalog, - PropertiesConverter propertiesConverter) { - return new SparkIcebergTable(identifier, gravitinoTable, sparkCatalog, propertiesConverter); - } - - @Override - public TableCatalog createAndInitSparkCatalog( - String name, CaseInsensitiveStringMap options, Map properties) { - Preconditions.checkArgument( - properties != null, "Iceberg Catalog properties should not be null"); - - String catalogBackend = - properties.get(IcebergPropertiesConstants.GRAVITINO_ICEBERG_CATALOG_BACKEND); - Preconditions.checkArgument( - StringUtils.isNotBlank(catalogBackend), "Iceberg Catalog backend should not be empty."); - - HashMap all = new HashMap<>(options); - - switch (catalogBackend.toLowerCase(Locale.ENGLISH)) { - case IcebergPropertiesConstants.GRAVITINO_ICEBERG_CATALOG_BACKEND_HIVE: - initHiveProperties(catalogBackend, properties, all); - break; - case IcebergPropertiesConstants.GRAVITINO_ICEBERG_CATALOG_BACKEND_JDBC: - initJdbcProperties(catalogBackend, properties, all); - break; - default: - // SparkCatalog does not support Memory type catalog - throw new IllegalArgumentException( - "Unsupported Iceberg Catalog backend: " + catalogBackend); - } - - TableCatalog icebergCatalog = new SparkCatalog(); - icebergCatalog.initialize(name, new CaseInsensitiveStringMap(all)); - - return icebergCatalog; - } } diff --git a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/plugin/GravitinoDriverPlugin.java b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/plugin/GravitinoDriverPlugin.java index 88235c3877e..3f830de2cdc 100644 --- a/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/plugin/GravitinoDriverPlugin.java +++ 
b/spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/plugin/GravitinoDriverPlugin.java @@ -5,13 +5,15 @@ package com.datastrato.gravitino.spark.connector.plugin; +import com.datastrato.gravitino.Catalog; import com.datastrato.gravitino.spark.connector.GravitinoSparkConfig; -import com.datastrato.gravitino.spark.connector.catalog.GravitinoCatalog; import com.datastrato.gravitino.spark.connector.catalog.GravitinoCatalogManager; +import com.datastrato.gravitino.spark.connector.hive.GravitinoHiveCatalog; +import com.datastrato.gravitino.spark.connector.iceberg.GravitinoIcebergCatalog; import com.google.common.base.Preconditions; import java.util.Collections; +import java.util.Locale; import java.util.Map; -import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.apache.spark.SparkConf; import org.apache.spark.SparkContext; @@ -45,8 +47,7 @@ public Map init(SparkContext sc, PluginContext pluginContext) { catalogManager = GravitinoCatalogManager.create(gravitinoUri, metalake); catalogManager.loadRelationalCatalogs(); - Set catalogNames = catalogManager.getCatalogNames(); - registerGravitinoCatalogs(conf, catalogNames); + registerGravitinoCatalogs(conf, catalogManager.getCatalogs()); registerSqlExtensions(); return Collections.emptyMap(); } @@ -58,16 +59,48 @@ public void shutdown() { } } - private void registerGravitinoCatalogs(SparkConf sparkConf, Set catalogNames) { - catalogNames.forEach( - catalogName -> { - String sparkCatalogConfigName = "spark.sql.catalog." 
+ catalogName; - Preconditions.checkArgument( - !sparkConf.contains(sparkCatalogConfigName), - catalogName + " is already registered to SparkCatalogManager"); - sparkConf.set(sparkCatalogConfigName, GravitinoCatalog.class.getName()); - LOG.info("Register {} catalog to Spark catalog manager", catalogName); - }); + private void registerGravitinoCatalogs( + SparkConf sparkConf, Map gravitinoCatalogs) { + gravitinoCatalogs + .entrySet() + .forEach( + entry -> { + String catalogName = entry.getKey(); + Catalog gravitinoCatalog = entry.getValue(); + String provider = gravitinoCatalog.provider(); + try { + registerCatalog(sparkConf, catalogName, provider); + } catch (Exception e) { + LOG.warn("Register catalog {} failed.", catalogName, e); + } + }); + } + + private void registerCatalog(SparkConf sparkConf, String catalogName, String provider) { + if (StringUtils.isBlank(provider)) { + LOG.warn("Skip registering {} because catalog provider is empty.", catalogName); + return; + } + + String catalogClassName; + switch (provider.toLowerCase(Locale.ROOT)) { + case "hive": + catalogClassName = GravitinoHiveCatalog.class.getName(); + break; + case "lakehouse-iceberg": + catalogClassName = GravitinoIcebergCatalog.class.getName(); + break; + default: + LOG.warn("Skip registering {} because {} is not supported yet.", catalogName, provider); + return; + } + + String sparkCatalogConfigName = "spark.sql.catalog." 
+ catalogName; + Preconditions.checkArgument( + !sparkConf.contains(sparkCatalogConfigName), + catalogName + " is already registered to SparkCatalogManager"); + sparkConf.set(sparkCatalogConfigName, catalogClassName); + LOG.info("Register {} catalog to Spark catalog manager.", catalogName); } // Todo inject Iceberg extensions diff --git a/spark-connector/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/catalog/TestTransformTableChange.java b/spark-connector/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/catalog/TestTransformTableChange.java index e5e85b6b785..5a14a65aa14 100644 --- a/spark-connector/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/catalog/TestTransformTableChange.java +++ b/spark-connector/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/catalog/TestTransformTableChange.java @@ -19,7 +19,7 @@ public class TestTransformTableChange { void testTransformSetProperty() { TableChange sparkSetProperty = TableChange.setProperty("key", "value"); com.datastrato.gravitino.rel.TableChange tableChange = - GravitinoCatalog.transformTableChange(sparkSetProperty); + BaseCatalog.transformTableChange(sparkSetProperty); Assertions.assertTrue( tableChange instanceof com.datastrato.gravitino.rel.TableChange.SetProperty); com.datastrato.gravitino.rel.TableChange.SetProperty gravitinoSetProperty = @@ -32,7 +32,7 @@ void testTransformSetProperty() { void testTransformRemoveProperty() { TableChange sparkRemoveProperty = TableChange.removeProperty("key"); com.datastrato.gravitino.rel.TableChange tableChange = - GravitinoCatalog.transformTableChange(sparkRemoveProperty); + BaseCatalog.transformTableChange(sparkRemoveProperty); Assertions.assertTrue( tableChange instanceof com.datastrato.gravitino.rel.TableChange.RemoveProperty); com.datastrato.gravitino.rel.TableChange.RemoveProperty gravitinoRemoveProperty = @@ -48,7 +48,7 @@ void testTransformRenameColumn() { TableChange.RenameColumn 
sparkRenameColumn = (TableChange.RenameColumn) TableChange.renameColumn(oldFiledsName, newFiledName); com.datastrato.gravitino.rel.TableChange gravitinoChange = - GravitinoCatalog.transformTableChange(sparkRenameColumn); + BaseCatalog.transformTableChange(sparkRenameColumn); Assertions.assertTrue( gravitinoChange instanceof com.datastrato.gravitino.rel.TableChange.RenameColumn); @@ -67,7 +67,7 @@ void testTransformUpdateColumnComment() { TableChange.UpdateColumnComment updateColumnComment = (TableChange.UpdateColumnComment) TableChange.updateColumnComment(fieldNames, newComment); com.datastrato.gravitino.rel.TableChange gravitinoChange = - GravitinoCatalog.transformTableChange(updateColumnComment); + BaseCatalog.transformTableChange(updateColumnComment); Assertions.assertTrue( gravitinoChange instanceof com.datastrato.gravitino.rel.TableChange.UpdateColumnComment); @@ -92,7 +92,7 @@ void testTransformAddColumn() { TableChange.addColumn( new String[] {"col1"}, DataTypes.StringType, true, "", first, defaultValue); com.datastrato.gravitino.rel.TableChange gravitinoChangeFirst = - GravitinoCatalog.transformTableChange(sparkAddColumnFirst); + BaseCatalog.transformTableChange(sparkAddColumnFirst); Assertions.assertTrue( gravitinoChangeFirst instanceof com.datastrato.gravitino.rel.TableChange.AddColumn); @@ -112,7 +112,7 @@ void testTransformAddColumn() { TableChange.addColumn( new String[] {"col1"}, DataTypes.StringType, true, "", after, defaultValue); com.datastrato.gravitino.rel.TableChange gravitinoChangeAfter = - GravitinoCatalog.transformTableChange(sparkAddColumnAfter); + BaseCatalog.transformTableChange(sparkAddColumnAfter); Assertions.assertTrue( gravitinoChangeAfter instanceof com.datastrato.gravitino.rel.TableChange.AddColumn); @@ -132,7 +132,7 @@ void testTransformAddColumn() { TableChange.addColumn( new String[] {"col1"}, DataTypes.StringType, true, "", null, defaultValue); com.datastrato.gravitino.rel.TableChange gravitinoChangeDefault = - 
GravitinoCatalog.transformTableChange(sparkAddColumnDefault); + BaseCatalog.transformTableChange(sparkAddColumnDefault); Assertions.assertTrue( gravitinoChangeDefault instanceof com.datastrato.gravitino.rel.TableChange.AddColumn); @@ -153,7 +153,7 @@ void testTransformDeleteColumn() { TableChange.DeleteColumn sparkDeleteColumn = (TableChange.DeleteColumn) TableChange.deleteColumn(new String[] {"col1"}, true); com.datastrato.gravitino.rel.TableChange gravitinoChange = - GravitinoCatalog.transformTableChange(sparkDeleteColumn); + BaseCatalog.transformTableChange(sparkDeleteColumn); Assertions.assertTrue( gravitinoChange instanceof com.datastrato.gravitino.rel.TableChange.DeleteColumn); @@ -170,7 +170,7 @@ void testTransformUpdateColumnType() { (TableChange.UpdateColumnType) TableChange.updateColumnType(new String[] {"col1"}, DataTypes.StringType); com.datastrato.gravitino.rel.TableChange gravitinoChange = - GravitinoCatalog.transformTableChange(sparkUpdateColumnType); + BaseCatalog.transformTableChange(sparkUpdateColumnType); Assertions.assertTrue( gravitinoChange instanceof com.datastrato.gravitino.rel.TableChange.UpdateColumnType); @@ -192,7 +192,7 @@ void testTransformUpdateColumnPosition() { (TableChange.UpdateColumnPosition) TableChange.updateColumnPosition(new String[] {"col1"}, first); com.datastrato.gravitino.rel.TableChange gravitinoChangeFirst = - GravitinoCatalog.transformTableChange(sparkUpdateColumnFirst); + BaseCatalog.transformTableChange(sparkUpdateColumnFirst); Assertions.assertTrue( gravitinoChangeFirst @@ -210,7 +210,7 @@ void testTransformUpdateColumnPosition() { (TableChange.UpdateColumnPosition) TableChange.updateColumnPosition(new String[] {"col1"}, after); com.datastrato.gravitino.rel.TableChange gravitinoChangeAfter = - GravitinoCatalog.transformTableChange(sparkUpdateColumnAfter); + BaseCatalog.transformTableChange(sparkUpdateColumnAfter); Assertions.assertTrue( gravitinoChangeAfter @@ -231,7 +231,7 @@ void 
testTransformUpdateColumnNullability() { (TableChange.UpdateColumnNullability) TableChange.updateColumnNullability(new String[] {"col1"}, true); com.datastrato.gravitino.rel.TableChange gravitinoChange = - GravitinoCatalog.transformTableChange(sparkUpdateColumnNullability); + BaseCatalog.transformTableChange(sparkUpdateColumnNullability); Assertions.assertTrue( gravitinoChange @@ -255,7 +255,7 @@ void testUpdateColumnDefaultValue() { TableChange.updateColumnDefaultValue(fieldNames, newDedauleValue); com.datastrato.gravitino.rel.TableChange gravitinoChange = - GravitinoCatalog.transformTableChange(sparkUpdateColumnDefaultValue); + BaseCatalog.transformTableChange(sparkUpdateColumnDefaultValue); Assertions.assertTrue( gravitinoChange