Merge branch 'iceberg-load-properties' of github.com:caican00/gravitino into iceberg-load-properties
caican00 committed Apr 16, 2024
2 parents bc96f6b + 575e4ce commit 56d0e4d
Showing 77 changed files with 2,562 additions and 431 deletions.
@@ -4,11 +4,16 @@
*/
package com.datastrato.gravitino.authorization;

import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;

/** The helper class for {@link SecurableObject}. */
public class SecurableObjects {

private static final Splitter DOT = Splitter.on('.');

/**
* Create the {@link SecurableObject} with the given names.
*
@@ -186,4 +191,23 @@ public boolean equals(Object other) {
&& Objects.equals(name, otherSecurableObject.name());
}
}

/**
* Create a {@link SecurableObject} from the given identifier string.
*
* @param securableObjectIdentifier The identifier string
* @return The created {@link SecurableObject}
*/
public static SecurableObject parse(String securableObjectIdentifier) {
if ("*".equals(securableObjectIdentifier)) {
return SecurableObjects.ofAllCatalogs();
}

if (StringUtils.isBlank(securableObjectIdentifier)) {
throw new IllegalArgumentException("securable object identifier can't be blank");
}

Iterable<String> parts = DOT.split(securableObjectIdentifier);
return SecurableObjects.of(Iterables.toArray(parts, String.class));
}
}
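For illustration, a minimal usage sketch of the new parse helper. It relies only on the factories, the dot-separated identifier format, and the SecurableObject.name() accessor shown in this diff; the example class itself and the assumption that SecurableObject sits in the same com.datastrato.gravitino.authorization package as SecurableObjects are not part of this commit.

// Illustrative only; not part of this commit.
import com.datastrato.gravitino.authorization.SecurableObject;
import com.datastrato.gravitino.authorization.SecurableObjects;

public class SecurableObjectsParseExample {
  public static void main(String[] args) {
    // "*" maps to the all-catalogs securable object.
    SecurableObject allCatalogs = SecurableObjects.parse("*");
    // Dot-separated identifiers are split into name parts, e.g. catalog.schema.table.
    SecurableObject table = SecurableObjects.parse("catalog1.schema1.table1");
    System.out.println(allCatalogs.name() + " / " + table.name());
    // Blank identifiers are rejected with an IllegalArgumentException.
    try {
      SecurableObjects.parse("   ");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage()); // "securable object identifier can't be blank"
    }
  }
}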
@@ -16,4 +16,13 @@ public CapabilityResult columnNotNull() {
"The NOT NULL constraint for column is only supported since Hive 3.0, "
+ "but the current Gravitino Hive catalog only supports Hive 2.x.");
}

@Override
public CapabilityResult columnDefaultValue() {
// The DEFAULT constraint for column is supported since Hive 3.0, see
// https://issues.apache.org/jira/browse/HIVE-18726
return CapabilityResult.unsupported(
"The DEFAULT constraint for column is only supported since Hive 3.0, "
+ "but the current Gravitino Hive catalog only supports Hive 2.x.");
}
}
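The hunks that follow remove the per-catalog validateColumnDefaultValue checks from the Hive and Iceberg table operations; a Capability override like the one above declares the restriction instead. Below is a hedged sketch, assuming CapabilityResult exposes supported() and unsupportedMessage() accessors (only the unsupported(String) factory appears in this diff), of how a caller could turn such a result back into the IllegalArgumentException the removed checks used to throw.

// Hedged sketch only; the accessor names are assumptions, not taken from this diff.
import com.datastrato.gravitino.connector.capability.Capability;
import com.datastrato.gravitino.connector.capability.CapabilityResult;

class CapabilityChecks {
  static void checkColumnDefaultValueSupported(Capability capability, String fieldName) {
    CapabilityResult result = capability.columnDefaultValue();
    if (!result.supported()) {
      // Mirrors the IllegalArgumentException the removed per-catalog checks threw.
      throw new IllegalArgumentException(
          result.unsupportedMessage() + " Illegal column: " + fieldName);
    }
  }
}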
@@ -35,7 +35,6 @@
import com.datastrato.gravitino.rel.Table;
import com.datastrato.gravitino.rel.TableCatalog;
import com.datastrato.gravitino.rel.TableChange;
import com.datastrato.gravitino.rel.expressions.Expression;
import com.datastrato.gravitino.rel.expressions.NamedReference;
import com.datastrato.gravitino.rel.expressions.distributions.Distribution;
import com.datastrato.gravitino.rel.expressions.distributions.Distributions;
@@ -591,11 +590,6 @@ private void validateColumnChangeForAlter(
|| !partitionFields.contains(fieldToAdd),
"Cannot alter partition column: " + fieldToAdd);

if (c instanceof TableChange.UpdateColumnDefaultValue) {
throw new IllegalArgumentException(
"Hive does not support altering column default value");
}

if (c instanceof TableChange.UpdateColumnPosition
&& afterPartitionColumn(
partitionFields, ((TableChange.UpdateColumnPosition) c).getPosition())) {
@@ -682,12 +676,6 @@ public Table createTable(
validatePartitionForCreate(columns, partitioning);
validateDistributionAndSort(distribution, sortOrders);

Arrays.stream(columns)
.forEach(
c -> {
validateColumnDefaultValue(c.name(), c.defaultValue());
});

TableType tableType = (TableType) tablePropertiesMetadata.getOrDefault(properties, TABLE_TYPE);
Preconditions.checkArgument(
SUPPORT_TABLE_TYPES.contains(tableType.name()),
@@ -784,8 +772,6 @@ public Table alterTable(NameIdentifier tableIdent, TableChange... changes)

if (change instanceof TableChange.AddColumn) {
TableChange.AddColumn addColumn = (TableChange.AddColumn) change;
String fieldName = String.join(".", addColumn.fieldName());
validateColumnDefaultValue(fieldName, addColumn.getDefaultValue());
doAddColumn(cols, addColumn);

} else if (change instanceof TableChange.DeleteColumn) {
@@ -803,10 +789,6 @@ public Table alterTable(NameIdentifier tableIdent, TableChange... changes)
} else if (change instanceof TableChange.UpdateColumnType) {
doUpdateColumnType(cols, (TableChange.UpdateColumnType) change);

} else if (change instanceof TableChange.UpdateColumnDefaultValue) {
throw new IllegalArgumentException(
"Hive does not support altering column default value");

} else if (change instanceof TableChange.UpdateColumnAutoIncrement) {
throw new IllegalArgumentException(
"Hive does not support altering column auto increment");
@@ -854,17 +836,6 @@ public Table alterTable(NameIdentifier tableIdent, TableChange... changes)
}
}

private void validateColumnDefaultValue(String fieldName, Expression defaultValue) {
// The DEFAULT constraint for column is supported since Hive3.0, see
// https://issues.apache.org/jira/browse/HIVE-18726
if (!defaultValue.equals(Column.DEFAULT_VALUE_NOT_SET)) {
throw new IllegalArgumentException(
"The DEFAULT constraint for column is only supported since Hive 3.0, "
+ "but the current Gravitino Hive catalog only supports Hive 2.x. Illegal column: "
+ fieldName);
}
}

private int columnPosition(List<FieldSchema> columns, TableChange.ColumnPosition position) {
Preconditions.checkArgument(position != null, "Column position cannot be null");
if (position instanceof TableChange.After) {
@@ -25,13 +25,11 @@
import com.datastrato.gravitino.rel.expressions.NamedReference;
import com.datastrato.gravitino.rel.expressions.distributions.Distribution;
import com.datastrato.gravitino.rel.expressions.distributions.Distributions;
import com.datastrato.gravitino.rel.expressions.literals.Literals;
import com.datastrato.gravitino.rel.expressions.sorts.NullOrdering;
import com.datastrato.gravitino.rel.expressions.sorts.SortDirection;
import com.datastrato.gravitino.rel.expressions.sorts.SortOrder;
import com.datastrato.gravitino.rel.expressions.sorts.SortOrders;
import com.datastrato.gravitino.rel.expressions.transforms.Transform;
import com.datastrato.gravitino.rel.expressions.transforms.Transforms;
import com.datastrato.gravitino.rel.types.Types;
import com.google.common.collect.Maps;
import java.time.Instant;
@@ -201,34 +199,6 @@ public void testCreateHiveTable() {
distribution,
sortOrders));
Assertions.assertTrue(exception.getMessage().contains("Table already exists"));

HiveColumn withDefault =
HiveColumn.builder()
.withName("col_3")
.withType(Types.ByteType.get())
.withComment(HIVE_COMMENT)
.withNullable(true)
.withDefaultValue(Literals.NULL)
.build();
exception =
Assertions.assertThrows(
IllegalArgumentException.class,
() ->
tableCatalog.createTable(
tableIdentifier,
new Column[] {withDefault},
HIVE_COMMENT,
properties,
Transforms.EMPTY_TRANSFORM,
distribution,
sortOrders));
Assertions.assertTrue(
exception
.getMessage()
.contains(
"The DEFAULT constraint for column is only supported since Hive 3.0, "
+ "but the current Gravitino Hive catalog only supports Hive 2.x"),
"The exception message is: " + exception.getMessage());
}

@Test
@@ -447,32 +417,6 @@ public void testAlterHiveTable() {
() -> tableCatalog.alterTable(tableIdentifier, tableChange6));
Assertions.assertTrue(exception.getMessage().contains("Cannot add column with duplicate name"));

TableChange tableChange8 =
TableChange.addColumn(
new String[] {"col_3"}, Types.ByteType.get(), "comment", Literals.NULL);
exception =
Assertions.assertThrows(
IllegalArgumentException.class,
() -> tableCatalog.alterTable(tableIdentifier, tableChange8));
Assertions.assertTrue(
exception
.getMessage()
.contains(
"The DEFAULT constraint for column is only supported since Hive 3.0, "
+ "but the current Gravitino Hive catalog only supports Hive 2.x"),
"The exception message is: " + exception.getMessage());

TableChange tableChange9 =
TableChange.updateColumnDefaultValue(
new String[] {"col_1"}, Literals.of("0", Types.ByteType.get()));
exception =
Assertions.assertThrows(
IllegalArgumentException.class,
() -> tableCatalog.alterTable(tableIdentifier, tableChange9));

Assertions.assertEquals(
"Hive does not support altering column default value", exception.getMessage());

// test alter
tableCatalog.alterTable(
tableIdentifier,
@@ -507,6 +507,30 @@ public void testCreateHiveTable() throws TException, InterruptedException {
.contains(
"The NOT NULL constraint for column is only supported since Hive 3.0, "
+ "but the current Gravitino Hive catalog only supports Hive 2.x"));

// test column default value
Column withDefault =
Column.of(
"default_column", Types.StringType.get(), "default column", true, false, Literals.NULL);
exception =
assertThrows(
IllegalArgumentException.class,
() ->
catalog
.asTableCatalog()
.createTable(
nameIdentifier,
new Column[] {withDefault},
TABLE_COMMENT,
properties,
Transforms.EMPTY_TRANSFORM));
Assertions.assertTrue(
exception
.getMessage()
.contains(
"The DEFAULT constraint for column is only supported since Hive 3.0, "
+ "but the current Gravitino Hive catalog only supports Hive 2.x"),
"The exception message is: " + exception.getMessage());
}

@Test
@@ -1134,6 +1158,20 @@ public void testAlterHiveTable() throws TException, InterruptedException {
"The NOT NULL constraint for column is only supported since Hive 3.0,"
+ " but the current Gravitino Hive catalog only supports Hive 2.x. Illegal column: hive_col_name1"));

// test update column default value exception
TableChange updateDefaultValue =
TableChange.updateColumnDefaultValue(new String[] {HIVE_COL_NAME1}, Literals.NULL);
exception =
assertThrows(
IllegalArgumentException.class, () -> tableCatalog.alterTable(id, updateDefaultValue));
Assertions.assertTrue(
exception
.getMessage()
.contains(
"The DEFAULT constraint for column is only supported since Hive 3.0, "
+ "but the current Gravitino Hive catalog only supports Hive 2.x"),
"The exception message is: " + exception.getMessage());

// test updateColumnPosition exception
Column col1 = Column.of("name", Types.StringType.get(), "comment");
Column col2 = Column.of("address", Types.StringType.get(), "comment");
@@ -6,6 +6,7 @@

import com.datastrato.gravitino.connector.BaseCatalog;
import com.datastrato.gravitino.connector.CatalogOperations;
import com.datastrato.gravitino.connector.capability.Capability;
import com.datastrato.gravitino.rel.SupportsSchemas;
import com.datastrato.gravitino.rel.TableCatalog;
import java.util.Map;
@@ -31,6 +32,11 @@ protected CatalogOperations newOps(Map<String, String> config) {
return ops;
}

@Override
public Capability newCapability() {
return new IcebergCatalogCapability();
}

/** @return The Iceberg catalog operations as {@link IcebergCatalogOperations}. */
@Override
public SupportsSchemas asSchemas() {
@@ -0,0 +1,17 @@
/*
* Copyright 2024 Datastrato Pvt Ltd.
* This software is licensed under the Apache License version 2.
*/
package com.datastrato.gravitino.catalog.lakehouse.iceberg;

import com.datastrato.gravitino.connector.capability.Capability;
import com.datastrato.gravitino.connector.capability.CapabilityResult;

public class IcebergCatalogCapability implements Capability {
@Override
public CapabilityResult columnDefaultValue() {
// Iceberg column default value is WIP, see
// https://github.com/apache/iceberg/pull/4525
return CapabilityResult.unsupported("Iceberg does not support column default value.");
}
}
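For illustration, a small hypothetical JUnit 5 test for the new capability class, exercising only the override above; the supported() and unsupportedMessage() accessors on CapabilityResult are assumed here, since this diff only shows the unsupported(String) factory.

// Hypothetical test sketch; not part of this commit.
import com.datastrato.gravitino.catalog.lakehouse.iceberg.IcebergCatalogCapability;
import com.datastrato.gravitino.connector.capability.CapabilityResult;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

class TestIcebergCatalogCapability {
  @Test
  void testColumnDefaultValueUnsupported() {
    CapabilityResult result = new IcebergCatalogCapability().columnDefaultValue();
    // The override above always reports column default values as unsupported.
    Assertions.assertFalse(result.supported());
    Assertions.assertTrue(
        result.unsupportedMessage().contains("Iceberg does not support column default value"));
  }
}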
@@ -481,16 +481,13 @@ public Table createTable(
IcebergColumn[] icebergColumns =
Arrays.stream(columns)
.map(
column -> {
IcebergTableOpsHelper.validateColumnDefaultValue(
column.name(), column.defaultValue());
return IcebergColumn.builder()
.withName(column.name())
.withType(column.dataType())
.withComment(column.comment())
.withNullable(column.nullable())
.build();
})
column ->
IcebergColumn.builder()
.withName(column.name())
.withType(column.dataType())
.withComment(column.comment())
.withNullable(column.nullable())
.build())
.toArray(IcebergColumn[]::new);

IcebergTable createdTable =
@@ -7,7 +7,6 @@

import com.datastrato.gravitino.NameIdentifier;
import com.datastrato.gravitino.catalog.lakehouse.iceberg.converter.ConvertUtil;
import com.datastrato.gravitino.rel.Column;
import com.datastrato.gravitino.rel.TableChange;
import com.datastrato.gravitino.rel.TableChange.AddColumn;
import com.datastrato.gravitino.rel.TableChange.After;
@@ -22,7 +21,6 @@
import com.datastrato.gravitino.rel.TableChange.UpdateColumnPosition;
import com.datastrato.gravitino.rel.TableChange.UpdateColumnType;
import com.datastrato.gravitino.rel.TableChange.UpdateComment;
import com.datastrato.gravitino.rel.expressions.Expression;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
@@ -193,9 +191,6 @@ private void doAddColumn(
parentStruct = icebergTableSchema.asStruct();
}

validateColumnDefaultValue(
String.join(".", addColumn.fieldName()), addColumn.getDefaultValue());

if (addColumn.isAutoIncrement()) {
throw new IllegalArgumentException("Iceberg doesn't support auto increment column");
}
@@ -259,8 +254,6 @@ private void alterTableColumn(
icebergUpdateSchema, (TableChange.UpdateColumnNullability) change);
} else if (change instanceof TableChange.UpdateColumnAutoIncrement) {
throw new IllegalArgumentException("Iceberg doesn't support auto increment column");
} else if (change instanceof TableChange.UpdateColumnDefaultValue) {
throw new IllegalArgumentException("Iceberg doesn't support update column default value");
} else {
throw new NotSupportedException(
"Iceberg doesn't support " + change.getClass().getSimpleName() + " for now");
@@ -269,14 +262,6 @@
icebergUpdateSchema.commit();
}

public static void validateColumnDefaultValue(String fieldName, Expression defaultValue) {
// Iceberg column default value is WIP, see
// https://github.com/apache/iceberg/pull/4525
Preconditions.checkArgument(
defaultValue.equals(Column.DEFAULT_VALUE_NOT_SET),
"Iceberg does not support column default value. Illegal column: " + fieldName);
}

public IcebergTableChange buildIcebergTableChanges(
NameIdentifier gravitinoNameIdentifier, TableChange... tableChanges) {
