Skip to content

Commit

Permalink
xx
Browse files Browse the repository at this point in the history
  • Loading branch information
FANNG1 committed Mar 25, 2024
1 parent 4d4fa44 commit 2d7c2ca
Show file tree
Hide file tree
Showing 5 changed files with 17 additions and 17 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
package com.datastrato.gravitino.integration.test.util.spark;

import com.datastrato.gravitino.spark.connector.ConnectorConstants;
import com.datastrato.gravitino.spark.connector.hive.HivePropertyConstants;
import com.datastrato.gravitino.spark.connector.table.SparkBaseTable;
import java.util.ArrayList;
import java.util.Arrays;
Expand All @@ -18,6 +17,7 @@
import javax.ws.rs.NotSupportedException;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.connector.catalog.TableCatalog;
import org.apache.spark.sql.connector.expressions.BucketTransform;
import org.apache.spark.sql.connector.expressions.IdentityTransform;
import org.apache.spark.sql.connector.expressions.SortedBucketTransform;
Expand Down Expand Up @@ -45,7 +45,7 @@ public String getTableName() {
}

/**
 * Returns the table's storage location property, or {@code null} if the
 * "location" property is not set on this table.
 */
public String getTableLocation() {
  // Use Spark's standard TableCatalog.PROP_LOCATION key; the old
  // HivePropertyConstants.GRAVITINO_HIVE_LOCATION constant was removed in this
  // commit, so the stale duplicate return referencing it is dropped.
  return tableProperties.get(TableCatalog.PROP_LOCATION);
}

// Include database name and table name
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@

package com.datastrato.gravitino.spark.connector;

import com.datastrato.gravitino.spark.connector.hive.HivePropertyConstants;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.spark.sql.connector.catalog.TableCatalog;

/** Transform table properties between Gravitino and Spark. */
public interface PropertiesConverter {
Expand All @@ -22,8 +22,8 @@ static Map<String, String> transformOptionProperties(Map<String, String> propert
Collectors.toMap(
entry -> {
String key = entry.getKey();
if (key.startsWith(HivePropertyConstants.SPARK_OPTION_PREFIX)) {
return key.substring(HivePropertyConstants.SPARK_OPTION_PREFIX.length());
if (key.startsWith(TableCatalog.OPTION_PREFIX)) {
return key.substring(TableCatalog.OPTION_PREFIX.length());
} else {
return key;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import java.util.Map;
import java.util.Optional;
import javax.ws.rs.NotSupportedException;
import org.apache.spark.sql.connector.catalog.TableCatalog;

/** Transform hive catalog properties between Spark and Gravitino. */
public class HivePropertiesConverter implements PropertiesConverter {
Expand Down Expand Up @@ -46,15 +47,17 @@ public class HivePropertiesConverter implements PropertiesConverter {
public Map<String, String> toGravitinoTableProperties(Map<String, String> properties) {
Map<String, String> gravitinoTableProperties =
PropertiesConverter.transformOptionProperties(properties);
String provider = gravitinoTableProperties.get(HivePropertyConstants.SPARK_PROVIDER);
String storeAs = gravitinoTableProperties.remove(HivePropertyConstants.SPARK_HIVE_STORED_AS);
String sparkFormat = Optional.ofNullable(storeAs).orElse(provider);
if (sparkFormat != null) {
String gravitinoFormat = hiveTableFormatMap.get(sparkFormat.toLowerCase(Locale.ROOT));
String provider = gravitinoTableProperties.get(TableCatalog.PROP_PROVIDER);
String storeAs = gravitinoTableProperties.get(HivePropertyConstants.SPARK_HIVE_STORED_AS);
String sparkHiveTableFormat = Optional.ofNullable(storeAs).orElse(provider);
if (sparkHiveTableFormat != null) {
String gravitinoFormat =
hiveTableFormatMap.get(sparkHiveTableFormat.toLowerCase(Locale.ROOT));
if (gravitinoFormat != null) {
gravitinoTableProperties.put(HivePropertyConstants.GRAVITINO_HIVE_FORMAT, gravitinoFormat);
} else {
throw new NotSupportedException("Doesn't support spark format: " + sparkFormat);
throw new NotSupportedException(
"Doesn't support spark hive table format: " + sparkHiveTableFormat);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,10 @@

public class HivePropertyConstants {
public static final String GRAVITINO_HIVE_FORMAT = "format";
public static final String GRAVITINO_HIVE_LOCATION = "location";
public static final String GRAVITINO_HIVE_TABLE_TYPE = "table-type";
public static final String GRAVITINO_HIVE_INPUT_FORMAT = "input-format";
public static final String GRAVITINO_HIVE_OUTPUT_FORMAT = "output-format";
public static final String GRAVITINO_HIVE_SERDE_LIB = "serde-lib";

public static final String SPARK_PROVIDER = "provider";
public static final String SPARK_OPTION_PREFIX = "option.";
public static final String SPARK_HIVE_STORED_AS = "hive.stored-as";
public static final String SPARK_HIVE_INPUT_FORMAT = "input-format";
public static final String SPARK_HIVE_OUTPUT_FORMAT = "output-format";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import javax.ws.rs.NotSupportedException;
import org.apache.spark.sql.connector.catalog.TableCatalog;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
Expand All @@ -34,14 +35,14 @@ void testTableFormat() {
// using
hiveProperties =
hivePropertiesConverter.toGravitinoTableProperties(
ImmutableMap.of(HivePropertyConstants.SPARK_PROVIDER, "PARQUET"));
ImmutableMap.of(TableCatalog.PROP_PROVIDER, "PARQUET"));
Assertions.assertEquals(
hiveProperties.get(HivePropertyConstants.GRAVITINO_HIVE_FORMAT), "PARQUET");
Assertions.assertThrowsExactly(
NotSupportedException.class,
() ->
hivePropertiesConverter.toGravitinoTableProperties(
ImmutableMap.of(HivePropertyConstants.SPARK_PROVIDER, "notExists")));
ImmutableMap.of(TableCatalog.PROP_PROVIDER, "notExists")));

// row format
hiveProperties =
Expand Down

0 comments on commit 2d7c2ca

Please sign in to comment.