diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/SparkCommonIT.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/SparkCommonIT.java
index 6b735affd69..c862485f8ef 100644
--- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/SparkCommonIT.java
+++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/SparkCommonIT.java
@@ -442,6 +442,23 @@ void testComplexType() {
     checkTableReadWrite(tableInfo);
   }
 
+  @Test
+  void testTableOptions() {
+    String tableName = "simple_table";
+    dropTableIfExists(tableName);
+    String createTableSql = getCreateSimpleTableString(tableName);
+    createTableSql += " OPTIONS('a'='b')";
+    sql(createTableSql);
+    SparkTableInfo tableInfo = getTableInfo(tableName);
+
+    SparkTableInfoChecker checker =
+        SparkTableInfoChecker.create()
+            .withName(tableName)
+            .withTableProperties(ImmutableMap.of("a", "b"));
+    checker.check(tableInfo);
+    checkTableReadWrite(tableInfo);
+  }
+
   private void checkTableColumns(
       String tableName, List<SparkColumnInfo> columnInfos, SparkTableInfo tableInfo) {
     SparkTableInfoChecker.create()
@@ -451,7 +468,7 @@ private void checkTableColumns(
         .check(tableInfo);
   }
 
-  private void checkTableReadWrite(SparkTableInfo table) {
+  protected void checkTableReadWrite(SparkTableInfo table) {
     String name = table.getTableIdentifier();
     String insertValues =
         table.getColumns().stream()
@@ -514,7 +531,7 @@ private void checkTableReadWrite(SparkTableInfo table) {
     Assertions.assertEquals(checkValues, queryResult.get(0));
   }
 
-  private String getCreateSimpleTableString(String tableName) {
+  protected String getCreateSimpleTableString(String tableName) {
     return String.format(
         "CREATE TABLE %s (id INT COMMENT 'id comment', name STRING COMMENT '', age INT)",
         tableName);
@@ -529,8 +546,13 @@ private List<SparkColumnInfo> getSimpleTableColumn() {
 
   // Helper method to create a simple table; use the corresponding
   // getSimpleTableColumn to check the table columns.
-  private void createSimpleTable(String identifier) {
+  protected void createSimpleTable(String identifier) {
     String createTableSql = getCreateSimpleTableString(identifier);
     sql(createTableSql);
   }
+
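+  // Verifies that the table's data files are really Parquet by pointing the
+  // Parquet reader directly at the table location.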
+  protected void checkParquetFile(SparkTableInfo tableInfo) {
+    String location = tableInfo.getTableLocation();
+    Assertions.assertDoesNotThrow(() -> getSparkSession().read().parquet(location).printSchema());
+  }
 }
diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/SparkEnvIT.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/SparkEnvIT.java
index b0b7fd895e6..3eb2c146538 100644
--- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/SparkEnvIT.java
+++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/SparkEnvIT.java
@@ -31,8 +31,8 @@ public abstract class SparkEnvIT extends SparkUtilIT {
   private final String metalakeName = "test";
 
   private SparkSession sparkSession;
-  private String hiveMetastoreUri;
-  private String gravitinoUri;
+  private String hiveMetastoreUri = "thrift://127.0.0.1:9083";
+  private String gravitinoUri = "http://127.0.0.1:8090";
 
   protected abstract String getCatalogName();
diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/hive/SparkHiveCatalogIT.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/hive/SparkHiveCatalogIT.java
index bce6cb212bf..e5abd90c1ab 100644
--- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/hive/SparkHiveCatalogIT.java
+++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/hive/SparkHiveCatalogIT.java
@@ -5,7 +5,14 @@
 package com.datastrato.gravitino.integration.test.spark.hive;
 
 import com.datastrato.gravitino.integration.test.spark.SparkCommonIT;
+import com.datastrato.gravitino.integration.test.util.spark.SparkTableInfo;
+import com.datastrato.gravitino.integration.test.util.spark.SparkTableInfoChecker;
+import com.datastrato.gravitino.spark.connector.hive.HivePropertyConstants;
+import com.google.common.collect.ImmutableMap;
+import java.util.List;
+import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.TestInstance;
 
 @Tag("gravitino-docker-it")
@@ -21,4 +28,165 @@ protected String getCatalogName() {
   protected String getProvider() {
     return "hive";
   }
+
+  @Test
+  void testHiveDefaultFormat() {
+    String tableName = "hive_default_format_table";
+    dropTableIfExists(tableName);
+    createSimpleTable(tableName);
+    SparkTableInfo tableInfo = getTableInfo(tableName);
+
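+    // With no format specified, Hive falls back to its default TEXTFILE layout:
+    // TextInputFormat, HiveIgnoreKeyTextOutputFormat, and LazySimpleSerDe.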
+    SparkTableInfoChecker checker =
+        SparkTableInfoChecker.create()
+            .withName(tableName)
+            .withTableProperties(
+                ImmutableMap.of(
+                    HivePropertyConstants.SPARK_HIVE_INPUT_FORMAT,
+                    HivePropertyConstants.TEXT_INPUT_FORMAT_CLASS,
+                    HivePropertyConstants.SPARK_HIVE_OUTPUT_FORMAT,
+                    HivePropertyConstants.IGNORE_KEY_OUTPUT_FORMAT_CLASS,
+                    HivePropertyConstants.SPARK_HIVE_SERDE_LIB,
+                    HivePropertyConstants.LAZY_SIMPLE_SERDE_CLASS));
+    checker.check(tableInfo);
+    checkTableReadWrite(tableInfo);
+  }
+
+  @Test
+  void testHiveFormatWithStoredAs() {
+    // TEXTFILE, ORC, PARQUET
+    String tableName = "test_table_property_stored_as_table";
+    dropTableIfExists(tableName);
+    String createTableSql = getCreateSimpleTableString(tableName);
+    createTableSql += " STORED AS PARQUET";
+    sql(createTableSql);
+    SparkTableInfo tableInfo = getTableInfo(tableName);
+
+    SparkTableInfoChecker checker =
+        SparkTableInfoChecker.create()
+            .withName(tableName)
+            .withTableProperties(
+                ImmutableMap.of(
+                    HivePropertyConstants.SPARK_HIVE_INPUT_FORMAT,
+                    HivePropertyConstants.PARQUET_INPUT_FORMAT_CLASS,
+                    HivePropertyConstants.SPARK_HIVE_OUTPUT_FORMAT,
+                    HivePropertyConstants.PARQUET_OUTPUT_FORMAT_CLASS,
+                    HivePropertyConstants.SPARK_HIVE_SERDE_LIB,
+                    HivePropertyConstants.PARQUET_SERDE_CLASS));
+    checker.check(tableInfo);
+    checkTableReadWrite(tableInfo);
+    checkParquetFile(tableInfo);
+  }
+
+  @Test
+  void testHiveFormatWithUsing() {
+    String tableName = "test_hive_format_using_table";
+    dropTableIfExists(tableName);
+    String createTableSql = getCreateSimpleTableString(tableName);
+    // CSV, TXT, ORC, JDBC, PARQUET
+    createTableSql += " USING PARQUET";
+    sql(createTableSql);
+    SparkTableInfo tableInfo = getTableInfo(tableName);
+
+    SparkTableInfoChecker checker =
+        SparkTableInfoChecker.create()
+            .withName(tableName)
+            .withTableProperties(
+                ImmutableMap.of(
+                    HivePropertyConstants.SPARK_HIVE_INPUT_FORMAT,
+                    HivePropertyConstants.PARQUET_INPUT_FORMAT_CLASS,
+                    HivePropertyConstants.SPARK_HIVE_OUTPUT_FORMAT,
+                    HivePropertyConstants.PARQUET_OUTPUT_FORMAT_CLASS,
+                    HivePropertyConstants.SPARK_HIVE_SERDE_LIB,
+                    HivePropertyConstants.PARQUET_SERDE_CLASS));
+    checker.check(tableInfo);
+    checkTableReadWrite(tableInfo);
+    checkParquetFile(tableInfo);
+  }
+
+  @Test
+  void testHivePropertiesWithSerdeRowFormat() {
+    String tableName = "test_hive_row_serde_table";
+    dropTableIfExists(tableName);
+    String createTableSql = getCreateSimpleTableString(tableName);
+    createTableSql =
+        String.format(
+            "%s ROW FORMAT SERDE '%s' WITH SERDEPROPERTIES ('serialization.format'='@', 'field.delim' = ',') STORED AS INPUTFORMAT '%s' OUTPUTFORMAT '%s'",
+            createTableSql,
+            HivePropertyConstants.PARQUET_SERDE_CLASS,
+            HivePropertyConstants.PARQUET_INPUT_FORMAT_CLASS,
+            HivePropertyConstants.PARQUET_OUTPUT_FORMAT_CLASS);
+    sql(createTableSql);
+    SparkTableInfo tableInfo = getTableInfo(tableName);
+
+    SparkTableInfoChecker checker =
+        SparkTableInfoChecker.create()
+            .withName(tableName)
+            .withTableProperties(
+                ImmutableMap.of(
+                    "serialization.format",
+                    "@",
+                    "field.delim",
+                    ",",
+                    HivePropertyConstants.SPARK_HIVE_INPUT_FORMAT,
+                    HivePropertyConstants.PARQUET_INPUT_FORMAT_CLASS,
+                    HivePropertyConstants.SPARK_HIVE_OUTPUT_FORMAT,
+                    HivePropertyConstants.PARQUET_OUTPUT_FORMAT_CLASS,
+                    HivePropertyConstants.SPARK_HIVE_SERDE_LIB,
+                    HivePropertyConstants.PARQUET_SERDE_CLASS));
+    checker.check(tableInfo);
+    checkTableReadWrite(tableInfo);
+    checkParquetFile(tableInfo);
+  }
+
+  /*
+   | DELIMITED [ FIELDS TERMINATED BY fields_terminated_char [ ESCAPED BY escaped_char ] ]
+       [ COLLECTION ITEMS TERMINATED BY collection_items_terminated_char ]
+       [ MAP KEYS TERMINATED BY map_key_terminated_char ]
+       [ LINES TERMINATED BY row_terminated_char ]
+       [ NULL DEFINED AS null_char ]
+  */
+  @Test
+  void testHivePropertiesWithDelimitedRowFormat() {
+    String tableName = "test_hive_row_format_table";
+    dropTableIfExists(tableName);
+    String createTableSql = getCreateSimpleTableString(tableName);
+    createTableSql +=
+        " ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ESCAPED BY ';' "
+            + "COLLECTION ITEMS TERMINATED BY '@' "
+            + "MAP KEYS TERMINATED BY ':' "
+            + "LINES TERMINATED BY '\\n' "
+            + "NULL DEFINED AS 'n' "
+            + "STORED AS TEXTFILE";
+    sql(createTableSql);
+    SparkTableInfo tableInfo = getTableInfo(tableName);
+
+    SparkTableInfoChecker checker =
+        SparkTableInfoChecker.create()
+            .withName(tableName)
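+            // "colelction.delim" is not a typo in this test: it mirrors the
+            // misspelled key Hive itself uses for the collection-items delimiter.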
+            .withTableProperties(
+                ImmutableMap.of(
+                    "field.delim", ",",
+                    "escape.delim", ";",
+                    "mapkey.delim", ":",
+                    "serialization.format", ",",
+                    "colelction.delim", "@"));
+    checker.check(tableInfo);
+    checkTableReadWrite(tableInfo);
+
+    // check that field.delim takes effect
+    List<Object[]> rows =
+        rowsToJava(
+            getSparkSession()
+                .read()
+                .option("delimiter", ",")
+                .csv(tableInfo.getTableLocation())
+                .collectAsList());
+    Assertions.assertEquals(1, rows.size());
+    Object[] row = rows.get(0);
+    Assertions.assertEquals(3, row.length);
+    Assertions.assertEquals("2", row[0]);
+    Assertions.assertEquals("gravitino_it_test", (String) row[1]);
+    Assertions.assertEquals("2", row[2]);
+  }
 }
diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkTableInfo.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkTableInfo.java
index 65e06c977c3..3370b0430a6 100644
--- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkTableInfo.java
+++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkTableInfo.java
@@ -6,6 +6,7 @@
 package com.datastrato.gravitino.integration.test.util.spark;
 
 import com.datastrato.gravitino.spark.connector.ConnectorConstants;
+import com.datastrato.gravitino.spark.connector.hive.HivePropertyConstants;
 import com.datastrato.gravitino.spark.connector.table.SparkBaseTable;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -33,6 +34,10 @@ public String getTableName() {
     return tableName;
   }
 
+  public String getTableLocation() {
+    return tableProperties.get(HivePropertyConstants.GRAVITINO_HIVE_LOCATION);
+  }
+
   // Include database name and table name
   public String getTableIdentifier() {
     if (StringUtils.isNotBlank(database)) {
diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkTableInfoChecker.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkTableInfoChecker.java
index e95730d1ae3..390a9522e28 100644
--- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkTableInfoChecker.java
+++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkTableInfoChecker.java
@@ -8,6 +8,7 @@
 import com.datastrato.gravitino.integration.test.util.spark.SparkTableInfo.SparkColumnInfo;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 import org.junit.jupiter.api.Assertions;
 
 /**
@@ -28,6 +29,7 @@ private enum CheckField {
     NAME,
     COLUMN,
     COMMENT,
+    TABLE_PROPERTY,
   }
 
   public SparkTableInfoChecker withName(String name) {
@@ -48,6 +50,12 @@ public SparkTableInfoChecker withComment(String comment) {
     return this;
   }
 
+  public SparkTableInfoChecker withTableProperties(Map<String, String> properties) {
+    this.expectedTableInfo.setTableProperties(properties);
+    this.checkFields.add(CheckField.TABLE_PROPERTY);
+    return this;
+  }
+
   public void check(SparkTableInfo realTableInfo) {
     checkFields.stream()
         .forEach(
@@ -65,6 +73,17 @@ public void check(SparkTableInfo realTableInfo) {
                 Assertions.assertEquals(
                     expectedTableInfo.getComment(), realTableInfo.getComment());
                 break;
+              case TABLE_PROPERTY:
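+                // Only the expected properties are verified here; extra properties
+                // on the real table are ignored.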
+                Map<String, String> realTableProperties = realTableInfo.getTableProperties();
+                expectedTableInfo
+                    .getTableProperties()
+                    .forEach(
+                        (k, v) -> {
+                          Assertions.assertTrue(
+                              realTableProperties.containsKey(k), k + " does not exist");
+                          Assertions.assertEquals(v, realTableProperties.get(k));
+                        });
+                break;
               default:
                 Assertions.fail(checkField + " not checked");
                 break;
diff --git a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkUtilIT.java b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkUtilIT.java
index 36bdc22ba59..af80228ba39 100644
--- a/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkUtilIT.java
+++ b/integration-test/src/test/java/com/datastrato/gravitino/integration/test/util/spark/SparkUtilIT.java
@@ -110,7 +110,7 @@ protected boolean tableExists(String tableName) {
     }
   }
 
-  private List<Object[]> rowsToJava(List<Row> rows) {
+  protected List<Object[]> rowsToJava(List<Row> rows) {
     return rows.stream().map(this::toJava).collect(Collectors.toList());
   }
diff --git a/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/PropertiesConverter.java b/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/PropertiesConverter.java
index fdcb916c41b..7c370d5377c 100644
--- a/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/PropertiesConverter.java
+++ b/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/PropertiesConverter.java
@@ -5,11 +5,29 @@
 
 package com.datastrato.gravitino.spark.connector;
 
+import com.datastrato.gravitino.spark.connector.hive.HivePropertyConstants;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 /** Transform table properties between Gravitino and Spark. */
 public interface PropertiesConverter {
   Map<String, String> toGravitinoTableProperties(Map<String, String> properties);
 
   Map<String, String> toSparkTableProperties(Map<String, String> properties);
+
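+  // Spark persists OPTIONS('a'='b') from CREATE TABLE as table properties prefixed
+  // with "option." (e.g. "option.a"); strip the prefix so the plain key reaches
+  // Gravitino. If stripping produces a duplicate key, the first value encountered wins.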
+  static Map<String, String> transformOptionProperties(Map<String, String> properties) {
+    return properties.entrySet().stream()
+        .collect(
+            Collectors.toMap(
+                entry -> {
+                  String key = entry.getKey();
+                  if (key.startsWith(HivePropertyConstants.SPARK_OPTION_PREFIX)) {
+                    return key.substring(HivePropertyConstants.SPARK_OPTION_PREFIX.length());
+                  } else {
+                    return key;
+                  }
+                },
+                entry -> entry.getValue(),
+                (existingValue, newValue) -> existingValue));
+  }
 }
diff --git a/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HivePropertiesConverter.java b/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HivePropertiesConverter.java
index 0c816106db2..b3cfa742fe9 100644
--- a/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HivePropertiesConverter.java
+++ b/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HivePropertiesConverter.java
@@ -6,15 +6,64 @@
 package com.datastrato.gravitino.spark.connector.hive;
 
 import com.datastrato.gravitino.spark.connector.PropertiesConverter;
+import com.google.common.collect.ImmutableMap;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
+import java.util.Optional;
 
-/** Transform hive catalog properties between Spark and Gravitino. Will implement in another PR. */
+/** Transform hive catalog properties between Spark and Gravitino. */
 public class HivePropertiesConverter implements PropertiesConverter {
 
+  // Transform Spark format to Gravitino format
+  static final Map<String, String> hiveTableFormatMap =
+      ImmutableMap.of(
+          "sequencefile", "SEQUENCEFILE",
+          "rcfile", "RCFILE",
+          "orc", "ORC",
+          "parquet", "PARQUET",
+          "textfile", "TEXTFILE",
+          "avro", "AVRO");
+
+  static final Map<String, String> sparkToGravitinoPropertyMap =
+      ImmutableMap.of(
+          "hive.output-format",
+          HivePropertyConstants.GRAVITINO_HIVE_OUTPUT_FORMAT,
+          "hive.input-format",
+          HivePropertyConstants.GRAVITINO_HIVE_INPUT_FORMAT,
+          "hive.serde",
+          HivePropertyConstants.GRAVITINO_HIVE_SERDE_LIB);
+
+  /**
+   * CREATE TABLE xxx STORED AS PARQUET saves "hive.stored-as" = "PARQUET" in the table
+   * properties. CREATE TABLE xxx USING PARQUET saves "provider" = "PARQUET" in the table
+   * properties. CREATE TABLE xxx ROW FORMAT SERDE xx STORED AS INPUTFORMAT xx OUTPUTFORMAT xx
+   * saves "hive.input-format", "hive.output-format", and "hive.serde" in the table properties.
+   * CREATE TABLE xxx ROW FORMAT DELIMITED FIELDS TERMINATED xx saves "option.xx" in the table
+   * properties.
+   */
   @Override
   public Map<String, String> toGravitinoTableProperties(Map<String, String> properties) {
-    return new HashMap<>(properties);
+    Map<String, String> gravitinoTableProperties =
+        PropertiesConverter.transformOptionProperties(properties);
+    String provider = properties.get(HivePropertyConstants.SPARK_PROVIDER);
+    String storeAs = properties.get(HivePropertyConstants.SPARK_HIVE_STORED_AS);
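+    // If both STORED AS and USING are specified, STORED AS takes precedence.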
+ */ + +package com.datastrato.gravitino.spark.connector.hive; + +public class HivePropertyConstants { + + public static final String GRAVITINO_HIVE_FORMAT = "format"; + public static final String GRAVITINO_HIVE_LOCATION = "location"; + public static final String GRAVITINO_HIVE_TABLE_TYPE = "table-type"; + public static final String GRAVITINO_HIVE_INPUT_FORMAT = "input-format"; + public static final String GRAVITINO_HIVE_OUTPUT_FORMAT = "output-format"; + public static final String GRAVITINO_HIVE_SERDE_LIB = "serde-lib"; + + public static final String SPARK_PROVIDER = "provider"; + public static final String SPARK_OPTION_PREFIX = "option."; + public static final String SPARK_HIVE_STORED_AS = "hive.stored-as"; + public static final String SPARK_HIVE_INPUT_FORMAT = "input-format"; + public static final String SPARK_HIVE_OUTPUT_FORMAT = "output-format"; + public static final String SPARK_HIVE_SERDE_LIB = "serde-lib"; + + public static final String TEXT_INPUT_FORMAT_CLASS = "org.apache.hadoop.mapred.TextInputFormat"; + public static final String IGNORE_KEY_OUTPUT_FORMAT_CLASS = + "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"; + public static final String LAZY_SIMPLE_SERDE_CLASS = + "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"; + + public static final String SEQUENCEFILE_INPUT_FORMAT_CLASS = + "org.apache.hadoop.mapred.SequenceFileInputFormat"; + public static final String SEQUENCEFILE_OUTPUT_FORMAT_CLASS = + "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"; + + public static final String ORC_INPUT_FORMAT_CLASS = + "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"; + + public static final String ORC_OUTPUT_FORMAT_CLASS = + "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"; + + public static final String ORC_SERDE_CLASS = "org.apache.hadoop.hive.ql.io.orc.OrcSerde"; + + public static final String PARQUET_INPUT_FORMAT_CLASS = + "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat"; + public static final String PARQUET_OUTPUT_FORMAT_CLASS = + "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat"; + public static final String PARQUET_SERDE_CLASS = + "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"; + + private HivePropertyConstants() {} +} diff --git a/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/TestPropertiesConverter.java b/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/TestPropertiesConverter.java new file mode 100644 index 00000000000..d01daecc320 --- /dev/null +++ b/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/TestPropertiesConverter.java @@ -0,0 +1,21 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. 
+ */ + +package com.datastrato.gravitino.spark.connector; + +import com.google.common.collect.ImmutableMap; +import java.util.Map; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class TestPropertiesConverter { + + @Test + void testTransformOption() { + Map properties = ImmutableMap.of("a", "b", "option.a1", "b"); + Map result = PropertiesConverter.transformOptionProperties(properties); + Assertions.assertEquals(ImmutableMap.of("a", "b", "a1", "b"), result); + } +} diff --git a/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/hive/TestHivePropertiesConverter.java b/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/hive/TestHivePropertiesConverter.java new file mode 100644 index 00000000000..2c97ddd67c5 --- /dev/null +++ b/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/hive/TestHivePropertiesConverter.java @@ -0,0 +1,21 @@ +/* + * Copyright 2024 Datastrato Pvt Ltd. + * This software is licensed under the Apache License version 2. + */ + +package com.datastrato.gravitino.spark.connector.hive; + +import org.junit.jupiter.api.Test; + +public class TestHivePropertiesConverter { + + @Test + void testTableFormat() { + // stored as + + // using + + // row format + + } +}