diff --git a/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HivePropertiesConverter.java b/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HivePropertiesConverter.java
index 0b6708dbbf2..ec03e779679 100644
--- a/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HivePropertiesConverter.java
+++ b/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HivePropertiesConverter.java
@@ -17,8 +17,8 @@

 /** Transform hive catalog properties between Spark and Gravitino. */
 public class HivePropertiesConverter implements PropertiesConverter {
-  // Transform Spark format to Gravitino format
-  static final Map<String, String> hiveTableFormatMap =
+  // Transform Spark hive file format to Gravitino hive file format
+  static final Map<String, String> fileFormatMap =
       ImmutableMap.of(
           "sequencefile", "SEQUENCEFILE",
           "rcfile", "RCFILE",
@@ -49,15 +49,15 @@ public Map<String, String> toGravitinoTableProperties(Map<String, String> proper
         PropertiesConverter.transformOptionProperties(properties);
     String provider = gravitinoTableProperties.get(TableCatalog.PROP_PROVIDER);
     String storeAs = gravitinoTableProperties.get(HivePropertyConstants.SPARK_HIVE_STORED_AS);
-    String sparkHiveTableFormat = Optional.ofNullable(storeAs).orElse(provider);
-    if (sparkHiveTableFormat != null) {
+    String fileFormat = Optional.ofNullable(storeAs).orElse(provider);
+    if (fileFormat != null) {
       String gravitinoFormat =
-          hiveTableFormatMap.get(sparkHiveTableFormat.toLowerCase(Locale.ROOT));
+          fileFormatMap.get(fileFormat.toLowerCase(Locale.ROOT));
       if (gravitinoFormat != null) {
         gravitinoTableProperties.put(HivePropertyConstants.GRAVITINO_HIVE_FORMAT, gravitinoFormat);
       } else {
         throw new NotSupportedException(
-            "Doesn't support spark hive table format: " + sparkHiveTableFormat);
+            "Doesn't support hive file format: " + fileFormat);
       }
     }
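
Not part of the patch: a minimal, self-contained sketch of the lookup behavior the rename touches, for reading without the surrounding connector code. The property keys ("stored-as", "provider"), the class and method names, and the map entries beyond "sequencefile"/"rcfile" are illustrative assumptions, and UnsupportedOperationException stands in for the connector's NotSupportedException.

```java
import java.util.Locale;
import java.util.Map;
import java.util.Optional;

// Illustrative sketch only; keys and extra format entries are assumptions.
public class FileFormatLookupSketch {
  // Mirrors fileFormatMap: normalize a Spark-side format name to the Gravitino hive file format.
  static final Map<String, String> fileFormatMap =
      Map.of(
          "sequencefile", "SEQUENCEFILE",
          "rcfile", "RCFILE",
          "parquet", "PARQUET"); // entries beyond the two shown in the hunk are assumed

  static String toGravitinoFileFormat(Map<String, String> properties) {
    // STORED AS takes precedence over the USING/provider value, as in the patch.
    String fileFormat =
        Optional.ofNullable(properties.get("stored-as")).orElse(properties.get("provider"));
    if (fileFormat == null) {
      return null; // no format hint given; caller leaves the property unset
    }
    String gravitinoFormat = fileFormatMap.get(fileFormat.toLowerCase(Locale.ROOT));
    if (gravitinoFormat == null) {
      throw new UnsupportedOperationException("Doesn't support hive file format: " + fileFormat);
    }
    return gravitinoFormat;
  }

  public static void main(String[] args) {
    System.out.println(toGravitinoFileFormat(Map.of("stored-as", "Parquet"))); // PARQUET
    System.out.println(toGravitinoFileFormat(Map.of("provider", "rcfile")));   // RCFILE
  }
}
```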