Always allow unquoted keywords as column names (#539)
* always allow unquoted keywords as column names

* add comment + update unit test
KevinGe00 authored Sep 26, 2024
1 parent 6172733 commit b507761
Showing 5 changed files with 12 additions and 54 deletions.
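
The net effect: coral-hive now parses reserved keywords such as timestamp as column names even when they are not backquoted, regardless of how the view was created. A minimal end-to-end sketch, reusing the metastore client msc, the toSqlNode API, and the test view from the updated test below (variable names are illustrative, and exception handling is omitted):

HiveToRelConverter hiveToRelConverter = new HiveToRelConverter(msc);
Table view = msc.getTable("test", "quoted_reserved_keyword_view");
// Strip the backquotes so the view text contains a bare reserved keyword as a column alias.
String unquotedSql = view.getViewExpandedText().replaceAll("`", "");
// With this commit the unquoted keyword parses; previously this required the view to be
// marked as Spark-created via the spark.sql.create.version table property.
SqlNode sqlNode = hiveToRelConverter.toSqlNode(unquotedSql, view);
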
@@ -657,17 +657,13 @@ import org.slf4j.LoggerFactory;
     this.hiveConf = hiveConf;
   }
   protected boolean useSQL11ReservedKeywordsForIdentifier() {
-    try {
-      /*
-       * Use the config string hive.support.sql11.reserved.keywords directly as
-       * HiveConf.ConfVars.HIVE_SUPPORT_SQL11_RESERVED_KEYWORDS might not be available in the hive-common present in the
-       * classpath during translation triggering the exception path defaulting to false
-       */
-      return !hiveConf.get("hive.support.sql11.reserved.keywords").equalsIgnoreCase("true");
-    } catch (Throwable throwable) {
-      LOG.warn(throwable.getMessage());
-      return false;
-    }
+    /*
+     * This enables the translation of keywords as column names without adding backquotes. This is required for translating views
+     * created using spark engine as certain keywords in hive like timestamp are not keywords in spark. This should return false when coral-spark becomes
+     * a supported LHS for translations.
+     */
+    return true;
   }
 }
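
A hedged illustration of the keyword asymmetry the new comment describes, using the same column alias as the tests (the query string below is illustrative, not part of the commit):

// `timestamp` is a reserved keyword in Hive's SQL:2011 mode but an ordinary identifier in
// Spark SQL by default, so a Spark-created view can store unquoted text like this:
String sparkStyleViewText = "SELECT 1 AS timestamp FROM test.tableOne";
// Returning true here unconditionally lets such keywords be used as plain column names;
// previously this depended on hive.support.sql11.reserved.keywords and fell back to false
// whenever that config could not be read.
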
@@ -8,7 +8,6 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 import java.util.Optional;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -105,20 +104,8 @@ public SqlNode processSql(String sql) {
     return process(sql, null);
   }
 
-  /**
-   * Returns true if the view is created using spark sql. This relies on the presence of the
-   * spark.sql.create.version property in the views when created using spark sql.
-   *
-   * @param hiveView
-   * @return true if the view is created using spark sql
-   */
-  private static boolean isCreatedUsingSpark(Table hiveView) {
-    Map<String, String> tableParams = hiveView.getParameters();
-    return tableParams != null && tableParams.containsKey("spark.sql.create.version");
-  }
-
   public SqlNode process(String sql, @Nullable Table hiveView) {
-    ParseDriver pd = new CoralParseDriver(hiveView != null && isCreatedUsingSpark(hiveView));
+    ParseDriver pd = new CoralParseDriver();
     try {
       ASTNode root = pd.parse(sql);
       return processAST(root, hiveView);
@@ -13,26 +13,13 @@
 import org.antlr.runtime.TokenRewriteStream;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
 
 
 public class CoralParseDriver extends ParseDriver {
 
   private static final Log LOG =
       LogFactory.getLog("com.linkedin.coral.hive.hive2rel.parsetree.parser.CoralParseDriver");
 
-  private boolean useSQL11ReservedKeywordsForIdentifier;
-
-  public CoralParseDriver(boolean useSQL11ReservedKeywordsForIdentifier) {
-    super();
-    this.useSQL11ReservedKeywordsForIdentifier = useSQL11ReservedKeywordsForIdentifier;
-  }
-
-  public CoralParseDriver() {
-    super();
-    this.useSQL11ReservedKeywordsForIdentifier = false;
-  }
-
   @Override
   public ASTNode parse(String command) throws ParseException {
     if (LOG.isDebugEnabled()) {
@@ -42,17 +29,6 @@ public ASTNode parse(String command) throws ParseException {
     HiveLexerCoral lexer = new HiveLexerCoral(new ANTLRNoCaseStringStream(command));
     TokenRewriteStream tokens = new TokenRewriteStream(lexer);
     HiveParser parser = new HiveParser(tokens);
-    HiveConf hiveConf = new HiveConf();
-    /*
-     * This enables usage of keywords as column names without adding backquotes. This is required for translating views
-     * created using spark engine as certain keywords in hive like timestamp are not keywords in spark. This will
-     * result in creation of views without backquoting those keywords. This will be removed when coral-spark becomes
-     * a supported LHS for translations.
-     */
-    if (useSQL11ReservedKeywordsForIdentifier) {
-      hiveConf.set("hive.support.sql11.reserved.keywords", "false");
-      parser.setHiveConf(hiveConf);
-    }
     parser.setTreeAdaptor(adaptor);
     HiveParser.statement_return r = null;
     try {
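
With the HiveConf plumbing removed, the parse driver needs no configuration to accept reserved keywords as identifiers. A minimal usage sketch of the simplified class (the query string is illustrative; ParseException handling omitted):

CoralParseDriver parseDriver = new CoralParseDriver();
// The bare keyword alias now parses directly, with no backquotes and no HiveConf involved.
ASTNode ast = parseDriver.parse("SELECT 1 AS timestamp FROM test.tableOne");
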
@@ -174,9 +174,8 @@ public static TestHive setupDefaultHive(HiveConf conf) throws IOException {
     driver.run("CREATE VIEW IF NOT EXISTS view_schema_evolve_wrapper AS SELECT * from view_schema_evolve");
     driver.run("ALTER TABLE schema_evolve CHANGE COLUMN b b array<struct<b1:string, b2:double, b3:int>>");
 
-    driver.run("CREATE OR REPLACE VIEW test.spark_created_view AS SELECT 1 AS `timestamp` FROM test.tableOne");
-    // Simulate the creation of view using spark by setting the corresponding table property of the view.
-    driver.run("ALTER VIEW test.spark_created_view SET TBLPROPERTIES ('spark.sql.create.version'='3.1.1')");
+    driver.run(
+        "CREATE OR REPLACE VIEW test.quoted_reserved_keyword_view AS SELECT 1 AS `timestamp` FROM test.tableOne");
 
     CommandProcessorResponse response = driver
         .run("create function test_tableOneView_LessThanHundred as 'com.linkedin.coral.hive.hive2rel.CoralTestUDF'");
@@ -236,12 +236,12 @@ public void testUnsupportedOuterExplodeWithoutColumns() {
   }
 
   /**
-   * Validates if coral-hive can translate views with unquoted reserved keywords when the views are created using spark.
+   * Validates if coral-hive can translate views with unquoted reserved keywords as column names.
    */
   @Test
   public void testUnquotedKeywordAsColumnName() {
     HiveToRelConverter hiveToRelConverter = new HiveToRelConverter(msc);
-    Table table = msc.getTable("test", "spark_created_view");
+    Table table = msc.getTable("test", "quoted_reserved_keyword_view");
     // Remove the backquotes associated with the view text
     String input = table.getViewExpandedText().replaceAll("`", "");
     SqlNode sqlNode = hiveToRelConverter.toSqlNode(input, table);
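
For context, the strings this test works with look roughly as follows (a hedged sketch; the exact expanded view text Hive stores may vary by version):

// Hive stores the expanded view text with backquotes, roughly:
//   SELECT 1 AS `timestamp` FROM `test`.`tableone`
// Stripping the backquotes yields the unquoted form a Spark-created view would contain:
String input = "SELECT 1 AS timestamp FROM test.tableone";
// Before this commit, toSqlNode accepted this only for views carrying the
// spark.sql.create.version property; now it parses for any view.
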