Throw exception for RECOVER INDEX JOB query (opensearch-project#2988) (opensearch-project#3005)

(cherry picked from commit 2c878fb)

Signed-off-by: Tomoyuki Morita <[email protected]>
Signed-off-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
1 parent eba7160 commit 3146b2f
Showing 5 changed files with 31 additions and 1 deletion.
Changed file 1 of 5
@@ -119,6 +119,9 @@ private AsyncQueryHandler getQueryHandlerForFlintExtensionQuery(
     } else if (IndexQueryActionType.REFRESH.equals(indexQueryDetails.getIndexQueryActionType())) {
       // Manual refresh should be handled by batch handler
       return queryHandlerFactory.getRefreshQueryHandler(dispatchQueryRequest.getAccountId());
+    } else if (IndexQueryActionType.RECOVER.equals(indexQueryDetails.getIndexQueryActionType())) {
+      // RECOVER INDEX JOB should not be executed from async-query-core
+      throw new IllegalArgumentException("RECOVER INDEX JOB is not allowed.");
     } else {
       return getDefaultAsyncQueryHandler(dispatchQueryRequest.getAccountId());
     }
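For readers skimming the hunk above: the new branch makes the dispatcher fail fast instead of handing a RECOVER INDEX JOB statement to an async-query handler. The stand-alone sketch below is not the project's code; only the branch on IndexQueryActionType mirrors the diff, while the handler strings and the enum subset are simplified stand-ins for the real AsyncQueryHandler objects and request types.

// Minimal sketch of the guard added above (stand-ins, not the real dispatcher).
enum IndexQueryActionType { REFRESH, SHOW, DROP, VACUUM, ALTER, RECOVER } // subset of the real enum

class DispatchGuardSketch {
  static Object handlerFor(IndexQueryActionType actionType) {
    if (IndexQueryActionType.REFRESH.equals(actionType)) {
      // Manual refresh is routed to the batch handler in the real dispatcher.
      return "refreshQueryHandler";
    } else if (IndexQueryActionType.RECOVER.equals(actionType)) {
      // RECOVER INDEX JOB must not be executed from async-query-core.
      throw new IllegalArgumentException("RECOVER INDEX JOB is not allowed.");
    } else {
      return "defaultAsyncQueryHandler";
    }
  }

  public static void main(String[] args) {
    try {
      handlerFor(IndexQueryActionType.RECOVER);
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage()); // prints: RECOVER INDEX JOB is not allowed.
    }
  }
}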
Changed file 2 of 5
@@ -13,5 +13,6 @@ public enum IndexQueryActionType {
   SHOW,
   DROP,
   VACUUM,
-  ALTER
+  ALTER,
+  RECOVER
 }
Changed file 3 of 5
@@ -26,6 +26,7 @@
 import org.opensearch.sql.spark.antlr.parser.FlintSparkSqlExtensionsLexer;
 import org.opensearch.sql.spark.antlr.parser.FlintSparkSqlExtensionsParser;
 import org.opensearch.sql.spark.antlr.parser.FlintSparkSqlExtensionsParser.MaterializedViewQueryContext;
+import org.opensearch.sql.spark.antlr.parser.FlintSparkSqlExtensionsParser.RecoverIndexJobStatementContext;
 import org.opensearch.sql.spark.antlr.parser.SqlBaseLexer;
 import org.opensearch.sql.spark.antlr.parser.SqlBaseParser;
 import org.opensearch.sql.spark.antlr.parser.SqlBaseParser.IdentifierReferenceContext;
@@ -386,6 +387,12 @@ public Void visitMaterializedViewQuery(MaterializedViewQueryContext ctx) {
       return super.visitMaterializedViewQuery(ctx);
     }

+    @Override
+    public Void visitRecoverIndexJobStatement(RecoverIndexJobStatementContext ctx) {
+      indexQueryDetailsBuilder.indexQueryActionType(IndexQueryActionType.RECOVER);
+      return super.visitRecoverIndexJobStatement(ctx);
+    }
+
     private String propertyKey(FlintSparkSqlExtensionsParser.PropertyKeyContext key) {
       if (key.STRING() != null) {
         return key.STRING().getText();
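The grammar change follows the same shape as the existing visitMaterializedViewQuery override above: when the ANTLR-generated walker reaches a RECOVER INDEX JOB statement, the visitor records RECOVER on the index-query-details builder, which is exactly what the dispatcher hunk earlier in this commit checks. Below is a dependency-free sketch of that visit-and-record pattern; the context class, builder, and enum are simplified stand-ins, not the generated FlintSparkSqlExtensionsParser types or the real SQLQueryUtils internals.

// Stand-in types; only the "record the action type when the node is visited" pattern is real.
class RecoverIndexJobStatementContext {}

enum ActionType { RECOVER } // stand-in for IndexQueryActionType

class IndexDetailsBuilderSketch {
  ActionType actionType;

  IndexDetailsBuilderSketch indexQueryActionType(ActionType type) {
    this.actionType = type;
    return this;
  }
}

class IndexDetailsVisitorSketch {
  final IndexDetailsBuilderSketch indexQueryDetailsBuilder = new IndexDetailsBuilderSketch();

  // Mirrors the new override: visiting a RECOVER INDEX JOB node tags the query as RECOVER.
  Void visitRecoverIndexJobStatement(RecoverIndexJobStatementContext ctx) {
    indexQueryDetailsBuilder.indexQueryActionType(ActionType.RECOVER);
    return null;
  }
}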
Changed file 4 of 5
@@ -622,6 +622,16 @@ void testDispatchVacuumIndexQuery() {
     testDispatchBatchQuery("VACUUM INDEX elb_and_requestUri ON my_glue.default.http_logs");
   }

+  @Test
+  void testDispatchRecoverIndexQuery() {
+    String query = "RECOVER INDEX JOB `flint_spark_catalog_default_test_skipping_index`";
+    Assertions.assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            sparkQueryDispatcher.dispatch(
+                getBaseDispatchQueryRequest(query), asyncQueryRequestContext));
+  }
+
   @Test
   void testDispatchWithUnSupportedDataSourceType() {
     when(dataSourceService.verifyDataSourceAccessAndGetRawMetadata(
Changed file 5 of 5
@@ -435,6 +435,15 @@ void testAutoRefresh() {
             .autoRefresh());
   }

+  @Test
+  void testRecoverIndex() {
+    String refreshSkippingIndex =
+        "RECOVER INDEX JOB `flint_spark_catalog_default_test_skipping_index`";
+    assertTrue(SQLQueryUtils.isFlintExtensionQuery(refreshSkippingIndex));
+    IndexQueryDetails indexDetails = SQLQueryUtils.extractIndexDetails(refreshSkippingIndex);
+    assertEquals(IndexQueryActionType.RECOVER, indexDetails.getIndexQueryActionType());
+  }
+
   @Test
   void testValidateSparkSqlQuery_ValidQuery() {
     List<String> errors =
