Add support for Databricks runtime 14.3 & 15.4 LTS #21700

Merged: 2 commits, merged on Oct 1, 2024
Changes from 1 commit

6 changes: 6 additions & 0 deletions .github/workflows/ci.yml
@@ -881,6 +881,7 @@ jobs:
- suite-delta-lake-databricks113
- suite-delta-lake-databricks122
- suite-delta-lake-databricks133
+ - suite-delta-lake-databricks143
- suite-databricks-unity-http-hms
- suite-gcs
- suite-clients
@@ -929,6 +930,9 @@ jobs:
- suite: suite-delta-lake-databricks133
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
+ - suite: suite-delta-lake-databricks143
+   ignore exclusion if: >-
+   ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
- suite: suite-databricks-unity-http-hms
config: hdp3
- suite: suite-databricks-unity-http-hms
@@ -987,6 +991,7 @@ jobs:
DATABRICKS_113_JDBC_URL: ""
DATABRICKS_122_JDBC_URL: ""
DATABRICKS_133_JDBC_URL: ""
+ DATABRICKS_143_JDBC_URL: ""
DATABRICKS_UNITY_JDBC_URL: ""
DATABRICKS_UNITY_CATALOG_NAME: ""
DATABRICKS_UNITY_EXTERNAL_LOCATION: ""
@@ -1063,6 +1068,7 @@ jobs:
DATABRICKS_113_JDBC_URL: ${{ vars.DATABRICKS_113_JDBC_URL }}
DATABRICKS_122_JDBC_URL: ${{ vars.DATABRICKS_122_JDBC_URL }}
DATABRICKS_133_JDBC_URL: ${{ vars.DATABRICKS_133_JDBC_URL }}
+ DATABRICKS_143_JDBC_URL: ${{ vars.DATABRICKS_143_JDBC_URL }}
DATABRICKS_UNITY_JDBC_URL: ${{ vars.DATABRICKS_UNITY_JDBC_URL }}
DATABRICKS_UNITY_CATALOG_NAME: ${{ vars.DATABRICKS_UNITY_CATALOG_NAME }}
DATABRICKS_UNITY_EXTERNAL_LOCATION: ${{ vars.DATABRICKS_UNITY_EXTERNAL_LOCATION }}
2 changes: 1 addition & 1 deletion docs/src/main/sphinx/connector/delta-lake.md
@@ -13,7 +13,7 @@ data.
To connect to Databricks Delta Lake, you need:

- Tables written by Databricks Runtime 7.3 LTS, 9.1 LTS, 10.4 LTS, 11.3 LTS,
- 12.2 LTS and 13.3 LTS are supported.
+ 12.2 LTS, 13.3 LTS and 14.3 LTS are supported.
- Deployments using AWS, HDFS, Azure Storage, and Google Cloud Storage (GCS) are
fully supported.
- Network access from the coordinator and workers to the Delta Lake storage.
TestGroups.java
@@ -98,6 +98,7 @@ public final class TestGroups
public static final String DELTA_LAKE_DATABRICKS_104 = "delta-lake-databricks-104";
public static final String DELTA_LAKE_DATABRICKS_113 = "delta-lake-databricks-113";
public static final String DELTA_LAKE_DATABRICKS_122 = "delta-lake-databricks-122";
+ public static final String DELTA_LAKE_DATABRICKS_133 = "delta-lake-databricks-133";
public static final String DATABRICKS_UNITY_HTTP_HMS = "databricks-unity-http-hms";
public static final String DELTA_LAKE_EXCLUDE_91 = "delta-lake-exclude-91";
public static final String DELTA_LAKE_ALLUXIO_CACHING = "delta-lake-alluxio-caching";
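Worth noting for reviewers: the unversioned DELTA_LAKE_DATABRICKS group tracks the newest supported runtime, which is why this commit pins 13.3 under its own DELTA_LAKE_DATABRICKS_133 group (see the SuiteDeltaLakeDatabricks133 change below) while the new 14.3 suite keeps the unversioned group. If the second commit adds 15.4 LTS the same way, a follow-up would presumably pin 14.3 in turn; the constant below is a hypothetical sketch and does not exist in this commit:

// Hypothetical follow-up for the 15.4 commit, mirroring what this commit does for 13.3
public static final String DELTA_LAKE_DATABRICKS_143 = "delta-lake-databricks-143";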
EnvSinglenodeDeltaLakeDatabricks143.java (new file)
@@ -0,0 +1,38 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.launcher.env.environment;

import com.google.inject.Inject;
import io.trino.tests.product.launcher.docker.DockerFiles;
import io.trino.tests.product.launcher.env.common.Standard;
import io.trino.tests.product.launcher.env.common.TestsEnvironment;

import static java.util.Objects.requireNonNull;

@TestsEnvironment
public class EnvSinglenodeDeltaLakeDatabricks143
extends AbstractSinglenodeDeltaLakeDatabricks
{
@Inject
public EnvSinglenodeDeltaLakeDatabricks143(Standard standard, DockerFiles dockerFiles)
{
super(standard, dockerFiles);
}

@Override
String databricksTestJdbcUrl()
{
return requireNonNull(System.getenv("DATABRICKS_143_JDBC_URL"), "Environment DATABRICKS_143_JDBC_URL was not set");
}
}
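The PR title also mentions 15.4 LTS, which this commit ("Changes from 1 commit") does not touch. A minimal sketch of what the analogous environment class could look like, assuming the second commit follows the same pattern; both the class and the DATABRICKS_154_JDBC_URL variable are hypothetical here, not part of this diff:

package io.trino.tests.product.launcher.env.environment;

import com.google.inject.Inject;
import io.trino.tests.product.launcher.docker.DockerFiles;
import io.trino.tests.product.launcher.env.common.Standard;
import io.trino.tests.product.launcher.env.common.TestsEnvironment;

import static java.util.Objects.requireNonNull;

// Hypothetical 15.4 counterpart of the class above; not part of this commit
@TestsEnvironment
public class EnvSinglenodeDeltaLakeDatabricks154
        extends AbstractSinglenodeDeltaLakeDatabricks
{
    @Inject
    public EnvSinglenodeDeltaLakeDatabricks154(Standard standard, DockerFiles dockerFiles)
    {
        super(standard, dockerFiles);
    }

    @Override
    String databricksTestJdbcUrl()
    {
        return requireNonNull(System.getenv("DATABRICKS_154_JDBC_URL"), "Environment DATABRICKS_154_JDBC_URL was not set");
    }
}

A matching suite class, TestGroups pin for 14.3, and ci.yml wiring would complete the set, following the 14.3 template in this commit.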
SuiteDeltaLakeDatabricks133.java
@@ -22,7 +22,7 @@
import java.util.List;

import static io.trino.tests.product.TestGroups.CONFIGURED_FEATURES;
- import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
+ import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;

public class SuiteDeltaLakeDatabricks133
@@ -33,7 +33,7 @@ public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
{
return ImmutableList.of(
testOnEnvironment(EnvSinglenodeDeltaLakeDatabricks133.class)
- .withGroups(CONFIGURED_FEATURES, DELTA_LAKE_DATABRICKS)
+ .withGroups(CONFIGURED_FEATURES, DELTA_LAKE_DATABRICKS_133)
.withExcludedTests(getExcludedTests())
.build());
}
SuiteDeltaLakeDatabricks143.java (new file)
@@ -0,0 +1,40 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.launcher.suite.suites;

import com.google.common.collect.ImmutableList;
import io.trino.tests.product.launcher.env.EnvironmentConfig;
import io.trino.tests.product.launcher.env.environment.EnvSinglenodeDeltaLakeDatabricks143;
import io.trino.tests.product.launcher.suite.SuiteDeltaLakeDatabricks;
import io.trino.tests.product.launcher.suite.SuiteTestRun;

import java.util.List;

import static io.trino.tests.product.TestGroups.CONFIGURED_FEATURES;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;

public class SuiteDeltaLakeDatabricks143
extends SuiteDeltaLakeDatabricks
{
@Override
public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
{
return ImmutableList.of(
testOnEnvironment(EnvSinglenodeDeltaLakeDatabricks143.class)
.withGroups(CONFIGURED_FEATURES, DELTA_LAKE_DATABRICKS)
.withExcludedTests(getExcludedTests())
.build());
}
}
@@ -28,6 +28,7 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_91;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
+ import static io.trino.tests.product.deltalake.util.DatabricksVersion.DATABRICKS_143_RUNTIME_VERSION;
import static io.trino.tests.product.deltalake.util.DatabricksVersion.DATABRICKS_91_RUNTIME_VERSION;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_MATCH;
@@ -274,6 +275,11 @@ public void testTrinoPreservesTableFeature()
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testTrinoAlterTablePreservesGeneratedColumn()
{
+ if (getDatabricksRuntimeVersion().orElseThrow().isAtLeast(DATABRICKS_143_RUNTIME_VERSION)) {
+     // The following COMMENT statement throws an exception (expected) because version >= 14.3 stores the 'generatedColumns' writer feature
+     return;
+ }
+
String tableName = "test_trino_alter_table_preserves_generated_column_" + randomNameSuffix();
String tableDirectory = "databricks-compatibility-test-" + tableName;

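The runtime gate added here reappears in the column-name tests below. Since these files already import getDatabricksRuntimeVersion and DATABRICKS_143_RUNTIME_VERSION, a shared helper could express the intent once; a sketch, with the helper itself being hypothetical rather than part of this PR:

// Hypothetical helper: true when the connected Databricks runtime is 14.3 or newer,
// i.e. when it records writer features such as 'generatedColumns' and 'identityColumns'
// in the table protocol, making the gated DDL statements below fail.
private static boolean runtimeStoresWriterFeatures()
{
    return getDatabricksRuntimeVersion()
            .orElseThrow()
            .isAtLeast(DATABRICKS_143_RUNTIME_VERSION);
}

Each gated test would then reduce to an early return on runtimeStoresWriterFeatures(), keeping its explanatory comment at the call site.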
@@ -27,14 +27,17 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_104;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_113;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_122;
+ import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_91;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
+ import static io.trino.tests.product.deltalake.util.DatabricksVersion.DATABRICKS_143_RUNTIME_VERSION;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_MATCH;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.dropDeltaTableWithRetry;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.getColumnCommentOnDelta;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.getColumnCommentOnTrino;
+ import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.getDatabricksRuntimeVersion;
import static io.trino.tests.product.utils.QueryExecutors.onDelta;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static org.assertj.core.api.Assertions.assertThat;
@@ -72,7 +75,7 @@ public void testNonLowercaseColumnNames()
}
}

- @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_104, DELTA_LAKE_DATABRICKS_113, DELTA_LAKE_DATABRICKS_122, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
+ @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_104, DELTA_LAKE_DATABRICKS_113, DELTA_LAKE_DATABRICKS_122, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testNonLowercaseFieldNames()
{
@@ -161,6 +164,11 @@ public void testNotNullColumnWithNonLowerCaseColumnName()
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testGeneratedColumnWithNonLowerCaseColumnName()
{
+ if (getDatabricksRuntimeVersion().orElseThrow().isAtLeast(DATABRICKS_143_RUNTIME_VERSION)) {
+     // The following COMMENT statement throws an exception (expected) because version >= 14.3 stores the 'generatedColumns' writer feature
+     return;
+ }
+
String tableName = "test_dl_generated_column_uppercase_name" + randomNameSuffix();

onDelta().executeQuery("" +
@@ -210,6 +218,11 @@ public void testIdentityColumnWithNonLowerCaseColumnName()
"ON (t.data = s.data) WHEN MATCHED THEN UPDATE SET data = 1"))
.hasMessageContaining("Writing to tables with identity columns is not supported");

+ if (getDatabricksRuntimeVersion().orElseThrow().isAtLeast(DATABRICKS_143_RUNTIME_VERSION)) {
+     // The following statements throw an exception (expected) because version >= 14.3 stores the 'identityColumns' writer feature
+     return;
+ }
+
// Verify column operations preserve the identity column name and property
onTrino().executeQuery("ALTER TABLE delta.default." + tableName + " ADD COLUMN new_col integer");
onTrino().executeQuery("ALTER TABLE delta.default." + tableName + " RENAME COLUMN new_col TO renamed_col");
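For reviewers wanting to confirm the writer-feature behavior that motivates these early returns: Delta records writer features in the table metadata, and the tests already have a utility to read it. A sketch of such a probe, assuming the delta.feature.<name>=supported key format from the Delta protocol, which this PR itself does not assert:

// Sketch: read the table properties through the existing test utility and check
// whether the 'identityColumns' writer feature was recorded by runtime 14.3+.
// The delta.feature.* key format is an assumption, not verified by this PR.
Map<String, String> properties = getTablePropertiesOnDelta("default", tableName);
boolean identityColumnsFeature = "supported".equals(properties.get("delta.feature.identityColumns"));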
@@ -25,6 +25,7 @@
import org.testng.annotations.Test;

import java.util.List;
+ import java.util.Map;

import static io.trino.tempto.assertions.QueryAssert.Row.row;
import static io.trino.tempto.assertions.QueryAssert.assertQueryFailure;
@@ -33,6 +34,7 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_104;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_113;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_122;
+ import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_91;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
@@ -110,6 +112,13 @@ public void testUpdateTableWithChangeDataFeedWriterFeature()
try {
assertThat(onTrino().executeQuery("SHOW CREATE TABLE delta.default." + tableName).getOnlyValue().toString()).contains("change_data_feed_enabled = true");

+ // TODO https://github.com/trinodb/trino/issues/23620 Fix incorrect CDC entry when deletion vector is enabled
+ Map<String, String> properties = getTablePropertiesOnDelta("default", tableName);
+ if (properties.getOrDefault("delta.enableChangeDataFeed", "false").equals("true") &&
+         properties.getOrDefault("delta.enableDeletionVectors", "false").equals("true")) {
+     return;
+ }
+
onDelta().executeQuery("INSERT INTO default." + tableName + " VALUES ('testValue1', 1), ('testValue2', 2), ('testValue3', 3)");
onTrino().executeQuery("UPDATE delta.default." + tableName + " SET updated_column = 30 WHERE col1 = 'testValue3'");

@@ -203,6 +212,13 @@ public void testUpdatePartitionedTableWithCdf(String columnMappingMode)
"LOCATION 's3://" + bucketName + "/databricks-compatibility-test-" + tableName + "'" +
"TBLPROPERTIES (delta.enableChangeDataFeed = true, 'delta.columnMapping.mode'='" + columnMappingMode + "')");

+ // TODO https://github.com/trinodb/trino/issues/23620 Fix incorrect CDC entry when deletion vector is enabled
+ Map<String, String> properties = getTablePropertiesOnDelta("default", tableName);
+ if (properties.getOrDefault("delta.enableChangeDataFeed", "false").equals("true") &&
+         properties.getOrDefault("delta.enableDeletionVectors", "false").equals("true")) {
+     return;
+ }
+
onDelta().executeQuery("INSERT INTO default." + tableName + " VALUES ('testValue1', 1, 'partition1')");
onDelta().executeQuery("INSERT INTO default." + tableName + " VALUES ('testValue2', 2, 'partition2')");
onDelta().executeQuery("INSERT INTO default." + tableName + " VALUES ('testValue3', 3, 'partition3')");
@@ -381,6 +397,13 @@ public void testDeleteFromTableWithCdf(String columnMappingMode)
"LOCATION 's3://" + bucketName + "/databricks-compatibility-test-" + tableName + "'" +
"TBLPROPERTIES (delta.enableChangeDataFeed = true, 'delta.columnMapping.mode' = '" + columnMappingMode + "')");

+ // TODO https://github.com/trinodb/trino/issues/23620 Fix incorrect CDC entry when deletion vector is enabled
+ Map<String, String> properties = getTablePropertiesOnDelta("default", tableName);
+ if (properties.getOrDefault("delta.enableChangeDataFeed", "false").equals("true") &&
+         properties.getOrDefault("delta.enableDeletionVectors", "false").equals("true")) {
+     return;
+ }
+
onDelta().executeQuery("INSERT INTO default." + tableName + " VALUES('testValue1', 1)");
onDelta().executeQuery("INSERT INTO default." + tableName + " VALUES('testValue2', 2)");
onDelta().executeQuery("INSERT INTO default." + tableName + " VALUES('testValue3', 3)");
@@ -509,7 +532,7 @@ public void testMergeDeleteIntoTableWithCdfEnabled(String columnMappingMode)
}
}

- @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_104, DELTA_LAKE_DATABRICKS_113, DELTA_LAKE_DATABRICKS_122, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS})
+ @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_104, DELTA_LAKE_DATABRICKS_113, DELTA_LAKE_DATABRICKS_122, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testMergeMixedDeleteAndUpdateIntoTableWithCdfEnabled()
{
@@ -524,6 +547,13 @@ public void testMergeMixedDeleteAndUpdateIntoTableWithCdfEnabled()
"USING DELTA " +
"LOCATION 's3://" + bucketName + "/databricks-compatibility-test-" + sourceTableName + "'");

+ // TODO https://github.com/trinodb/trino/issues/23620 Fix incorrect CDC entry when deletion vector is enabled
+ Map<String, String> properties = getTablePropertiesOnDelta("default", targetTableName);
+ if (properties.getOrDefault("delta.enableChangeDataFeed", "false").equals("true") &&
+         properties.getOrDefault("delta.enableDeletionVectors", "false").equals("true")) {
+     return;
+ }
+
onDelta().executeQuery("INSERT INTO default." + targetTableName + " VALUES (1, 'pageUrl1', 100)");
onDelta().executeQuery("INSERT INTO default." + targetTableName + " VALUES (2, 'pageUrl2', 200)");
onDelta().executeQuery("INSERT INTO default." + targetTableName + " VALUES (3, 'pageUrl3', 300)");
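The change-data-feed-plus-deletion-vectors early return above is duplicated across four tests in this file. A hypothetical consolidation, built only from calls this file already uses:

// Hypothetical helper for the repeated TODO gate (https://github.com/trinodb/trino/issues/23620):
// true when both change data feed and deletion vectors are enabled on the table,
// the combination that currently produces incorrect CDC entries.
private static boolean hasCdfWithDeletionVectors(String schema, String tableName)
{
    Map<String, String> properties = getTablePropertiesOnDelta(schema, tableName);
    return properties.getOrDefault("delta.enableChangeDataFeed", "false").equals("true")
            && properties.getOrDefault("delta.enableDeletionVectors", "false").equals("true");
}

Each gated test would then early-return on hasCdfWithDeletionVectors("default", tableName) next to its TODO comment.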
@@ -41,6 +41,7 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_104;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_113;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_122;
+ import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.TransactionLogAssertions.assertLastEntryIsCheckpointed;
@@ -277,7 +278,7 @@ private void trinoUsesCheckpointInterval(String deltaTableProperties)
}
}

- @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_104, DELTA_LAKE_DATABRICKS_113, DELTA_LAKE_DATABRICKS_122, PROFILE_SPECIFIC_TESTS})
+ @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_104, DELTA_LAKE_DATABRICKS_113, DELTA_LAKE_DATABRICKS_122, DELTA_LAKE_DATABRICKS_133, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testDatabricksUsesCheckpointInterval()
{