[#2678] improvement(test): Add e2e test for jdbc backend #2686

Merged: 15 commits, Apr 2, 2024
3 changes: 2 additions & 1 deletion .github/workflows/backend-integration-test.yml
@@ -52,7 +52,7 @@ jobs:
needs: changes
if: needs.changes.outputs.source_changes == 'true'
runs-on: ubuntu-latest
timeout-minutes: 30
timeout-minutes: 60
strategy:
matrix:
architecture: [linux/amd64]
@@ -83,6 +83,7 @@ jobs:
- name: Backend Integration Test
id: integrationTest
run: |
./gradlew test --rerun-tasks -PskipTests -PtestMode=${{ matrix.test-mode }} -PjdkVersion=${{ matrix.java-version }} -PskipWebITs -PjdbcBackend
./gradlew test --rerun-tasks -PskipTests -PtestMode=${{ matrix.test-mode }} -PjdkVersion=${{ matrix.java-version }} -PskipWebITs

- name: Upload integrate tests reports
3 changes: 3 additions & 0 deletions build.gradle.kts
@@ -158,6 +158,9 @@ allprojects {

// Change poll image pause time from 30s to 60s
param.environment("TESTCONTAINERS_PULL_PAUSE_TIMEOUT", "60")
if (project.hasProperty("jdbcBackend")) {
param.environment("jdbcBackend", "true")
}

val testMode = project.properties["testMode"] as? String ?: "embedded"
param.systemProperty("gravitino.log.path", project.buildDir.path + "/${project.name}-integration-test.log")
2 changes: 2 additions & 0 deletions catalogs/catalog-hadoop/build.gradle.kts
@@ -36,9 +36,11 @@ dependencies {

testImplementation(libs.bundles.log4j)
testImplementation(libs.mockito.core)
testImplementation(libs.mysql.driver)
testImplementation(libs.junit.jupiter.api)
testImplementation(libs.junit.jupiter.params)
testImplementation(libs.testcontainers)
testImplementation(libs.testcontainers.mysql)

testRuntimeOnly(libs.junit.jupiter.engine)
}
2 changes: 2 additions & 0 deletions catalogs/catalog-hive/build.gradle.kts
@@ -92,6 +92,7 @@ dependencies {
}
testImplementation(libs.junit.jupiter.api)
testImplementation(libs.mockito.core)
testImplementation(libs.mysql.driver)

testImplementation("org.apache.spark:spark-hive_$scalaVersion:$sparkVersion") {
exclude("org.apache.hadoop")
@@ -106,6 +107,7 @@
}
testImplementation(libs.slf4j.api)
testImplementation(libs.testcontainers)
testImplementation(libs.testcontainers.mysql)

testRuntimeOnly(libs.junit.jupiter.engine)
}
@@ -67,6 +67,7 @@
@Tag("gravitino-docker-it")
@TestInstance(Lifecycle.PER_CLASS)
public class CatalogMysqlIT extends AbstractIT {

private static final String provider = "jdbc-mysql";
public static final String DOWNLOAD_JDBC_DRIVER_URL =
"https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.27/mysql-connector-java-8.0.27.jar";
@@ -137,14 +137,14 @@ Integer updateCatalogMeta(
@Update(
"UPDATE "
+ TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE catalog_id = #{catalogId} AND deleted_at = 0")
Integer softDeleteCatalogMetasByCatalogId(@Param("catalogId") Long catalogId);

@Update(
"UPDATE "
+ TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE metalake_id = #{metalakeId} AND deleted_at = 0")
Integer softDeleteCatalogMetasByMetalakeId(@Param("metalakeId") Long metalakeId);
}
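
These mapper changes switch the soft-delete timestamp from whole seconds to milliseconds: UNIX_TIMESTAMP() returns seconds since the epoch, while UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0 yields a millisecond-precision value. The following is a minimal JDBC sketch that prints both expressions side by side; the connection URL and credentials are hypothetical placeholders, not values taken from this PR.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class DeletedAtPrecisionDemo {
  public static void main(String[] args) throws Exception {
    // Hypothetical URL and credentials; adjust to your own MySQL instance.
    try (Connection conn =
            DriverManager.getConnection("jdbc:mysql://localhost:3306/metadata", "root", "root");
        Statement stmt = conn.createStatement();
        ResultSet rs =
            stmt.executeQuery(
                "SELECT UNIX_TIMESTAMP() AS seconds_epoch, "
                    + "UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0 AS millis_epoch")) {
      rs.next();
      // UNIX_TIMESTAMP() -> whole seconds, e.g. 1712044800
      System.out.println("seconds: " + rs.getLong("seconds_epoch"));
      // UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0 -> milliseconds, e.g. 1712044800123
      System.out.println("millis:  " + rs.getLong("millis_epoch"));
    }
  }
}
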
@@ -190,28 +190,28 @@ Integer updateFilesetMeta(
@Update(
"UPDATE "
+ META_TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE metalake_id = #{metalakeId} AND deleted_at = 0")
Integer softDeleteFilesetMetasByMetalakeId(@Param("metalakeId") Long metalakeId);

@Update(
"UPDATE "
+ META_TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE catalog_id = #{catalogId} AND deleted_at = 0")
Integer softDeleteFilesetMetasByCatalogId(@Param("catalogId") Long catalogId);

@Update(
"UPDATE "
+ META_TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE schema_id = #{schemaId} AND deleted_at = 0")
Integer softDeleteFilesetMetasBySchemaId(@Param("schemaId") Long schemaId);

@Update(
"UPDATE "
+ META_TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE fileset_id = #{filesetId} AND deleted_at = 0")
Integer softDeleteFilesetMetasByFilesetId(@Param("filesetId") Long filesetId);
}
@@ -73,28 +73,28 @@ void insertFilesetVersionOnDuplicateKeyUpdate(
@Update(
"UPDATE "
+ VERSION_TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE metalake_id = #{metalakeId} AND deleted_at = 0")
Integer softDeleteFilesetVersionsByMetalakeId(@Param("metalakeId") Long metalakeId);

@Update(
"UPDATE "
+ VERSION_TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE catalog_id = #{catalogId} AND deleted_at = 0")
Integer softDeleteFilesetVersionsByCatalogId(@Param("catalogId") Long catalogId);

@Update(
"UPDATE "
+ VERSION_TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE schema_id = #{schemaId} AND deleted_at = 0")
Integer softDeleteFilesetVersionsBySchemaId(@Param("schemaId") Long schemaId);

@Update(
"UPDATE "
+ VERSION_TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE fileset_id = #{filesetId} AND deleted_at = 0")
Integer softDeleteFilesetVersionsByFilesetId(@Param("filesetId") Long filesetId);
}
@@ -122,7 +122,7 @@ Integer updateMetalakeMeta(
@Update(
"UPDATE "
+ TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE metalake_id = #{metalakeId} AND deleted_at = 0")
Integer softDeleteMetalakeMetaByMetalakeId(@Param("metalakeId") Long metalakeId);
}
@@ -120,7 +120,7 @@ SchemaPO selectSchemaMetaByCatalogIdAndName(
+ " AND schema_name = #{oldSchemaMeta.schemaName}"
+ " AND metalake_id = #{oldSchemaMeta.metalakeId}"
+ " AND catalog_id = #{oldSchemaMeta.catalogId}"
+ " AND schema_comment = #{oldSchemaMeta.schemaComment}"
+ " AND (schema_comment IS NULL OR schema_comment = #{oldSchemaMeta.schemaComment})"
Contributor (Author): This is to fix the problem that schema_comment is nullable. (A short NULL-comparison sketch follows this file's diff.)

Contributor: Should we make this column NOT NULL and use "" as the default value?

Contributor (Author): A null comment and an empty comment are different things for many catalogs, so I don't think that is better than the previous design.

+ " AND properties = #{oldSchemaMeta.properties}"
+ " AND audit_info = #{oldSchemaMeta.auditInfo}"
+ " AND current_version = #{oldSchemaMeta.currentVersion}"
@@ -132,21 +132,21 @@ Integer updateSchemaMeta(
@Update(
"UPDATE "
+ TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE schema_id = #{schemaId} AND deleted_at = 0")
Integer softDeleteSchemaMetasBySchemaId(@Param("schemaId") Long schemaId);

@Update(
"UPDATE "
+ TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE metalake_id = #{metalakeId} AND deleted_at = 0")
Integer softDeleteSchemaMetasByMetalakeId(@Param("metalakeId") Long metalakeId);

@Update(
"UPDATE "
+ TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE catalog_id = #{catalogId} AND deleted_at = 0")
Integer softDeleteSchemaMetasByCatalogId(@Param("catalogId") Long catalogId);
}
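
The schema_comment predicate rewrite above works around SQL three-valued logic: comparing a column to NULL is never true, so the old `schema_comment = #{...}` condition cannot match rows whose comment is NULL, and matches nothing when the bound value itself is NULL. The sketch below contrasts the two predicates over JDBC; the connection details and the schema_meta table name are hypothetical placeholders used only for illustration.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Types;

public class NullableCommentDemo {
  public static void main(String[] args) throws Exception {
    // Hypothetical URL, credentials, and table name; illustrative only.
    try (Connection conn =
        DriverManager.getConnection("jdbc:mysql://localhost:3306/metadata", "root", "root")) {

      // Plain equality: rows whose schema_comment is NULL can never match,
      // because `schema_comment = NULL` evaluates to NULL rather than TRUE.
      try (PreparedStatement ps =
          conn.prepareStatement("SELECT COUNT(*) FROM schema_meta WHERE schema_comment = ?")) {
        ps.setNull(1, Types.VARCHAR);
        try (ResultSet rs = ps.executeQuery()) {
          rs.next();
          System.out.println("equality only: " + rs.getInt(1)); // always 0 for a NULL bind
        }
      }

      // NULL-tolerant predicate, mirroring the updated mapper SQL.
      try (PreparedStatement ps =
          conn.prepareStatement(
              "SELECT COUNT(*) FROM schema_meta"
                  + " WHERE (schema_comment IS NULL OR schema_comment = ?)")) {
        ps.setString(1, "a comment");
        try (ResultSet rs = ps.executeQuery()) {
          rs.next();
          System.out.println("null-tolerant: " + rs.getInt(1));
        }
      }
    }
  }
}
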
@@ -127,28 +127,28 @@ Integer updateTableMeta(
@Update(
"UPDATE "
+ TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE table_id = #{tableId} AND deleted_at = 0")
Integer softDeleteTableMetasByTableId(@Param("tableId") Long tableId);

@Update(
"UPDATE "
+ TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE metalake_id = #{metalakeId} AND deleted_at = 0")
Integer softDeleteTableMetasByMetalakeId(@Param("metalakeId") Long metalakeId);

@Update(
"UPDATE "
+ TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE catalog_id = #{catalogId} AND deleted_at = 0")
Integer softDeleteTableMetasByCatalogId(@Param("catalogId") Long catalogId);

@Update(
"UPDATE "
+ TABLE_NAME
+ " SET deleted_at = UNIX_TIMESTAMP()"
+ " SET deleted_at = UNIX_TIMESTAMP(CURRENT_TIMESTAMP(3)) * 1000.0"
+ " WHERE schema_id = #{schemaId} AND deleted_at = 0")
Integer softDeleteTableMetasBySchemaId(@Param("schemaId") Long schemaId);
}
@@ -20,9 +20,12 @@
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** The service class for table metadata. It provides the basic database operations for table. */
public class TableMetaService {
private static final Logger LOG = LoggerFactory.getLogger(TableMetaService.class);
private static final TableMetaService INSTANCE = new TableMetaService();

public static TableMetaService getInstance() {
@@ -27,19 +27,24 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.extension.ExtendWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.MySQLContainer;

@ExtendWith(PrintFuncNameExtension.class)
public class AbstractIT {
@@ -60,6 +65,13 @@ public class AbstractIT {

protected static boolean ignoreIcebergRestService = true;

private static final String MYSQL_DOCKER_IMAGE_VERSION = "mysql:8.0";
private static final String DOWNLOAD_JDBC_DRIVER_URL =
"https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.26/mysql-connector-java-8.0.26.jar";

private static final String META_DATA = "metadata";
private static MySQLContainer<?> MYSQL_CONTAINER;

protected static String serverUri;

public static int getGravitinoServerPort() {
@@ -98,6 +110,48 @@ private static void recoverGravitinoServerConfig() throws IOException {
Files.move(tmpPath, configPath);
}

protected static void downLoadMySQLDriver(String relativeDeployLibsPath) throws IOException {
if (!ITUtils.EMBEDDED_TEST_MODE.equals(testMode)) {
String gravitinoHome = System.getenv("GRAVITINO_HOME");
java.nio.file.Path tmpPath = Paths.get(gravitinoHome, relativeDeployLibsPath);
JdbcDriverDownloader.downloadJdbcDriver(DOWNLOAD_JDBC_DRIVER_URL, tmpPath.toString());
}
}

private static void setMySQLBackend() {
String mysqlUrl = MYSQL_CONTAINER.getJdbcUrl();
customConfigs.put(Configs.ENTITY_STORE.getKey(), "relational");
customConfigs.put(Configs.ENTITY_RELATIONAL_STORE.getKey(), "JDBCBackend");
customConfigs.put(Configs.ENTITY_RELATIONAL_JDBC_BACKEND_URL.getKey(), mysqlUrl);
customConfigs.put(
Configs.ENTITY_RELATIONAL_JDBC_BACKEND_DRIVER.getKey(), "com.mysql.cj.jdbc.Driver");
customConfigs.put(Configs.ENTITY_RELATIONAL_JDBC_BACKEND_USER.getKey(), "root");
customConfigs.put(Configs.ENTITY_RELATIONAL_JDBC_BACKEND_PASSWORD.getKey(), "root");

LOG.info("MySQL URL: {}", mysqlUrl);
// Connect to the mysql docker and create a databases
try (Connection connection =
DriverManager.getConnection(
StringUtils.substring(mysqlUrl, 0, mysqlUrl.lastIndexOf("/")), "root", "root")) {

final Statement statement = connection.createStatement();
statement.execute("drop database if exists " + META_DATA);
statement.execute("create database " + META_DATA);
String gravitinoHome = System.getenv("GRAVITINO_ROOT_DIR");
String mysqlContent =
FileUtils.readFileToString(
new File(gravitinoHome + "/core/src/main/resources/mysql/mysql_init.sql"), "UTF-8");
String[] sqls = mysqlContent.split(";");
sqls = ArrayUtils.addFirst(sqls, "use " + META_DATA + ";");
for (String sql : sqls) {
statement.execute(sql);
}
} catch (Exception e) {
LOG.error("Failed to create database in mysql", e);
throw new RuntimeException(e);
}
}

@BeforeAll
public static void startIntegrationTest() throws Exception {
testMode =
@@ -107,6 +161,18 @@ public static void startIntegrationTest() throws Exception {

LOG.info("Running Gravitino Server in {} mode", testMode);

if ("true".equals(System.getenv("jdbcBackend"))) {
// start mysql docker
MYSQL_CONTAINER =
new MySQLContainer<>(MYSQL_DOCKER_IMAGE_VERSION)
.withDatabaseName(META_DATA)
.withUsername("root")
.withPassword("root");
MYSQL_CONTAINER.start();

setMySQLBackend();
}

serverConfig = new ServerConfig();
if (testMode != null && testMode.equals(ITUtils.EMBEDDED_TEST_MODE)) {
MiniGravitinoContext context =
@@ -117,6 +183,7 @@
} else {
rewriteGravitinoServerConfig();
serverConfig.loadFromFile(GravitinoServer.CONF_FILE);
downLoadMySQLDriver("/libs");
try {
FileUtils.deleteDirectory(
FileUtils.getFile(serverConfig.get(ENTRY_KV_ROCKSDB_BACKEND_PATH)));
@@ -168,6 +235,10 @@ public static void stopIntegrationTest() throws IOException, InterruptedException
}
customConfigs.clear();
LOG.info("Tearing down Gravitino Server");

if (MYSQL_CONTAINER != null) {
MYSQL_CONTAINER.stop();
}
}

public static GravitinoAdminClient getGravitinoClient() {