Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Migrate selected Iceberg tests #19505

Merged
merged 3 commits into from
Oct 24, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions plugin/trino-iceberg/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -576,6 +576,12 @@
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>postgresql</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,9 @@
import org.apache.iceberg.types.Types.DecimalType;
import org.apache.iceberg.types.Types.DoubleType;
import org.apache.iceberg.types.Types.FloatType;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import java.math.BigDecimal;
import java.nio.ByteBuffer;
Expand Down Expand Up @@ -187,7 +188,8 @@ public void testBucketingSpecValues()
assertBucketAndHashEquals("binary", ByteBuffer.wrap(new byte[] {0x00, 0x01, 0x02, 0x03}), -188683207 & Integer.MAX_VALUE);
}

@Test(dataProvider = "unsupportedBucketingTypes")
@ParameterizedTest
@MethodSource("unsupportedBucketingTypes")
public void testUnsupportedTypes(Type type)
{
assertThatThrownBy(() -> computeIcebergBucket(type, null, 1))
Expand All @@ -197,8 +199,7 @@ public void testUnsupportedTypes(Type type)
.hasMessage("Unsupported type for 'bucket': %s", toTrinoType(type, TYPE_MANAGER));
}

@DataProvider
public Object[][] unsupportedBucketingTypes()
public static Object[][] unsupportedBucketingTypes()
{
return new Object[][] {
{BooleanType.get()},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,11 @@
import io.trino.testing.DistributedQueryRunner;
import org.apache.iceberg.util.ThreadPools;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import java.io.File;
import java.util.Optional;
Expand All @@ -49,14 +52,14 @@
import static io.trino.plugin.hive.metastore.file.TestingFileHiveMetastore.createTestingFileHiveMetastore;
import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergSessionProperties.COLLECT_EXTENDED_STATISTICS_ON_WRITE;
import static io.trino.plugin.iceberg.TestIcebergMetadataFileOperations.FileType.DATA;
import static io.trino.plugin.iceberg.TestIcebergMetadataFileOperations.FileType.MANIFEST;
import static io.trino.plugin.iceberg.TestIcebergMetadataFileOperations.FileType.METADATA_JSON;
import static io.trino.plugin.iceberg.TestIcebergMetadataFileOperations.FileType.SNAPSHOT;
import static io.trino.plugin.iceberg.TestIcebergMetadataFileOperations.FileType.STATS;
import static io.trino.plugin.iceberg.TestIcebergMetadataFileOperations.FileType.fromFilePath;
import static io.trino.plugin.iceberg.TestIcebergMetadataFileOperations.Scope.ALL_FILES;
import static io.trino.plugin.iceberg.TestIcebergMetadataFileOperations.Scope.METADATA_FILES;
import static io.trino.plugin.iceberg.TestIcebergFileOperations.FileType.DATA;
import static io.trino.plugin.iceberg.TestIcebergFileOperations.FileType.MANIFEST;
import static io.trino.plugin.iceberg.TestIcebergFileOperations.FileType.METADATA_JSON;
import static io.trino.plugin.iceberg.TestIcebergFileOperations.FileType.SNAPSHOT;
import static io.trino.plugin.iceberg.TestIcebergFileOperations.FileType.STATS;
import static io.trino.plugin.iceberg.TestIcebergFileOperations.FileType.fromFilePath;
import static io.trino.plugin.iceberg.TestIcebergFileOperations.Scope.ALL_FILES;
import static io.trino.plugin.iceberg.TestIcebergFileOperations.Scope.METADATA_FILES;
import static io.trino.testing.MultisetAssertions.assertMultisetsEqual;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static io.trino.testing.TestingSession.testSessionBuilder;
Expand All @@ -66,8 +69,8 @@
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toCollection;

@Test(singleThreaded = true) // e.g. trackingFileSystemFactory is shared mutable state
public class TestIcebergMetadataFileOperations
@Execution(ExecutionMode.SAME_THREAD) // e.g. trackingFileSystemFactory is shared mutable state
public class TestIcebergFileOperations
extends AbstractTestQueryFramework
{
private static final int MAX_PREFIXES_COUNT = 10;
Expand Down Expand Up @@ -167,7 +170,8 @@ public void testSelect()
.build());
}

@Test(dataProvider = "testSelectWithLimitDataProvider")
@ParameterizedTest
@MethodSource("testSelectWithLimitDataProvider")
public void testSelectWithLimit(int numberOfFiles)
{
assertUpdate("DROP TABLE IF EXISTS test_select_with_limit"); // test is parameterized
Expand Down Expand Up @@ -210,7 +214,6 @@ public void testSelectWithLimit(int numberOfFiles)
assertUpdate("DROP TABLE test_select_with_limit");
}

@DataProvider
public Object[][] testSelectWithLimitDataProvider()
{
return new Object[][] {
Expand Down Expand Up @@ -594,7 +597,8 @@ public void testRemoveOrphanFiles()
assertUpdate("DROP TABLE " + tableName);
}

@Test(dataProvider = "metadataQueriesTestTableCountDataProvider")
@ParameterizedTest
@MethodSource("metadataQueriesTestTableCountDataProvider")
public void testInformationSchemaColumns(int tables)
{
String schemaName = "test_i_s_columns_schema" + randomNameSuffix();
Expand Down Expand Up @@ -636,7 +640,8 @@ public void testInformationSchemaColumns(int tables)
}
}

@Test(dataProvider = "metadataQueriesTestTableCountDataProvider")
@ParameterizedTest
@MethodSource("metadataQueriesTestTableCountDataProvider")
public void testSystemMetadataTableComments(int tables)
{
String schemaName = "test_s_m_table_comments" + randomNameSuffix();
Expand Down Expand Up @@ -678,7 +683,6 @@ public void testSystemMetadataTableComments(int tables)
}
}

@DataProvider
public Object[][] metadataQueriesTestTableCountDataProvider()
{
return new Object[][] {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,11 @@
import io.trino.testing.AbstractTestQueryFramework;
import io.trino.testing.DistributedQueryRunner;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import java.io.File;
import java.util.Optional;
Expand All @@ -50,7 +53,7 @@
import static io.trino.testing.TestingSession.testSessionBuilder;
import static org.assertj.core.api.Assertions.assertThat;

@Test(singleThreaded = true) // metastore invocation counters share mutable state so can't be run from many threads simultaneously
@Execution(ExecutionMode.SAME_THREAD) // metastore invocation counters share mutable state so can't be run from many threads simultaneously
public class TestIcebergMetastoreAccessOperations
extends AbstractTestQueryFramework
{
Expand Down Expand Up @@ -326,7 +329,8 @@ public void testUnregisterTable()
.build());
}

@Test(dataProvider = "metadataQueriesTestTableCountDataProvider")
@ParameterizedTest
@MethodSource("metadataQueriesTestTableCountDataProvider")
public void testInformationSchemaColumns(int tables)
{
String schemaName = "test_i_s_columns_schema" + randomNameSuffix();
Expand Down Expand Up @@ -371,7 +375,8 @@ public void testInformationSchemaColumns(int tables)
}
}

@Test(dataProvider = "metadataQueriesTestTableCountDataProvider")
@ParameterizedTest
@MethodSource("metadataQueriesTestTableCountDataProvider")
public void testSystemMetadataTableComments(int tables)
{
String schemaName = "test_s_m_table_comments" + randomNameSuffix();
Expand Down Expand Up @@ -417,7 +422,6 @@ public void testSystemMetadataTableComments(int tables)
}
}

@DataProvider
public Object[][] metadataQueriesTestTableCountDataProvider()
{
return new Object[][] {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,9 @@
import io.trino.testing.AbstractTestQueryFramework;
import io.trino.testing.DistributedQueryRunner;
import io.trino.testing.QueryRunner;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import java.nio.file.Files;
import java.nio.file.Path;
Expand All @@ -47,14 +48,15 @@ protected QueryRunner createQueryRunner()
DistributedQueryRunner queryRunner = IcebergQueryRunner.builder().setMetastoreDirectory(dataDirectory.toFile()).build();
queryRunner.installPlugin(new TestingHivePlugin());
queryRunner.createCatalog("hive", "hive", ImmutableMap.<String, String>builder()
.put("hive.metastore", "file")
.put("hive.metastore.catalog.dir", dataDirectory.toString())
.put("hive.security", "allow-all")
.put("hive.metastore", "file")
.put("hive.metastore.catalog.dir", dataDirectory.toString())
.put("hive.security", "allow-all")
.buildOrThrow());
return queryRunner;
}

@Test(dataProvider = "fileFormats")
@ParameterizedTest
@MethodSource("fileFormats")
public void testMigrateTable(IcebergFileFormat fileFormat)
{
String tableName = "test_migrate_" + randomNameSuffix();
Expand All @@ -78,7 +80,8 @@ public void testMigrateTable(IcebergFileFormat fileFormat)
assertUpdate("DROP TABLE " + tableName);
}

@Test(dataProvider = "fileFormats")
@ParameterizedTest
@MethodSource("fileFormats")
public void testMigrateTableWithTinyintType(IcebergFileFormat fileFormat)
{
String tableName = "test_migrate_tinyint" + randomNameSuffix();
Expand All @@ -105,7 +108,8 @@ public void testMigrateTableWithTinyintType(IcebergFileFormat fileFormat)
assertUpdate("DROP TABLE " + tableName);
}

@Test(dataProvider = "fileFormats")
@ParameterizedTest
@MethodSource("fileFormats")
public void testMigrateTableWithSmallintType(IcebergFileFormat fileFormat)
{
String tableName = "test_migrate_smallint" + randomNameSuffix();
Expand All @@ -132,7 +136,6 @@ public void testMigrateTableWithSmallintType(IcebergFileFormat fileFormat)
assertUpdate("DROP TABLE " + tableName);
}

@DataProvider
public static Object[][] fileFormats()
{
return Stream.of(IcebergFileFormat.values())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,18 +32,18 @@
import org.apache.iceberg.Table;
import org.apache.iceberg.hadoop.HadoopTables;
import org.apache.iceberg.types.Types;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;

import static com.google.common.base.Verify.verify;
import static com.google.common.io.MoreFiles.deleteRecursively;
Expand Down Expand Up @@ -80,28 +80,21 @@ protected QueryRunner createQueryRunner()
.build();
}

@BeforeClass
@BeforeAll
public void initFileSystem()
{
fileSystem = getFileSystemFactory(getDistributedQueryRunner()).create(SESSION);
}

@AfterClass(alwaysRun = true)
@AfterAll
public void tearDown()
throws IOException
{
deleteRecursively(metastoreDir.toPath(), ALLOW_INSECURE);
}

@DataProvider
public static Object[][] fileFormats()
{
return Stream.of(IcebergFileFormat.values())
.map(icebergFileFormat -> new Object[] {icebergFileFormat})
.toArray(Object[][]::new);
}

@Test(dataProvider = "fileFormats")
@ParameterizedTest
@EnumSource(IcebergFileFormat.class)
public void testRegisterTableWithTableLocation(IcebergFileFormat icebergFileFormat)
{
String tableName = "test_register_table_with_table_location_" + icebergFileFormat.name().toLowerCase(ENGLISH) + "_" + randomNameSuffix();
Expand All @@ -123,7 +116,8 @@ public void testRegisterTableWithTableLocation(IcebergFileFormat icebergFileForm
assertUpdate(format("DROP TABLE %s", tableName));
}

@Test(dataProvider = "fileFormats")
@ParameterizedTest
@EnumSource(IcebergFileFormat.class)
public void testRegisterPartitionedTable(IcebergFileFormat icebergFileFormat)
{
String tableName = "test_register_partitioned_table_" + icebergFileFormat.name().toLowerCase(ENGLISH) + "_" + randomNameSuffix();
Expand All @@ -145,7 +139,8 @@ public void testRegisterPartitionedTable(IcebergFileFormat icebergFileFormat)
assertUpdate("DROP TABLE " + tableName);
}

@Test(dataProvider = "fileFormats")
@ParameterizedTest
@EnumSource(IcebergFileFormat.class)
public void testRegisterTableWithComments(IcebergFileFormat icebergFileFormat)
{
String tableName = "test_register_table_with_comments_" + icebergFileFormat.name().toLowerCase(ENGLISH) + "_" + randomNameSuffix();
Expand All @@ -171,7 +166,8 @@ public void testRegisterTableWithComments(IcebergFileFormat icebergFileFormat)
assertUpdate(format("DROP TABLE %s", tableName));
}

@Test(dataProvider = "fileFormats")
@ParameterizedTest
@EnumSource(IcebergFileFormat.class)
public void testRegisterTableWithShowCreateTable(IcebergFileFormat icebergFileFormat)
{
String tableName = "test_register_table_with_show_create_table_" + icebergFileFormat.name().toLowerCase(ENGLISH) + "_" + randomNameSuffix();
Expand All @@ -191,7 +187,8 @@ public void testRegisterTableWithShowCreateTable(IcebergFileFormat icebergFileFo
assertUpdate(format("DROP TABLE %s", tableName));
}

@Test(dataProvider = "fileFormats")
@ParameterizedTest
@EnumSource(IcebergFileFormat.class)
public void testRegisterTableWithReInsert(IcebergFileFormat icebergFileFormat)
{
String tableName = "test_register_table_with_re_insert_" + icebergFileFormat.name().toLowerCase(ENGLISH) + "_" + randomNameSuffix();
Expand All @@ -215,7 +212,8 @@ public void testRegisterTableWithReInsert(IcebergFileFormat icebergFileFormat)
assertUpdate(format("DROP TABLE %s", tableName));
}

@Test(dataProvider = "fileFormats")
@ParameterizedTest
@EnumSource(IcebergFileFormat.class)
public void testRegisterTableWithDroppedTable(IcebergFileFormat icebergFileFormat)
{
String tableName = "test_register_table_with_dropped_table_" + icebergFileFormat.name().toLowerCase(ENGLISH) + "_" + randomNameSuffix();
Expand All @@ -233,7 +231,8 @@ public void testRegisterTableWithDroppedTable(IcebergFileFormat icebergFileForma
".*No versioned metadata file exists at location.*");
}

@Test(dataProvider = "fileFormats")
@ParameterizedTest
@EnumSource(IcebergFileFormat.class)
public void testRegisterTableWithDifferentTableName(IcebergFileFormat icebergFileFormat)
{
String tableName = "test_register_table_with_different_table_name_old_" + icebergFileFormat.name().toLowerCase(ENGLISH) + "_" + randomNameSuffix();
Expand All @@ -258,7 +257,8 @@ public void testRegisterTableWithDifferentTableName(IcebergFileFormat icebergFil
assertUpdate(format("DROP TABLE %s", tableNameNew));
}

@Test(dataProvider = "fileFormats")
@ParameterizedTest
@EnumSource(IcebergFileFormat.class)
public void testRegisterTableWithMetadataFile(IcebergFileFormat icebergFileFormat)
{
String tableName = "test_register_table_with_metadata_file_" + icebergFileFormat.name().toLowerCase(ENGLISH) + "_" + randomNameSuffix();
Expand Down
Loading
Loading