[Migrate][Test] Migrate all UT cases from JUnit 4 to 5 in datasource, registry and data-quality modules (#12352)

* Migrate all UT cases from JUnit 4 to 5 in datasource, registry and data-quality modules

* Fix etcd registry test
EricGao888 authored Oct 14, 2022
1 parent 9652964 commit ac84504
Showing 42 changed files with 381 additions and 364 deletions.
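
The same mechanical pattern repeats across the changed files: JUnit 4 annotations, assertions, and the Mockito runner are replaced by their JUnit 5 (Jupiter) counterparts, while the test logic itself is untouched (plus whitespace-after-comma formatting fixes). Below is a minimal sketch of the pattern, assuming junit-jupiter is on the test classpath; the ExampleMigratedTest class, its field, and its methods are illustrative and not taken from this commit.

// Replacements applied throughout this commit:
//   org.junit.Test                      -> org.junit.jupiter.api.Test
//   org.junit.Before                    -> org.junit.jupiter.api.BeforeEach
//   org.junit.Assert                    -> org.junit.jupiter.api.Assertions
//   @RunWith(MockitoJUnitRunner.class)  -> @ExtendWith(MockitoExtension.class)
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

public class ExampleMigratedTest {

    private int value;

    @BeforeEach   // was @Before in JUnit 4; still runs before every test method
    public void init() {
        value = 1;
    }

    @Test         // now imported from org.junit.jupiter.api
    public void testValue() {
        Assertions.assertEquals(1, value);   // was Assert.assertEquals(1, value)
    }
}

Because none of the migrated assertions pass a failure message, swapping Assert for Assertions is a drop-in change; where a message is used, JUnit 4 takes it as the first argument and Jupiter takes it as the last.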
@@ -23,7 +23,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * SparkApplicationTestBase
@@ -32,14 +32,14 @@ public class SparkApplicationTestBase {
 
     protected SparkRuntimeEnvironment sparkRuntimeEnvironment;
 
-    @Before
+    @BeforeEach
     public void init() {
-        Map<String,Object> config = new HashMap<>();
-        config.put("spark.app.name","data quality test");
-        config.put("spark.sql.crossJoin.enabled","true");
-        config.put("spark.driver.bindAddress","127.0.0.1");
-        config.put("spark.ui.port",13000);
-        config.put("spark.master","local[4]");
+        Map<String, Object> config = new HashMap<>();
+        config.put("spark.app.name", "data quality test");
+        config.put("spark.sql.crossJoin.enabled", "true");
+        config.put("spark.driver.bindAddress", "127.0.0.1");
+        config.put("spark.ui.port", 13000);
+        config.put("spark.master", "local[4]");
 
         sparkRuntimeEnvironment = new SparkRuntimeEnvironment(new Config(config));
     }
@@ -20,8 +20,8 @@
 import org.apache.dolphinscheduler.data.quality.config.DataQualityConfiguration;
 import org.apache.dolphinscheduler.data.quality.utils.JsonUtils;
 
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
 
 /**
  * ConfigurationParserTest
@@ -30,7 +30,7 @@ public class ConfigurationParserTest {
 
     @Test
     public void testConfigurationValidate() {
-        Assert.assertEquals(1,verifyConfigurationValidate());
+        Assertions.assertEquals(1, verifyConfigurationValidate());
     }
 
     private int verifyConfigurationValidate() {
@@ -50,7 +50,8 @@ private int verifyConfigurationValidate() {
                     + " threshold, 3 as operator, 0 as failure_strategy, '2021-06-29 10:18:59' as create_time,'2021-06-29 10:18:59' as update_time "
                     + "from miss_count FULL JOIN total_count\"} }]}";
 
-            DataQualityConfiguration dataQualityConfiguration = JsonUtils.fromJson(parameterStr,DataQualityConfiguration.class);
+            DataQualityConfiguration dataQualityConfiguration =
+                    JsonUtils.fromJson(parameterStr, DataQualityConfiguration.class);
             dataQualityConfiguration.validate();
         } catch (Exception e) {
             flag = 0;
@@ -32,16 +32,16 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  * JdbcConnectorTest
  */
 public class JdbcReaderTest extends FlowTestBase {
 
-    @Before
+    @BeforeEach
     public void before() {
         super.init();
         createConnectorTable();
@@ -50,17 +50,17 @@ public void before() {
     @Test
     public void testJdbcConnectorExecute() {
         JdbcReader jdbcReader = new JdbcReader(buildReaderConfig());
-        Assert.assertNotNull(jdbcReader.read(sparkRuntimeEnvironment));
+        Assertions.assertNotNull(jdbcReader.read(sparkRuntimeEnvironment));
     }
 
     private Config buildReaderConfig() {
-        Map<String,Object> config = new HashMap<>();
-        config.put(DATABASE,"test");
-        config.put(TABLE,"test.test1");
-        config.put(URL,url);
-        config.put(USER,"test");
-        config.put(PASSWORD,"123456");
-        config.put(DRIVER,driver);
+        Map<String, Object> config = new HashMap<>();
+        config.put(DATABASE, "test");
+        config.put(TABLE, "test.test1");
+        config.put(URL, url);
+        config.put(USER, "test");
+        config.put(PASSWORD, "123456");
+        config.put(DRIVER, driver);
         return new Config(config);
     }
 
@@ -34,8 +34,8 @@
 import java.util.List;
 import java.util.Map;
 
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
 
 /**
  * ConnectorFactoryTest
@@ -48,23 +48,23 @@ public void testConnectorGenerate() throws DataQualityException {
         List<ReaderConfig> readerConfigs = new ArrayList<>();
         ReaderConfig readerConfig = new ReaderConfig();
         readerConfig.setType("JDBC");
-        Map<String,Object> config = new HashMap<>();
-        config.put(DATABASE,"test");
-        config.put(TABLE,"test1");
-        config.put(URL,"jdbc:mysql://localhost:3306/test");
-        config.put(USER,"test");
-        config.put(PASSWORD,"123456");
-        config.put(DRIVER,"com.mysql.cj.jdbc.Driver");
+        Map<String, Object> config = new HashMap<>();
+        config.put(DATABASE, "test");
+        config.put(TABLE, "test1");
+        config.put(URL, "jdbc:mysql://localhost:3306/test");
+        config.put(USER, "test");
+        config.put(PASSWORD, "123456");
+        config.put(DRIVER, "com.mysql.cj.jdbc.Driver");
         readerConfig.setConfig(config);
         readerConfigs.add(readerConfig);
 
         int flag = 0;
 
-        List<BatchReader> readers = ReaderFactory.getInstance().getReaders(null,readerConfigs);
+        List<BatchReader> readers = ReaderFactory.getInstance().getReaders(null, readerConfigs);
         if (readers != null && readers.size() >= 1) {
             flag = 1;
         }
 
-        Assert.assertEquals(1,flag);
+        Assertions.assertEquals(1, flag);
     }
 }
@@ -33,15 +33,15 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  * JdbcWriterTest
  */
 public class JdbcWriterTest extends FlowTestBase {
 
-    @Before
+    @BeforeEach
     public void before() {
         super.init();
         createWriterTable();
@@ -51,24 +51,24 @@ public void before() {
     public void testJdbcWriterExecute() {
         JdbcReader jdbcConnector = new JdbcReader(buildJdbcReaderConfig());
         JdbcWriter jdbcWriter = new JdbcWriter(buildJdbcConfig());
-        jdbcWriter.write(jdbcConnector.read(sparkRuntimeEnvironment),sparkRuntimeEnvironment);
+        jdbcWriter.write(jdbcConnector.read(sparkRuntimeEnvironment), sparkRuntimeEnvironment);
     }
 
     private Config buildJdbcConfig() {
-        Map<String,Object> config = new HashMap<>();
-        config.put(DATABASE,"test");
-        config.put(TABLE,"test.test2");
-        config.put(URL,url);
-        config.put(USER,"test");
-        config.put(PASSWORD,"123456");
-        config.put(DRIVER,driver);
-        config.put("save_mode","append");
+        Map<String, Object> config = new HashMap<>();
+        config.put(DATABASE, "test");
+        config.put(TABLE, "test.test2");
+        config.put(URL, url);
+        config.put(USER, "test");
+        config.put(PASSWORD, "123456");
+        config.put(DRIVER, driver);
+        config.put("save_mode", "append");
         return new Config(config);
     }
 
     private Config buildJdbcReaderConfig() {
         Config config = buildJdbcConfig();
-        config.put("sql","SELECT '1' as company,'1' as date,'2' as c1,'2' as c2,'2' as c3, 2 as c4");
+        config.put("sql", "SELECT '1' as company,'1' as date,'2' as c1,'2' as c2,'2' as c3, 2 as c4");
         return config;
     }
 
@@ -25,8 +25,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
 
 /**
  * WriterFactoryTest
@@ -44,11 +44,11 @@ public void testWriterGenerate() throws DataQualityException {
 
         int flag = 0;
 
-        List<BatchWriter> writers = WriterFactory.getInstance().getWriters(null,writerConfigs);
+        List<BatchWriter> writers = WriterFactory.getInstance().getWriters(null, writerConfigs);
         if (writers != null && writers.size() >= 1) {
             flag = 1;
         }
 
-        Assert.assertEquals(1,flag);
+        Assertions.assertEquals(1, flag);
     }
 }
@@ -22,17 +22,17 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
 
 public class ConfigUtilsTest {
 
     @Test
     public void testExtractSubConfig() {
         // Setup
-        Map<String,Object> configMap = new HashMap<>();
-        configMap.put("aaa.www","1");
-        configMap.put("bbb.www","1");
+        Map<String, Object> configMap = new HashMap<>();
+        configMap.put("aaa.www", "1");
+        configMap.put("bbb.www", "1");
 
         final Config source = new Config(configMap);
 
@@ -41,6 +41,6 @@ public void testExtractSubConfig() {
         int expect = 1;
         int actual = result.entrySet().size();
 
-        Assert.assertEquals(expect,actual);
+        Assertions.assertEquals(expect, actual);
     }
 }
@@ -23,14 +23,14 @@
 
 import java.sql.Connection;
 
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
 
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class CommonDataSourceClientTest {
 
     @Mock
@@ -83,6 +83,6 @@ public void testCheckClient() {
     public void testGetConnection() {
         Connection connection = Mockito.mock(Connection.class);
         Mockito.when(commonDataSourceClient.getConnection()).thenReturn(connection);
-        Assert.assertNotNull(commonDataSourceClient.getConnection());
+        Assertions.assertNotNull(commonDataSourceClient.getConnection());
    }
 }
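
The datasource tests also swap the Mockito JUnit 4 runner for the JUnit 5 extension, as in the hunk above; @Mock fields and Mockito stubbing calls are unchanged. A minimal sketch of the migrated shape, assuming MockitoExtension comes from the mockito-junit-jupiter artifact; the ExampleMockedTest class and its Runnable mock are illustrative, not part of this commit.

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;

@ExtendWith(MockitoExtension.class)   // replaces @RunWith(MockitoJUnitRunner.class)
public class ExampleMockedTest {

    @Mock
    private Runnable task;   // hypothetical mock, for illustration only

    @Test
    public void testTaskRuns() {
        task.run();
        Mockito.verify(task).run();   // Mockito API itself is unchanged by the migration
        Assertions.assertNotNull(task);
    }
}

Moving from a runner to an extension also lifts the JUnit 4 limit of one @RunWith per class: Jupiter allows several extensions to be combined on the same test.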
@@ -24,7 +24,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class AbstractDataSourceProcessorTest {
 