[fix](Export) Set the default value of the data_consistency property of export to partition #32830

Merged: 5 commits, Apr 3, 2024
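In short: an EXPORT that does not set `data_consistency` now runs with partition-level consistency rather than none. A minimal sketch of the user-visible change, reusing the table and path from the tests in this PR:

```sql
-- After this change the two statements below are equivalent;
-- before it, omitting the property meant "none".
EXPORT TABLE testDb.table1 TO "file:///tmp/exp_";

EXPORT TABLE testDb.table1 TO "file:///tmp/exp_"
PROPERTIES("data_consistency" = "partition");
```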
18 changes: 10 additions & 8 deletions fe/fe-core/src/main/java/org/apache/doris/analysis/ExportStmt.java
@@ -106,7 +106,7 @@ public class ExportStmt extends StatementBase {
     private String maxFileSize;
     private String deleteExistingFiles;
     private String withBom;
-    private String dataConsistency;
+    private String dataConsistency = ExportJob.CONSISTENT_PARTITION;
     private SessionVariable sessionVariables;

     private String qualifiedUser;
@@ -365,14 +365,16 @@ private void checkProperties(Map<String, String> properties) throws UserException
         this.withBom = properties.getOrDefault(OutFileClause.PROP_WITH_BOM, "false");

         // data consistency
-        String dataConsistencyStr = properties.get(DATA_CONSISTENCY);
-        if (dataConsistencyStr != null) {
-            if (!dataConsistencyStr.equalsIgnoreCase(ExportJob.CONSISTENT_PARTITION)) {
-                throw new UserException("The value of data_consistency is invalid, only `partition` is allowed");
+        if (properties.containsKey(DATA_CONSISTENCY)) {
+            String dataConsistencyStr = properties.get(DATA_CONSISTENCY);
+            if (ExportJob.CONSISTENT_NONE.equalsIgnoreCase(dataConsistencyStr)) {
+                this.dataConsistency = ExportJob.CONSISTENT_NONE;
+            } else if (ExportJob.CONSISTENT_PARTITION.equalsIgnoreCase(dataConsistencyStr)) {
+                this.dataConsistency = ExportJob.CONSISTENT_PARTITION;
+            } else {
+                throw new AnalysisException("The value of data_consistency is invalid, please use `"
+                        + ExportJob.CONSISTENT_PARTITION + "`/`" + ExportJob.CONSISTENT_NONE + "`");
             }
-            this.dataConsistency = ExportJob.CONSISTENT_PARTITION;
-        } else {
-            this.dataConsistency = ExportJob.CONSISTENT_NONE;
         }
     }

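Both accepted spellings are matched case-insensitively, and any other value is rejected with an error that now names both options. A sketch of the resulting behavior (table and path reused from the tests; the exact error text assumes the two constants are the strings "partition" and "none"):

```sql
-- Accepted: "partition" or "none", in any letter case.
EXPORT TABLE testDb.table1 TO "file:///tmp/exp_"
PROPERTIES("data_consistency" = "NONE");

-- Rejected: anything else, e.g. the old constructor default "all".
EXPORT TABLE testDb.table1 TO "file:///tmp/exp_"
PROPERTIES("data_consistency" = "all");
-- ERROR: The value of data_consistency is invalid, please use `partition`/`none`
```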
The same default is applied in the ExportJob constructor, replacing the old literal "all", which was never one of the accepted values:

@@ -228,7 +228,7 @@ public ExportJob() {
         this.lineDelimiter = "\n";
         this.columns = "";
         this.withBom = "false";
-        this.dataConsistency = "all";
+        this.dataConsistency = CONSISTENT_PARTITION;
     }

     public ExportJob(long jobId) {
The same validation change lands in generateExportJob:

@@ -313,14 +313,16 @@ private ExportJob generateExportJob(ConnectContext ctx, Map<String, String> fileProperties
         exportJob.setUserIdentity(ctx.getCurrentUserIdentity());

         // set data consistency
-        String dataConsistencyStr = fileProperties.get(DATA_CONSISTENCY);
-        if (dataConsistencyStr != null) {
-            if (!dataConsistencyStr.equalsIgnoreCase(ExportJob.CONSISTENT_PARTITION)) {
-                throw new AnalysisException("The value of data_consistency is invalid, only partition is allowed!");
+        if (fileProperties.containsKey(DATA_CONSISTENCY)) {
+            String dataConsistencyStr = fileProperties.get(DATA_CONSISTENCY);
+            if (ExportJob.CONSISTENT_NONE.equalsIgnoreCase(dataConsistencyStr)) {
+                exportJob.setDataConsistency(ExportJob.CONSISTENT_NONE);
+            } else if (ExportJob.CONSISTENT_PARTITION.equalsIgnoreCase(dataConsistencyStr)) {
+                exportJob.setDataConsistency(ExportJob.CONSISTENT_PARTITION);
+            } else {
+                throw new AnalysisException("The value of data_consistency is invalid, please use `"
+                        + ExportJob.CONSISTENT_PARTITION + "`/`" + ExportJob.CONSISTENT_NONE + "`");
             }
-            exportJob.setDataConsistency(ExportJob.CONSISTENT_PARTITION);
-        } else {
-            exportJob.setDataConsistency(ExportJob.CONSISTENT_NONE);
         }

         // Must copy session variable, because session variable may be changed during export job running.
The FE unit tests now set `data_consistency` to `none` explicitly, so the tablet-level splitting their assertions encode is unchanged:

@@ -79,7 +79,10 @@ protected void runBeforeAll() throws Exception {
     public void testNormal() throws UserException {
         // The origin export sql
         String exportSql = "EXPORT TABLE testDb.table1\n"
-                + "TO \"file:///tmp/exp_\";";
+                + "TO \"file:///tmp/exp_\" "
+                + "PROPERTIES(\n"
+                + "\"data_consistency\" = \"none\"\n"
+                + ");";

         List<Long> currentTablets1 = Arrays.asList(10010L, 10012L, 10014L, 10016L, 10018L, 10020L, 10022L, 10024L,
                 10026L, 10028L);

@@ -126,7 +129,8 @@ public void testNormalParallelism() throws UserException {
         String exportSql = "EXPORT TABLE testDb.table1\n"
                 + "TO \"file:///tmp/exp_\" "
                 + "PROPERTIES(\n"
-                + "\"parallelism\" = \"4\"\n"
+                + "\"parallelism\" = \"4\",\n"
+                + "\"data_consistency\" = \"none\"\n"
                 + ");";

         // This export sql should generate 4 array, and there should be 1 outfile sql in per array.

@@ -180,7 +184,8 @@ public void testMultiOutfilePerParalle() throws UserException {
         String exportSql = "EXPORT TABLE testDb.table1\n"
                 + "TO \"file:///tmp/exp_\" "
                 + "PROPERTIES(\n"
-                + "\"parallelism\" = \"3\"\n"
+                + "\"parallelism\" = \"3\",\n"
+                + "\"data_consistency\" = \"none\"\n"
                 + ");";

         // This export sql should generate 4 array, and there should be 1 outfile sql in per array.

@@ -240,7 +245,8 @@ public void testPartitionParallelism() throws UserException {
         String exportSql = "EXPORT TABLE testDb.table1 PARTITION (p1)\n"
                 + "TO \"file:///tmp/exp_\" "
                 + "PROPERTIES(\n"
-                + "\"parallelism\" = \"4\"\n"
+                + "\"parallelism\" = \"4\",\n"
+                + "\"data_consistency\" = \"none\"\n"
                 + ");";

         // This export sql should generate 4 array, and there should be 1 outfile sql in per array.

@@ -293,7 +299,8 @@ public void testMultiPartitionParallelism() throws UserException {
         String exportSql = "EXPORT TABLE testDb.table1 PARTITION (p1, p4)\n"
                 + "TO \"file:///tmp/exp_\" "
                 + "PROPERTIES(\n"
-                + "\"parallelism\" = \"4\"\n"
+                + "\"parallelism\" = \"4\",\n"
+                + "\"data_consistency\" = \"none\"\n"
                 + ");";

         // This export sql should generate 4 array, and there should be 1 outfile sql in per array.

@@ -344,7 +351,8 @@ public void testParallelismLessThanTablets() throws UserException {
         String exportSql = "EXPORT TABLE testDb.table1 PARTITION (p1)\n"
                 + "TO \"file:///tmp/exp_\" "
                 + "PROPERTIES(\n"
-                + "\"parallelism\" = \"20\"\n"
+                + "\"parallelism\" = \"20\",\n"
+                + "\"data_consistency\" = \"none\"\n"
                 + ");";

         // This export sql should generate 10 array because parallelism is less than the number of tablets,
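Spelled out, the statement the first updated test builds is just:

```sql
EXPORT TABLE testDb.table1
TO "file:///tmp/exp_" PROPERTIES(
"data_consistency" = "none"
);
```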
33 changes: 22 additions & 11 deletions regression-test/suites/export_p0/test_export_basic.groovy
@@ -149,7 +149,8 @@ suite("test_export_basic", "p0") {
         PROPERTIES(
             "label" = "${label}",
             "format" = "csv",
-            "column_separator"=","
+            "column_separator"=",",
+            "data_consistency" = "none"
         );
     """
     waiting_export.call(db, label)

@@ -214,7 +215,8 @@
         PROPERTIES(
             "label" = "${label}",
             "format" = "csv",
-            "column_separator"=","
+            "column_separator"=",",
+            "data_consistency" = "none"
         );
     """
     waiting_export.call(db, label)

@@ -279,7 +281,8 @@
         PROPERTIES(
             "label" = "${label}",
             "format" = "csv",
-            "column_separator"=","
+            "column_separator"=",",
+            "data_consistency" = "none"
         );
     """
     waiting_export.call(db, label)

@@ -344,7 +347,8 @@
         PROPERTIES(
             "label" = "${label}",
             "format" = "csv",
-            "column_separator"=","
+            "column_separator"=",",
+            "data_consistency" = "none"
         );
     """
     waiting_export.call(db, label)

@@ -411,7 +415,8 @@
         PROPERTIES(
             "label" = "${label1}",
             "format" = "csv",
-            "column_separator"=","
+            "column_separator"=",",
+            "data_consistency" = "none"
         );
     """
     sql """

@@ -420,7 +425,8 @@
         PROPERTIES(
             "label" = "${label2}",
             "format" = "csv",
-            "column_separator"=","
+            "column_separator"=",",
+            "data_consistency" = "none"
         );
     """
     waiting_export.call(db, label1)

@@ -454,7 +460,8 @@
             "label" = "${label}",
             "format" = "csv",
             "column_separator"=",",
-            "columns" = "id, name"
+            "columns" = "id, name",
+            "data_consistency" = "none"
         );
     """
     waiting_export.call(db, label)

@@ -519,7 +526,8 @@
             "label" = "${label}",
             "format" = "csv",
             "column_separator"=",",
-            "columns" = "id"
+            "columns" = "id",
+            "data_consistency" = "none"
         );
     """
     waiting_export.call(db, label)

@@ -588,7 +596,8 @@
         PROPERTIES(
             "label" = "${label}",
             "format" = "csv",
-            "column_separator"=","
+            "column_separator"=",",
+            "data_consistency" = "none"
         );
     """
     waiting_export.call(label_db, label)

@@ -601,7 +610,8 @@
         PROPERTIES(
             "label" = "${label}",
             "format" = "csv",
-            "column_separator"=","
+            "column_separator"=",",
+            "data_consistency" = "none"
         );
     """
     exception "has already been used"

@@ -625,7 +635,8 @@
         PROPERTIES(
             "label" = "${label}",
             "format" = "csv",
-            "column_separator"=","
+            "column_separator"=",",
+            "data_consistency" = "none"
         );
     """
     waiting_export.call(label_db, label)
Finally, the test_export_data_consistency suite drops the explicit "partition" setting, which is now redundant, so it keeps exercising the new default:

@@ -150,8 +150,7 @@ suite("test_export_data_consistency", "p0") {
             "label" = "${label}",
             "format" = "csv",
             "column_separator" = ",",
-            "parallelism" = "10",
-            "data_consistency" = "partition"
+            "parallelism" = "10"
         );
     """
     // do insert in parallel
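Because `partition` is now the default, the trimmed statement is equivalent to the old one. A sketch with a hypothetical label value:

```sql
EXPORT TABLE testDb.table1 TO "file:///tmp/exp_"
PROPERTIES(
    "label" = "export_consistency_test",
    "format" = "csv",
    "column_separator" = ",",
    "parallelism" = "10"
);
-- "label" is hypothetical; "data_consistency" = "partition" is implied.
```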