diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java
index 6613fd71a9b5f8..bb1a3074ea1c7a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java
@@ -382,14 +382,17 @@ public PlanFragment visitPhysicalFileSink(PhysicalFileSink fileS
                 .forEach(exprId -> outputExprs.add(context.findSlotRef(exprId)));
         rootFragment.setOutputExprs(outputExprs);
 
+        // generate colLabels
+        List<String> labels = fileSink.getOutput().stream().map(NamedExpression::getName).collect(Collectors.toList());
+
         // TODO: should not call legacy planner analyze in Nereids
         try {
-            outFile.analyze(null, outputExprs,
-                    fileSink.getOutput().stream().map(NamedExpression::getName).collect(Collectors.toList()));
+            outFile.analyze(null, outputExprs, labels);
         } catch (Exception e) {
             throw new AnalysisException(e.getMessage(), e.getCause());
         }
-        ResultFileSink sink = new ResultFileSink(rootFragment.getPlanRoot().getId(), outFile);
+        ResultFileSink sink = new ResultFileSink(rootFragment.getPlanRoot().getId(), outFile,
+                (ArrayList<String>) labels);
         rootFragment.setSink(sink);
 
         return rootFragment;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/ResultFileSink.java b/fe/fe-core/src/main/java/org/apache/doris/planner/ResultFileSink.java
index 02ac6c6a31912a..6d7031f61c775e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/ResultFileSink.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/ResultFileSink.java
@@ -43,7 +43,7 @@ public class ResultFileSink extends DataSink {
     private String header = "";
     private String headerType = "";
 
-    public ResultFileSink(PlanNodeId exchNodeId, OutFileClause outFileClause) {
+    private ResultFileSink(PlanNodeId exchNodeId, OutFileClause outFileClause) {
         this.exchNodeId = exchNodeId;
         this.fileSinkOptions = outFileClause.toSinkOptions();
         this.brokerName = outFileClause.getBrokerDesc() == null ? null :
@@ -54,13 +54,13 @@ public ResultFileSink(PlanNodeId exchNodeId, OutFileClause outFileClause) {
 
     //gen header names
     private String genNames(ArrayList<String> headerNames, String columnSeparator, String lineDelimiter) {
-        String names = "";
+        StringBuilder sb = new StringBuilder();
         for (String name : headerNames) {
-            names += name + columnSeparator;
+            sb.append(name).append(columnSeparator);
        }
-        names = names.substring(0, names.length() - columnSeparator.length());
-        names += lineDelimiter;
-        return names;
+        String headerName = sb.substring(0, sb.length() - columnSeparator.length());
+        headerName += lineDelimiter;
+        return headerName;
     }
 
     public ResultFileSink(PlanNodeId exchNodeId, OutFileClause outFileClause, ArrayList<String> labels) {
diff --git a/regression-test/data/export_p0/test_outfile_csv_with_names.out b/regression-test/data/export_p0/test_outfile_csv_with_names.out
new file mode 100644
index 00000000000000..63fa1d523c8cf5
--- /dev/null
+++ b/regression-test/data/export_p0/test_outfile_csv_with_names.out
@@ -0,0 +1,25 @@
+-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !select_default1 -- +1 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1 +10 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 \N \N \N \N \N \N \N \N \N \N \N +2 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2 +3 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3 +4 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4 +5 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5 +6 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6 +7 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7 +8 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8 +9 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9 + +-- !select_default2 -- +1 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1 +10 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 \N \N \N \N \N \N \N \N \N \N \N +2 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2 +3 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3 +4 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4 +5 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5 +6 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6 +7 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7 +8 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8 +9 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9 + diff --git a/regression-test/data/export_p0/test_outfile_csv_with_names_and_types.out b/regression-test/data/export_p0/test_outfile_csv_with_names_and_types.out new file mode 100644 index 00000000000000..63fa1d523c8cf5 --- /dev/null +++ b/regression-test/data/export_p0/test_outfile_csv_with_names_and_types.out @@ -0,0 +1,25 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !select_default1 -- +1 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1 +10 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 \N \N \N \N \N \N \N \N \N \N \N +2 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2 +3 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3 +4 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4 +5 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5 +6 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6 +7 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7 +8 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8 +9 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9 + +-- !select_default2 -- +1 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1 +10 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 \N \N \N \N \N \N \N \N \N \N \N +2 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2 +3 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3 +4 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4 +5 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5 +6 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6 +7 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7 +8 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8 +9 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9 + diff --git a/regression-test/data/nereids_p0/outfile/test_outfile_csv_with_names.out b/regression-test/data/nereids_p0/outfile/test_outfile_csv_with_names.out new file mode 100644 index 00000000000000..63fa1d523c8cf5 --- /dev/null +++ b/regression-test/data/nereids_p0/outfile/test_outfile_csv_with_names.out @@ -0,0 +1,25 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !select_default1 -- +1 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1 +10 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 \N \N \N \N \N \N \N \N \N \N \N +2 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2 +3 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3 +4 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4 +5 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5 +6 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6 +7 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7 +8 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8 +9 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9 + +-- !select_default2 -- +1 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1 +10 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 \N \N \N \N \N \N \N \N \N \N \N +2 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2 +3 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3 +4 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4 +5 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5 +6 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6 +7 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7 +8 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8 +9 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9 + diff --git a/regression-test/data/nereids_p0/outfile/test_outfile_csv_with_names_and_types.out b/regression-test/data/nereids_p0/outfile/test_outfile_csv_with_names_and_types.out new file mode 100644 index 00000000000000..63fa1d523c8cf5 --- /dev/null +++ b/regression-test/data/nereids_p0/outfile/test_outfile_csv_with_names_and_types.out @@ -0,0 +1,25 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !select_default1 -- +1 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1 +10 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 \N \N \N \N \N \N \N \N \N \N \N +2 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2 +3 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3 +4 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4 +5 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5 +6 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6 +7 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7 +8 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8 +9 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9 + +-- !select_default2 -- +1 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1 +10 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 \N \N \N \N \N \N \N \N \N \N \N +2 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2 +3 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3 +4 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4 +5 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5 +6 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6 +7 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7 +8 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8 +9 2017-10-01 2017-10-01T00:00 2017-10-01 2017-10-01T00:00 2017-10-01T00:00:00.111 2017-10-01T00:00:00.111111 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9 + diff --git a/regression-test/suites/export_p0/test_outfile_csv_with_names.groovy b/regression-test/suites/export_p0/test_outfile_csv_with_names.groovy new file mode 100644 index 00000000000000..9c62a994c47361 --- /dev/null +++ b/regression-test/suites/export_p0/test_outfile_csv_with_names.groovy @@ -0,0 +1,167 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. 
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.codehaus.groovy.runtime.IOGroovyMethods + +import java.nio.charset.StandardCharsets +import java.nio.file.Files +import java.nio.file.Paths + +suite("test_outfile_csv_with_names") { + def dbName = "test_outfile_csv_with_names" + sql "CREATE DATABASE IF NOT EXISTS ${dbName}" + sql "USE $dbName" + StringBuilder strBuilder = new StringBuilder() + strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword) + strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe") + + String command = strBuilder.toString() + def process = command.toString().execute() + def code = process.waitFor() + def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))); + def out = process.getText() + logger.info("Request FE Config: code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + def response = parseJson(out.trim()) + assertEquals(response.code, 0) + assertEquals(response.msg, "success") + def configJson = response.data.rows + boolean enableOutfileToLocal = false + for (Object conf: configJson) { + assert conf instanceof Map + if (((Map) conf).get("Name").toLowerCase() == "enable_outfile_to_local") { + enableOutfileToLocal = ((Map) conf).get("Value").toLowerCase() == "true" + } + } + if (!enableOutfileToLocal) { + logger.warn("Please set enable_outfile_to_local to true to run test_outfile") + return + } + def tableName = "outfil_csv_with_names_test" + def tableName2 = "outfil_csv_with_names_test2" + def uuid = UUID.randomUUID().toString() + def outFilePath = """/tmp/test_outfile_with_names_${uuid}""" + + try { + sql """ DROP TABLE IF EXISTS ${tableName} """ + sql """ + CREATE TABLE IF NOT EXISTS ${tableName} ( + `user_id` INT NOT NULL COMMENT "用户id", + `date` DATE NOT NULL COMMENT "数据灌入日期时间", + `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间", + `date_1` DATEV2 NOT NULL COMMENT "", + `datetime_1` DATETIMEV2 NOT NULL COMMENT "", + `datetime_2` DATETIMEV2(3) NOT NULL COMMENT "", + `datetime_3` DATETIMEV2(6) NOT NULL COMMENT "", + `city` VARCHAR(20) COMMENT "用户所在城市", + `age` SMALLINT COMMENT "用户年龄", + `sex` TINYINT COMMENT "用户性别", + `bool_col` boolean COMMENT "", + `int_col` int COMMENT "", + `bigint_col` bigint COMMENT "", + `largeint_col` int COMMENT "", + `float_col` float COMMENT "", + `double_col` double COMMENT "", + `char_col` CHAR(10) COMMENT "", + `decimal_col` decimal COMMENT "" + ) + DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1"); + """ + StringBuilder sb = new StringBuilder() + int i = 1 + for (; i < 10; i ++) { + sb.append(""" + (${i}, '2017-10-01', '2017-10-01 00:00:00', '2017-10-01', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, 
${i}.${i}, 'char${i}', ${i}), + """) + } + sb.append(""" + (${i}, '2017-10-01', '2017-10-01 00:00:00', '2017-10-01', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL) + """) + sql """ INSERT INTO ${tableName} VALUES + ${sb.toString()} + """ + order_qt_select_default1 """ SELECT * FROM ${tableName} t ORDER BY user_id; """ + + // check outfile + File path = new File(outFilePath) + if (!path.exists()) { + assert path.mkdirs() + } else { + throw new IllegalStateException("""${outFilePath} already exists! """) + } + sql """ + SELECT * FROM ${tableName} t ORDER BY user_id INTO OUTFILE "file://${outFilePath}/" FORMAT AS CSV_WITH_NAMES + PROPERTIES("column_separator" = ","); + """ + + File[] files = path.listFiles() + assert files.length == 1 + // check column names + String columnNames = """user_id,date,datetime,date_1,datetime_1,datetime_2,datetime_3,city,age,sex,bool_col,int_col,bigint_col,largeint_col,float_col,double_col,char_col,decimal_col""" + + List outLines = Files.readAllLines(Paths.get(files[0].getAbsolutePath()), StandardCharsets.UTF_8); + assertEquals(columnNames, outLines.get(0)) + + // check data correctness + sql """ DROP TABLE IF EXISTS ${tableName2} """ + sql """ + CREATE TABLE IF NOT EXISTS ${tableName2} ( + `user_id` INT NOT NULL COMMENT "用户id", + `date` DATE NOT NULL COMMENT "数据灌入日期时间", + `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间", + `date_1` DATEV2 NOT NULL COMMENT "", + `datetime_1` DATETIMEV2 NOT NULL COMMENT "", + `datetime_2` DATETIMEV2(3) NOT NULL COMMENT "", + `datetime_3` DATETIMEV2(6) NOT NULL COMMENT "", + `city` VARCHAR(20) COMMENT "用户所在城市", + `age` SMALLINT COMMENT "用户年龄", + `sex` TINYINT COMMENT "用户性别", + `bool_col` boolean COMMENT "", + `int_col` int COMMENT "", + `bigint_col` bigint COMMENT "", + `largeint_col` int COMMENT "", + `float_col` float COMMENT "", + `double_col` double COMMENT "", + `char_col` CHAR(10) COMMENT "", + `decimal_col` decimal COMMENT "" + ) + DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1"); + """ + + StringBuilder commandBuilder = new StringBuilder() + commandBuilder.append("""curl -v --location-trusted -u ${context.config.feHttpUser}:${context.config.feHttpPassword}""") + commandBuilder.append(""" -H format:csv_with_names -H column_separator:, -T """ + files[0].getAbsolutePath() + """ http://${context.config.feHttpAddress}/api/""" + dbName + "/" + tableName2 + "/_stream_load") + command = commandBuilder.toString() + process = command.execute() + code = process.waitFor() + err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))) + out = process.getText() + logger.info("Run command: command=" + command + ",code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + order_qt_select_default2 """ SELECT * FROM ${tableName2} t ORDER BY user_id; """ + } finally { + try_sql("DROP TABLE IF EXISTS ${tableName}") + try_sql("DROP TABLE IF EXISTS ${tableName2}") + File path = new File(outFilePath) + if (path.exists()) { + for (File f: path.listFiles()) { + f.delete(); + } + path.delete(); + } + } +} diff --git a/regression-test/suites/export_p0/test_outfile_csv_with_names_and_types.groovy b/regression-test/suites/export_p0/test_outfile_csv_with_names_and_types.groovy new file mode 100644 index 00000000000000..4f47eb9c66fb8b --- /dev/null +++ b/regression-test/suites/export_p0/test_outfile_csv_with_names_and_types.groovy @@ -0,0 +1,169 @@ +// Licensed to the Apache 
Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.codehaus.groovy.runtime.IOGroovyMethods + +import java.nio.charset.StandardCharsets +import java.nio.file.Files +import java.nio.file.Paths + +suite("test_outfile_csv_with_names_and_types") { + def dbName = "test_outfile_csv_with_names_and_types" + sql "CREATE DATABASE IF NOT EXISTS ${dbName}" + sql "USE $dbName" + StringBuilder strBuilder = new StringBuilder() + strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword) + strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe") + + String command = strBuilder.toString() + def process = command.toString().execute() + def code = process.waitFor() + def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))); + def out = process.getText() + logger.info("Request FE Config: code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + def response = parseJson(out.trim()) + assertEquals(response.code, 0) + assertEquals(response.msg, "success") + def configJson = response.data.rows + boolean enableOutfileToLocal = false + for (Object conf: configJson) { + assert conf instanceof Map + if (((Map) conf).get("Name").toLowerCase() == "enable_outfile_to_local") { + enableOutfileToLocal = ((Map) conf).get("Value").toLowerCase() == "true" + } + } + if (!enableOutfileToLocal) { + logger.warn("Please set enable_outfile_to_local to true to run test_outfile") + return + } + def tableName = "outfil_csv_with_names_and_types_test" + def tableName2 = "outfil_csv_with_names_and_types_test2" + def uuid = UUID.randomUUID().toString() + def outFilePath = """/tmp/test_outfile_with_names_types_${uuid}""" + + try { + sql """ DROP TABLE IF EXISTS ${tableName} """ + sql """ + CREATE TABLE IF NOT EXISTS ${tableName} ( + `user_id` INT NOT NULL COMMENT "用户id", + `date` DATE NOT NULL COMMENT "数据灌入日期时间", + `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间", + `date_1` DATEV2 NOT NULL COMMENT "", + `datetime_1` DATETIMEV2 NOT NULL COMMENT "", + `datetime_2` DATETIMEV2(3) NOT NULL COMMENT "", + `datetime_3` DATETIMEV2(6) NOT NULL COMMENT "", + `city` VARCHAR(20) COMMENT "用户所在城市", + `age` SMALLINT COMMENT "用户年龄", + `sex` TINYINT COMMENT "用户性别", + `bool_col` boolean COMMENT "", + `int_col` int COMMENT "", + `bigint_col` bigint COMMENT "", + `largeint_col` int COMMENT "", + `float_col` float COMMENT "", + `double_col` double COMMENT "", + `char_col` CHAR(10) COMMENT "", + `decimal_col` decimal COMMENT "" + ) + DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1"); + """ + StringBuilder sb = new StringBuilder() + int i = 1 + for (; i < 10; i ++) { + sb.append(""" + (${i}, '2017-10-01', '2017-10-01 00:00:00', '2017-10-01', '2017-10-01 00:00:00.111111', 
'2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}', ${i}), + """) + } + sb.append(""" + (${i}, '2017-10-01', '2017-10-01 00:00:00', '2017-10-01', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL) + """) + sql """ INSERT INTO ${tableName} VALUES + ${sb.toString()} + """ + order_qt_select_default1 """ SELECT * FROM ${tableName} t ORDER BY user_id; """ + + // check outfile + File path = new File(outFilePath) + if (!path.exists()) { + assert path.mkdirs() + } else { + throw new IllegalStateException("""${outFilePath} already exists! """) + } + sql """ + SELECT * FROM ${tableName} t ORDER BY user_id INTO OUTFILE "file://${outFilePath}/" FORMAT AS csv_with_names_and_types + PROPERTIES("column_separator" = ","); + """ + + File[] files = path.listFiles() + assert files.length == 1 + + // check column names + String columnNames = """user_id,date,datetime,date_1,datetime_1,datetime_2,datetime_3,city,age,sex,bool_col,int_col,bigint_col,largeint_col,float_col,double_col,char_col,decimal_col""" + String columnTypes = """INT,DATEV2,DATETIMEV2,DATEV2,DATETIMEV2,DATETIMEV2,DATETIMEV2,VARCHAR,SMALLINT,TINYINT,BOOL,INT,BIGINT,INT,FLOAT,DOUBLE,CHAR,DECIMAL32""" + List outLines = Files.readAllLines(Paths.get(files[0].getAbsolutePath()), StandardCharsets.UTF_8); + assertEquals(columnNames, outLines.get(0)) + assertEquals(columnTypes, outLines.get(1)) + + // check data correctness + sql """ DROP TABLE IF EXISTS ${tableName2} """ + sql """ + CREATE TABLE IF NOT EXISTS ${tableName2} ( + `user_id` INT NOT NULL COMMENT "用户id", + `date` DATE NOT NULL COMMENT "数据灌入日期时间", + `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间", + `date_1` DATEV2 NOT NULL COMMENT "", + `datetime_1` DATETIMEV2 NOT NULL COMMENT "", + `datetime_2` DATETIMEV2(3) NOT NULL COMMENT "", + `datetime_3` DATETIMEV2(6) NOT NULL COMMENT "", + `city` VARCHAR(20) COMMENT "用户所在城市", + `age` SMALLINT COMMENT "用户年龄", + `sex` TINYINT COMMENT "用户性别", + `bool_col` boolean COMMENT "", + `int_col` int COMMENT "", + `bigint_col` bigint COMMENT "", + `largeint_col` int COMMENT "", + `float_col` float COMMENT "", + `double_col` double COMMENT "", + `char_col` CHAR(10) COMMENT "", + `decimal_col` decimal COMMENT "" + ) + DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1"); + """ + + StringBuilder commandBuilder = new StringBuilder() + commandBuilder.append("""curl -v --location-trusted -u ${context.config.feHttpUser}:${context.config.feHttpPassword}""") + commandBuilder.append(""" -H format:csv_with_names_and_types -H column_separator:, -T """ + files[0].getAbsolutePath() + """ http://${context.config.feHttpAddress}/api/""" + dbName + "/" + tableName2 + "/_stream_load") + command = commandBuilder.toString() + process = command.execute() + code = process.waitFor() + err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))) + out = process.getText() + logger.info("Run command: command=" + command + ",code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + order_qt_select_default2 """ SELECT * FROM ${tableName2} t ORDER BY user_id; """ + } finally { + try_sql("DROP TABLE IF EXISTS ${tableName}") + try_sql("DROP TABLE IF EXISTS ${tableName2}") + File path = new File(outFilePath) + if (path.exists()) { + for (File f: path.listFiles()) { + f.delete(); + } + path.delete(); + } + } +} diff --git 
a/regression-test/suites/nereids_p0/outfile/test_outfile_csv_with_names.groovy b/regression-test/suites/nereids_p0/outfile/test_outfile_csv_with_names.groovy new file mode 100644 index 00000000000000..3fe4a99d79eee0 --- /dev/null +++ b/regression-test/suites/nereids_p0/outfile/test_outfile_csv_with_names.groovy @@ -0,0 +1,171 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.codehaus.groovy.runtime.IOGroovyMethods + +import java.nio.charset.StandardCharsets +import java.nio.file.Files +import java.nio.file.Paths + +suite("test_outfile_csv_with_names") { + sql 'set enable_nereids_planner=true' + sql 'set enable_fallback_to_original_planner=false' + + + def dbName = "test_outfile_csv_with_names" + sql "CREATE DATABASE IF NOT EXISTS ${dbName}" + sql "USE $dbName" + StringBuilder strBuilder = new StringBuilder() + strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword) + strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe") + + String command = strBuilder.toString() + def process = command.toString().execute() + def code = process.waitFor() + def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))); + def out = process.getText() + logger.info("Request FE Config: code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + def response = parseJson(out.trim()) + assertEquals(response.code, 0) + assertEquals(response.msg, "success") + def configJson = response.data.rows + boolean enableOutfileToLocal = false + for (Object conf: configJson) { + assert conf instanceof Map + if (((Map) conf).get("Name").toLowerCase() == "enable_outfile_to_local") { + enableOutfileToLocal = ((Map) conf).get("Value").toLowerCase() == "true" + } + } + if (!enableOutfileToLocal) { + logger.warn("Please set enable_outfile_to_local to true to run test_outfile") + return + } + def tableName = "outfil_csv_with_names_test" + def tableName2 = "outfil_csv_with_names_test2" + def uuid = UUID.randomUUID().toString() + def outFilePath = """/tmp/test_outfile_with_names_${uuid}""" + + try { + sql """ DROP TABLE IF EXISTS ${tableName} """ + sql """ + CREATE TABLE IF NOT EXISTS ${tableName} ( + `user_id` INT NOT NULL COMMENT "用户id", + `date` DATE NOT NULL COMMENT "数据灌入日期时间", + `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间", + `date_1` DATEV2 NOT NULL COMMENT "", + `datetime_1` DATETIMEV2 NOT NULL COMMENT "", + `datetime_2` DATETIMEV2(3) NOT NULL COMMENT "", + `datetime_3` DATETIMEV2(6) NOT NULL COMMENT "", + `city` VARCHAR(20) COMMENT "用户所在城市", + `age` SMALLINT COMMENT "用户年龄", + `sex` TINYINT COMMENT "用户性别", + `bool_col` boolean COMMENT "", + `int_col` int COMMENT "", + `bigint_col` bigint COMMENT "", + `largeint_col` int 
COMMENT "", + `float_col` float COMMENT "", + `double_col` double COMMENT "", + `char_col` CHAR(10) COMMENT "", + `decimal_col` decimal COMMENT "" + ) + DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1"); + """ + StringBuilder sb = new StringBuilder() + int i = 1 + for (; i < 10; i ++) { + sb.append(""" + (${i}, '2017-10-01', '2017-10-01 00:00:00', '2017-10-01', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}', ${i}), + """) + } + sb.append(""" + (${i}, '2017-10-01', '2017-10-01 00:00:00', '2017-10-01', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL) + """) + sql """ INSERT INTO ${tableName} VALUES + ${sb.toString()} + """ + order_qt_select_default1 """ SELECT * FROM ${tableName} t ORDER BY user_id; """ + + // check outfile + File path = new File(outFilePath) + if (!path.exists()) { + assert path.mkdirs() + } else { + throw new IllegalStateException("""${outFilePath} already exists! """) + } + sql """ + SELECT * FROM ${tableName} t ORDER BY user_id INTO OUTFILE "file://${outFilePath}/" FORMAT AS CSV_WITH_NAMES + PROPERTIES("column_separator" = ",");; + """ + + File[] files = path.listFiles() + assert files.length == 1 + + // check column names + String columnNames = """user_id,date,datetime,date_1,datetime_1,datetime_2,datetime_3,city,age,sex,bool_col,int_col,bigint_col,largeint_col,float_col,double_col,char_col,decimal_col""" + + List outLines = Files.readAllLines(Paths.get(files[0].getAbsolutePath()), StandardCharsets.UTF_8); + assertEquals(columnNames, outLines.get(0)) + + sql """ DROP TABLE IF EXISTS ${tableName2} """ + sql """ + CREATE TABLE IF NOT EXISTS ${tableName2} ( + `user_id` INT NOT NULL COMMENT "用户id", + `date` DATE NOT NULL COMMENT "数据灌入日期时间", + `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间", + `date_1` DATEV2 NOT NULL COMMENT "", + `datetime_1` DATETIMEV2 NOT NULL COMMENT "", + `datetime_2` DATETIMEV2(3) NOT NULL COMMENT "", + `datetime_3` DATETIMEV2(6) NOT NULL COMMENT "", + `city` VARCHAR(20) COMMENT "用户所在城市", + `age` SMALLINT COMMENT "用户年龄", + `sex` TINYINT COMMENT "用户性别", + `bool_col` boolean COMMENT "", + `int_col` int COMMENT "", + `bigint_col` bigint COMMENT "", + `largeint_col` int COMMENT "", + `float_col` float COMMENT "", + `double_col` double COMMENT "", + `char_col` CHAR(10) COMMENT "", + `decimal_col` decimal COMMENT "" + ) + DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1"); + """ + + StringBuilder commandBuilder = new StringBuilder() + commandBuilder.append("""curl -v --location-trusted -u ${context.config.feHttpUser}:${context.config.feHttpPassword}""") + commandBuilder.append(""" -H format:csv_with_names -H column_separator:, -T """ + files[0].getAbsolutePath() + """ http://${context.config.feHttpAddress}/api/""" + dbName + "/" + tableName2 + "/_stream_load") + command = commandBuilder.toString() + process = command.execute() + code = process.waitFor() + err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))) + out = process.getText() + logger.info("Run command: command=" + command + ",code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + order_qt_select_default2 """ SELECT * FROM ${tableName2} t ORDER BY user_id; """ + } finally { + try_sql("DROP TABLE IF EXISTS ${tableName}") + try_sql("DROP TABLE IF EXISTS ${tableName2}") + File 
path = new File(outFilePath) + if (path.exists()) { + for (File f: path.listFiles()) { + f.delete(); + } + path.delete(); + } + } +} diff --git a/regression-test/suites/nereids_p0/outfile/test_outfile_csv_with_names_and_types.groovy b/regression-test/suites/nereids_p0/outfile/test_outfile_csv_with_names_and_types.groovy new file mode 100644 index 00000000000000..2add9c3c3a82d7 --- /dev/null +++ b/regression-test/suites/nereids_p0/outfile/test_outfile_csv_with_names_and_types.groovy @@ -0,0 +1,172 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.codehaus.groovy.runtime.IOGroovyMethods + +import java.nio.charset.StandardCharsets +import java.nio.file.Files +import java.nio.file.Paths + +suite("test_outfile_csv_with_names_and_types") { + sql 'set enable_nereids_planner=true' + sql 'set enable_fallback_to_original_planner=false' + + + def dbName = "test_outfile_csv_with_names_and_types" + sql "CREATE DATABASE IF NOT EXISTS ${dbName}" + sql "USE $dbName" + StringBuilder strBuilder = new StringBuilder() + strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword) + strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe") + + String command = strBuilder.toString() + def process = command.toString().execute() + def code = process.waitFor() + def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))); + def out = process.getText() + logger.info("Request FE Config: code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + def response = parseJson(out.trim()) + assertEquals(response.code, 0) + assertEquals(response.msg, "success") + def configJson = response.data.rows + boolean enableOutfileToLocal = false + for (Object conf: configJson) { + assert conf instanceof Map + if (((Map) conf).get("Name").toLowerCase() == "enable_outfile_to_local") { + enableOutfileToLocal = ((Map) conf).get("Value").toLowerCase() == "true" + } + } + if (!enableOutfileToLocal) { + logger.warn("Please set enable_outfile_to_local to true to run test_outfile") + return + } + def tableName = "outfil_csv_with_names_and_types_test" + def tableName2 = "outfil_csv_with_names_and_types_test2" + def uuid = UUID.randomUUID().toString() + def outFilePath = """/tmp/test_outfile_with_names_types_${uuid}""" + + try { + sql """ DROP TABLE IF EXISTS ${tableName} """ + sql """ + CREATE TABLE IF NOT EXISTS ${tableName} ( + `user_id` INT NOT NULL COMMENT "用户id", + `date` DATE NOT NULL COMMENT "数据灌入日期时间", + `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间", + `date_1` DATEV2 NOT NULL COMMENT "", + `datetime_1` DATETIMEV2 NOT NULL COMMENT "", + `datetime_2` DATETIMEV2(3) NOT NULL COMMENT "", + `datetime_3` DATETIMEV2(6) NOT NULL COMMENT "", + 
`city` VARCHAR(20) COMMENT "用户所在城市", + `age` SMALLINT COMMENT "用户年龄", + `sex` TINYINT COMMENT "用户性别", + `bool_col` boolean COMMENT "", + `int_col` int COMMENT "", + `bigint_col` bigint COMMENT "", + `largeint_col` int COMMENT "", + `float_col` float COMMENT "", + `double_col` double COMMENT "", + `char_col` CHAR(10) COMMENT "", + `decimal_col` decimal COMMENT "" + ) + DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1"); + """ + StringBuilder sb = new StringBuilder() + int i = 1 + for (; i < 10; i ++) { + sb.append(""" + (${i}, '2017-10-01', '2017-10-01 00:00:00', '2017-10-01', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}', ${i}), + """) + } + sb.append(""" + (${i}, '2017-10-01', '2017-10-01 00:00:00', '2017-10-01', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', '2017-10-01 00:00:00.111111', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL) + """) + sql """ INSERT INTO ${tableName} VALUES + ${sb.toString()} + """ + order_qt_select_default1 """ SELECT * FROM ${tableName} t ORDER BY user_id; """ + + // check outfile + File path = new File(outFilePath) + if (!path.exists()) { + assert path.mkdirs() + } else { + throw new IllegalStateException("""${outFilePath} already exists! """) + } + sql """ + SELECT * FROM ${tableName} t ORDER BY user_id INTO OUTFILE "file://${outFilePath}/" FORMAT AS csv_with_names_and_types + PROPERTIES("column_separator" = ","); + """ + File[] files = path.listFiles() + assert files.length == 1 + + // check column names + String columnNames = """user_id,date,datetime,date_1,datetime_1,datetime_2,datetime_3,city,age,sex,bool_col,int_col,bigint_col,largeint_col,float_col,double_col,char_col,decimal_col""" + String columnTypes = """INT,DATEV2,DATETIMEV2,DATEV2,DATETIMEV2,DATETIMEV2,DATETIMEV2,VARCHAR,SMALLINT,TINYINT,BOOL,INT,BIGINT,INT,FLOAT,DOUBLE,CHAR,DECIMAL32""" + List outLines = Files.readAllLines(Paths.get(files[0].getAbsolutePath()), StandardCharsets.UTF_8); + assertEquals(columnNames, outLines.get(0)) + assertEquals(columnTypes, outLines.get(1)) + + // check data correctness + sql """ DROP TABLE IF EXISTS ${tableName2} """ + sql """ + CREATE TABLE IF NOT EXISTS ${tableName2} ( + `user_id` INT NOT NULL COMMENT "用户id", + `date` DATE NOT NULL COMMENT "数据灌入日期时间", + `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间", + `date_1` DATEV2 NOT NULL COMMENT "", + `datetime_1` DATETIMEV2 NOT NULL COMMENT "", + `datetime_2` DATETIMEV2(3) NOT NULL COMMENT "", + `datetime_3` DATETIMEV2(6) NOT NULL COMMENT "", + `city` VARCHAR(20) COMMENT "用户所在城市", + `age` SMALLINT COMMENT "用户年龄", + `sex` TINYINT COMMENT "用户性别", + `bool_col` boolean COMMENT "", + `int_col` int COMMENT "", + `bigint_col` bigint COMMENT "", + `largeint_col` int COMMENT "", + `float_col` float COMMENT "", + `double_col` double COMMENT "", + `char_col` CHAR(10) COMMENT "", + `decimal_col` decimal COMMENT "" + ) + DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1"); + """ + + StringBuilder commandBuilder = new StringBuilder() + commandBuilder.append("""curl -v --location-trusted -u ${context.config.feHttpUser}:${context.config.feHttpPassword}""") + commandBuilder.append(""" -H format:csv_with_names_and_types -H column_separator:, -T """ + files[0].getAbsolutePath() + """ http://${context.config.feHttpAddress}/api/""" + dbName + "/" + tableName2 + "/_stream_load") + command = commandBuilder.toString() + process = command.execute() + code = 
process.waitFor() + err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream()))) + out = process.getText() + logger.info("Run command: command=" + command + ",code=" + code + ", out=" + out + ", err=" + err) + assertEquals(code, 0) + order_qt_select_default2 """ SELECT * FROM ${tableName2} t ORDER BY user_id; """ + } finally { + try_sql("DROP TABLE IF EXISTS ${tableName}") + try_sql("DROP TABLE IF EXISTS ${tableName2}") + File path = new File(outFilePath) + if (path.exists()) { + for (File f: path.listFiles()) { + f.delete(); + } + path.delete(); + } + } +}
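
Note on the behavior under review: for FORMAT AS CSV_WITH_NAMES (and CSV_WITH_NAMES_AND_TYPES), the header line is simply the column labels joined by the column separator and terminated by the line delimiter, which is what the StringBuilder-based genNames change above computes. The sketch below reproduces only that joining logic for illustration; the class name, the main method, and the sample labels are hypothetical and not part of this patch.

import java.util.ArrayList;
import java.util.Arrays;

public class GenNamesSketch {
    // Join the labels with the column separator, drop the trailing separator,
    // then append the line delimiter (mirrors the genNames logic in ResultFileSink above).
    static String genNames(ArrayList<String> headerNames, String columnSeparator, String lineDelimiter) {
        StringBuilder sb = new StringBuilder();
        for (String name : headerNames) {
            sb.append(name).append(columnSeparator);
        }
        String headerName = sb.substring(0, sb.length() - columnSeparator.length());
        return headerName + lineDelimiter;
    }

    public static void main(String[] args) {
        ArrayList<String> labels = new ArrayList<>(Arrays.asList("user_id", "date", "city"));
        // Prints "user_id,date,city" followed by a newline, i.e. the kind of
        // header line the regression tests assert on via outLines.get(0).
        System.out.print(genNames(labels, ",", "\n"));
    }
}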