From af45f02148fb419aa43575176ce99d01f68480a3 Mon Sep 17 00:00:00 2001 From: lidezhu <47731263+lidezhu@users.noreply.github.com> Date: Fri, 14 May 2021 14:06:59 +0800 Subject: [PATCH] Push down filter on timestamp type column to storage layer (#1875) * try push down timestamp filter to storage --- dbms/src/Databases/test/gtest_database.cpp | 2 +- dbms/src/Debug/DBGInvoker.cpp | 3 + dbms/src/Debug/dbgFuncMisc.cpp | 41 +++++ dbms/src/Debug/dbgFuncMisc.h | 15 ++ .../Coprocessor/DAGQueryBlockInterpreter.cpp | 3 +- dbms/src/Flash/Coprocessor/DAGQueryInfo.h | 7 +- .../FilterParser/FilterParser_dag.cpp | 54 +++++-- .../tests/gtest_dm_minmax_index.cpp | 13 ++ .../misc/timestamp_rough_set_filter.test | 148 ++++++++++++++++++ .../expr/timestamp_filter.test | 35 +++++ tests/run-test.py | 10 +- tests/run-test.sh | 6 +- 12 files changed, 312 insertions(+), 25 deletions(-) create mode 100644 dbms/src/Debug/dbgFuncMisc.cpp create mode 100644 dbms/src/Debug/dbgFuncMisc.h create mode 100644 tests/delta-merge-test/query/misc/timestamp_rough_set_filter.test diff --git a/dbms/src/Databases/test/gtest_database.cpp b/dbms/src/Databases/test/gtest_database.cpp index b5f25b49c26..526a18be483 100644 --- a/dbms/src/Databases/test/gtest_database.cpp +++ b/dbms/src/Databases/test/gtest_database.cpp @@ -796,7 +796,7 @@ DatabasePtr detachThenAttach(Context & ctx, const String & db_name, DatabasePtr } db = ctx.getDatabase(db_name); - return db; + return std::move(db); } TEST_F(DatabaseTiFlash_test, Tombstone) diff --git a/dbms/src/Debug/DBGInvoker.cpp b/dbms/src/Debug/DBGInvoker.cpp index e80039d6b12..3594764513e 100644 --- a/dbms/src/Debug/DBGInvoker.cpp +++ b/dbms/src/Debug/DBGInvoker.cpp @@ -3,6 +3,7 @@ #include #include #include +#include #include #include #include @@ -103,6 +104,8 @@ DBGInvoker::DBGInvoker() regSchemalessFunc("mapped_database", dbgFuncMappedDatabase); regSchemalessFunc("mapped_table", dbgFuncMappedTable); regSchemafulFunc("query_mapped", dbgFuncQueryMapped); + + 
regSchemalessFunc("search_log_for_key", dbgFuncSearchLogForKey); } void replaceSubstr(std::string & str, const std::string & target, const std::string & replacement) diff --git a/dbms/src/Debug/dbgFuncMisc.cpp b/dbms/src/Debug/dbgFuncMisc.cpp new file mode 100644 index 00000000000..374e6cd4007 --- /dev/null +++ b/dbms/src/Debug/dbgFuncMisc.cpp @@ -0,0 +1,41 @@ +#include <Common/typeid_cast.h> +#include <Debug/dbgFuncMisc.h> +#include <Interpreters/Context.h> +#include <Parsers/ASTLiteral.h> + +#include <fstream> +#include <regex> + +namespace DB +{ +void dbgFuncSearchLogForKey(Context & context, const ASTs & args, DBGInvoker::Printer output) +{ + if (args.size() < 1) + throw Exception("Args not matched, should be: key", ErrorCodes::BAD_ARGUMENTS); + + String key = safeGet<String>(typeid_cast<const ASTLiteral &>(*args[0]).value); + auto log_path = context.getConfigRef().getString("logger.log"); + + std::ifstream file(log_path); + std::vector<String> line_candidates; + String line; + while (std::getline(file, line)) + { + if ((line.find(key) != String::npos) && (line.find("DBGInvoke") == String::npos)) + line_candidates.emplace_back(line); + } + if (line_candidates.empty()) + { + output("Invalid"); + return; + } + auto & target_line = line_candidates.back(); + auto sub_line = target_line.substr(target_line.find(key)); + std::regex rx(R"([+-]?([0-9]+([.][0-9]*)?|[.][0-9]+))"); + std::smatch m; + if (regex_search(sub_line, m, rx)) + output(m[1]); + else + output("Invalid"); +} +} // namespace DB diff --git a/dbms/src/Debug/dbgFuncMisc.h b/dbms/src/Debug/dbgFuncMisc.h new file mode 100644 index 00000000000..3f8cea986d1 --- /dev/null +++ b/dbms/src/Debug/dbgFuncMisc.h @@ -0,0 +1,15 @@ +#pragma once + +#include <Debug/DBGInvoker.h> + +namespace DB +{ + +class Context; + +// Find the last occurrence of `key` in the log file and extract the first number following the key. 
+// Usage: +// ./storage-client.sh "DBGInvoke search_log_for_key(key)" +void dbgFuncSearchLogForKey(Context & context, const ASTs & args, DBGInvoker::Printer output); + +} // namespace DB diff --git a/dbms/src/Flash/Coprocessor/DAGQueryBlockInterpreter.cpp b/dbms/src/Flash/Coprocessor/DAGQueryBlockInterpreter.cpp index ea6f44787ad..c554dd08a53 100644 --- a/dbms/src/Flash/Coprocessor/DAGQueryBlockInterpreter.cpp +++ b/dbms/src/Flash/Coprocessor/DAGQueryBlockInterpreter.cpp @@ -230,7 +230,8 @@ void DAGQueryBlockInterpreter::executeTS(const tipb::TableScan & ts, Pipeline & SelectQueryInfo query_info; /// to avoid null point exception query_info.query = dummy_query; - query_info.dag_query = std::make_unique(conditions, analyzer->getPreparedSets(), analyzer->getCurrentInputColumns()); + query_info.dag_query = std::make_unique( + conditions, analyzer->getPreparedSets(), analyzer->getCurrentInputColumns(), context.getTimezoneInfo()); query_info.mvcc_query_info = std::move(mvcc_query_info); FAIL_POINT_PAUSE(FailPoints::pause_after_learner_read); diff --git a/dbms/src/Flash/Coprocessor/DAGQueryInfo.h b/dbms/src/Flash/Coprocessor/DAGQueryInfo.h index 6a34af4f411..092387845d3 100644 --- a/dbms/src/Flash/Coprocessor/DAGQueryInfo.h +++ b/dbms/src/Flash/Coprocessor/DAGQueryInfo.h @@ -11,13 +11,16 @@ namespace DB // be used to extracted key conditions by storage engine struct DAGQueryInfo { - DAGQueryInfo(const std::vector & filters_, DAGPreparedSets dag_sets_, const std::vector & source_columns_) - : filters(filters_), dag_sets(std::move(dag_sets_)), source_columns(source_columns_){}; + DAGQueryInfo(const std::vector & filters_, DAGPreparedSets dag_sets_, + const std::vector & source_columns_, const TimezoneInfo & timezone_info_) + : filters(filters_), dag_sets(std::move(dag_sets_)), source_columns(source_columns_), timezone_info(timezone_info_){}; // filters in dag request const std::vector & filters; // Prepared sets extracted from dag request, which are used for indices // by 
storage engine. DAGPreparedSets dag_sets; const std::vector & source_columns; + + const TimezoneInfo & timezone_info; }; } // namespace DB diff --git a/dbms/src/Storages/DeltaMerge/FilterParser/FilterParser_dag.cpp b/dbms/src/Storages/DeltaMerge/FilterParser/FilterParser_dag.cpp index 1490558d027..61b07972016 100644 --- a/dbms/src/Storages/DeltaMerge/FilterParser/FilterParser_dag.cpp +++ b/dbms/src/Storages/DeltaMerge/FilterParser/FilterParser_dag.cpp @@ -42,10 +42,8 @@ inline bool isRoughSetFilterSupportType(const Int32 field_type) case TiDB::TypeDate: case TiDB::TypeTime: case TiDB::TypeDatetime: - return true; - // For timestamp, should take time_zone into consideration. Disable it. case TiDB::TypeTimestamp: - return false; + return true; // For these types, should take collation into consideration. Disable them. case TiDB::TypeVarchar: case TiDB::TypeJSON: @@ -89,6 +87,7 @@ inline RSOperatorPtr parseTiCompareExpr( // const FilterParser::RSFilterType filter_type, const ColumnDefines & columns_to_read, const FilterParser::AttrCreatorByColumnID & creator, + const TimezoneInfo & timezone_info, Poco::Logger * /* log */) { if (unlikely(expr.children_size() != 2)) @@ -101,10 +100,16 @@ inline RSOperatorPtr parseTiCompareExpr( // Attr attr; Field value; - UInt32 state = 0x0; - constexpr UInt32 state_has_column = 0x1; - constexpr UInt32 state_has_literal = 0x2; - constexpr UInt32 state_finish = state_has_column | state_has_literal; + UInt32 state = 0x0; + constexpr UInt32 state_has_column = 0x1; + constexpr UInt32 state_has_literal = 0x2; + constexpr UInt32 state_finish = state_has_column | state_has_literal; + bool is_timestamp_column = false; + for (const auto & child : expr.children()) + { + if (isColumnExpr(child)) + is_timestamp_column = (child.field_type().tp() == TiDB::TypeTimestamp); + } for (const auto & child : expr.children()) { if (isColumnExpr(child)) @@ -125,6 +130,27 @@ inline RSOperatorPtr parseTiCompareExpr( // { state |= state_has_literal; value = 
decodeLiteral(child); + if (is_timestamp_column) + { + auto literal_type = child.field_type().tp(); + if (unlikely(literal_type != TiDB::TypeTimestamp && literal_type != TiDB::TypeDatetime)) + return createUnsupported(expr.ShortDebugString(), + "Compare timestamp column with literal type(" + DB::toString(literal_type) + + ") is not supported", + false); + // convert literal value from timezone specified in cop request to UTC + if (literal_type == TiDB::TypeDatetime && !timezone_info.is_utc_timezone) + { + static const auto & time_zone_utc = DateLUT::instance("UTC"); + UInt64 from_time = value.get(); + UInt64 result_time = from_time; + if (timezone_info.is_name_based) + convertTimeZone(from_time, result_time, *timezone_info.timezone, time_zone_utc); + else if (timezone_info.timezone_offset != 0) + convertTimeZoneByOffset(from_time, result_time, -timezone_info.timezone_offset, time_zone_utc); + value = Field(result_time); + } + } } } @@ -167,6 +193,7 @@ inline RSOperatorPtr parseTiCompareExpr( // RSOperatorPtr parseTiExpr(const tipb::Expr & expr, const ColumnDefines & columns_to_read, const FilterParser::AttrCreatorByColumnID & creator, + const TimezoneInfo & timezone_info, Poco::Logger * log) { assert(isFunctionExpr(expr)); @@ -192,7 +219,7 @@ RSOperatorPtr parseTiExpr(const tipb::Expr & expr, { const auto & child = expr.children(0); if (likely(isFunctionExpr(child))) - op = createNot(parseTiExpr(child, columns_to_read, creator, log)); + op = createNot(parseTiExpr(child, columns_to_read, creator, timezone_info, log)); else op = createUnsupported(child.ShortDebugString(), "child of logical not is not function", false); } @@ -206,7 +233,7 @@ RSOperatorPtr parseTiExpr(const tipb::Expr & expr, { const auto & child = expr.children(i); if (likely(isFunctionExpr(child))) - children.emplace_back(parseTiExpr(child, columns_to_read, creator, log)); + children.emplace_back(parseTiExpr(child, columns_to_read, creator, timezone_info, log)); else 
children.emplace_back(createUnsupported(child.ShortDebugString(), "child of logical operator is not function", false)); } @@ -223,7 +250,7 @@ RSOperatorPtr parseTiExpr(const tipb::Expr & expr, case FilterParser::RSFilterType::GreaterEqual: case FilterParser::RSFilterType::Less: case FilterParser::RSFilterType::LessEuqal: - op = parseTiCompareExpr(expr, filter_type, columns_to_read, creator, log); + op = parseTiCompareExpr(expr, filter_type, columns_to_read, creator, timezone_info, log); break; case FilterParser::RSFilterType::In: @@ -246,10 +273,11 @@ RSOperatorPtr parseTiExpr(const tipb::Expr & expr, inline RSOperatorPtr tryParse(const tipb::Expr & filter, const ColumnDefines & columns_to_read, const FilterParser::AttrCreatorByColumnID & creator, + const TimezoneInfo & timezone_info, Poco::Logger * log) { if (isFunctionExpr(filter)) - return cop::parseTiExpr(filter, columns_to_read, creator, log); + return cop::parseTiExpr(filter, columns_to_read, creator, timezone_info, log); else return createUnsupported(filter.ShortDebugString(), "child of logical and is not function", false); } @@ -268,7 +296,7 @@ RSOperatorPtr FilterParser::parseDAGQuery(const DAGQueryInfo & if (dag_info.filters.size() == 1) { - op = cop::tryParse(*dag_info.filters[0], columns_to_read, creator, log); + op = cop::tryParse(*dag_info.filters[0], columns_to_read, creator, dag_info.timezone_info, log); } else { @@ -277,7 +305,7 @@ RSOperatorPtr FilterParser::parseDAGQuery(const DAGQueryInfo & for (size_t i = 0; i < dag_info.filters.size(); ++i) { const auto & filter = *dag_info.filters[i]; - children.emplace_back(cop::tryParse(filter, columns_to_read, creator, log)); + children.emplace_back(cop::tryParse(filter, columns_to_read, creator, dag_info.timezone_info, log)); } op = createAnd(children); } diff --git a/dbms/src/Storages/DeltaMerge/tests/gtest_dm_minmax_index.cpp b/dbms/src/Storages/DeltaMerge/tests/gtest_dm_minmax_index.cpp index 89e61b84099..a79d30373d4 100644 --- 
a/dbms/src/Storages/DeltaMerge/tests/gtest_dm_minmax_index.cpp +++ b/dbms/src/Storages/DeltaMerge/tests/gtest_dm_minmax_index.cpp @@ -128,6 +128,19 @@ try ASSERT_EQ(true, checkMatch(__FUNCTION__, *context, "Date", "2020-09-27", createLessEqual(attr("Date"), Field((String) "2020-09-27"), 0))); ASSERT_EQ(false, checkMatch(__FUNCTION__, *context, "Date", "2020-09-27", createLessEqual(attr("Date"), Field((String) "2020-09-26"), 0))); + ASSERT_EQ(true, checkMatch(__FUNCTION__, *context, "MyDateTime", "2020-09-27", createEqual(attr("MyDateTime"), parseMyDateTime("2020-09-27")))); + ASSERT_EQ(false, checkMatch(__FUNCTION__, *context, "MyDateTime", "2020-09-27", createEqual(attr("MyDateTime"), parseMyDateTime("2020-09-28")))); + ASSERT_EQ(true, checkMatch(__FUNCTION__, *context, "MyDateTime", "2020-09-27", createIn(attr("MyDateTime"), {parseMyDateTime("2020-09-27")}))); + ASSERT_EQ(false, checkMatch(__FUNCTION__, *context, "MyDateTime", "2020-09-27", createIn(attr("MyDateTime"), {parseMyDateTime("2020-09-28")}))); + ASSERT_EQ(true, checkMatch(__FUNCTION__, *context, "MyDateTime", "2020-09-27", createGreater(attr("MyDateTime"), parseMyDateTime("2020-09-26"), 0))); + ASSERT_EQ(false, checkMatch(__FUNCTION__, *context, "MyDateTime", "2020-09-27", createGreater(attr("MyDateTime"), parseMyDateTime("2020-09-27"), 0))); + ASSERT_EQ(true, checkMatch(__FUNCTION__, *context, "MyDateTime", "2020-09-27", createGreaterEqual(attr("MyDateTime"), parseMyDateTime("2020-09-27"), 0))); + ASSERT_EQ(false, checkMatch(__FUNCTION__, *context, "MyDateTime", "2020-09-27", createGreaterEqual(attr("MyDateTime"), parseMyDateTime("2020-09-28"), 0))); + ASSERT_EQ(true, checkMatch(__FUNCTION__, *context, "MyDateTime", "2020-09-27", createLess(attr("MyDateTime"), parseMyDateTime("2020-09-28"), 0))); + ASSERT_EQ(false, checkMatch(__FUNCTION__, *context, "MyDateTime", "2020-09-27", createLess(attr("MyDateTime"), parseMyDateTime("2020-09-27"), 0))); + ASSERT_EQ(true, checkMatch(__FUNCTION__, *context, 
"MyDateTime", "2020-09-27", createLessEqual(attr("MyDateTime"), parseMyDateTime("2020-09-27"), 0))); + ASSERT_EQ(false, checkMatch(__FUNCTION__, *context, "MyDateTime", "2020-09-27", createLessEqual(attr("MyDateTime"), parseMyDateTime("2020-09-26"), 0))); + /// Currently we don't do filtering for null values. i.e. if a pack contains any null values, then the pack will pass the filter. ASSERT_EQ(true, checkMatch(__FUNCTION__, *context, "Nullable(Int64)", {{"0", "0", "0", "100"}, {"1", "1", "0", "\\N"}}, createEqual(attr("Nullable(Int64)"), Field((Int64)101)))); ASSERT_EQ(true, checkMatch(__FUNCTION__, *context, "Nullable(Int64)", {{"0", "0", "0", "100"}, {"1", "1", "0", "\\N"}}, createIn(attr("Nullable(Int64)"), {Field((Int64)101)}))); diff --git a/tests/delta-merge-test/query/misc/timestamp_rough_set_filter.test b/tests/delta-merge-test/query/misc/timestamp_rough_set_filter.test new file mode 100644 index 00000000000..188b73ae65e --- /dev/null +++ b/tests/delta-merge-test/query/misc/timestamp_rough_set_filter.test @@ -0,0 +1,148 @@ +# Must enable DT rough set filter and open debug level log to run this test, otherwise disable this test +# Preparation. +=> DBGInvoke __enable_schema_sync_service('true') + +=> DBGInvoke __drop_tidb_table(default, test) +=> drop table if exists default.test + +=> DBGInvoke __set_flush_threshold(1000000, 1000000) + +# Data. 
+=> DBGInvoke __mock_tidb_table(default, test, 'col_1 Int64, col_2 default \'asTiDBType|timestamp(5)\'') +=> DBGInvoke __refresh_schemas() +=> DBGInvoke __put_region(4, 0, 100, default, test) +=> DBGInvoke __raft_insert_row(default, test, 4, 50, 50, '2019-06-10 09:00:00') +=> DBGInvoke __raft_insert_row(default, test, 4, 51, 51, '2019-06-11 07:00:00') +=> DBGInvoke __raft_insert_row(default, test, 4, 52, 52, '2019-06-12 08:00:00') +=> DBGInvoke __raft_insert_row(default, test, 4, 53, 53, '2019-06-11 08:00:00') +=> DBGInvoke __raft_insert_row(default, test, 4, 54, 54, '2019-06-13 09:00:01') +=> DBGInvoke __raft_insert_row(default, test, 4, 55, 55, '2019-06-13 12:00:01') + +=> DBGInvoke __try_flush_region(4) + +=> manage table default.test flush +=> manage table default.test merge delta +=> select tidb_database, tidb_table, delta_rate_rows, total_stable_rows from system.dt_tables where tidb_database='default' and tidb_table='test' and is_tombstone = 0 +┌─tidb_database─┬─tidb_table─┬─delta_rate_rows─┬─total_stable_rows─┐ +│ default │ test │ 0 │ 6 │ +└───────────────┴────────────┴─────────────────┴───────────────────┘ + +# The default time zone in cop request is UTC +=> DBGInvoke dag('select * from default.test') +┌─test.col_1─┬────────────────test.col_2─┐ +│ 50 │ 2019-06-10 09:00:00.00000 │ +│ 51 │ 2019-06-11 07:00:00.00000 │ +│ 52 │ 2019-06-12 08:00:00.00000 │ +│ 53 │ 2019-06-11 08:00:00.00000 │ +│ 54 │ 2019-06-13 09:00:01.00000 │ +│ 55 │ 2019-06-13 12:00:01.00000 │ +└────────────┴───────────────────────────┘ + +=> DBGInvoke dag('select * from default.test where col_2 < cast_string_datetime(\'2019-06-10 09:00:00.00000\')') + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 100.00 │ +└─────────────────────────────────────────────┘ + +=> DBGInvoke dag('select * from default.test where col_2 <= cast_string_datetime(\'2019-06-10 09:00:00.00000\')') +┌─test.col_1─┬────────────────test.col_2─┐ +│ 50 │ 2019-06-10 
09:00:00.00000 │ +└────────────┴───────────────────────────┘ + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 0.00 │ +└─────────────────────────────────────────────┘ + +# when using 'default' encoding, return timestamp value is shown in UTC time zone +# but the literal timestamp value in where clause is in the time zone specified by the cop request +# so '2019-06-10 17:00:00.00000'(tz_offset:28800) below is equal to '2019-06-10 09:00:00.00000' in UTC +=> DBGInvoke dag('select * from default.test where col_2 < cast_string_datetime(\'2019-06-10 17:00:00.00000\')',4,'encode_type:default,tz_offset:28800') + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 100.00 │ +└─────────────────────────────────────────────┘ + +=> DBGInvoke dag('select * from default.test where col_2 <= cast_string_datetime(\'2019-06-10 17:00:00.00000\')',4,'encode_type:default,tz_offset:28800') +┌─test.col_1─┬────────────────test.col_2─┐ +│ 50 │ 2019-06-10 09:00:00.00000 │ +└────────────┴───────────────────────────┘ + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 0.00 │ +└─────────────────────────────────────────────┘ + +=> DBGInvoke dag('select * from default.test where col_2 < cast_string_datetime(\'2019-06-10 04:00:00.00000\')',4,'encode_type:default,tz_name:America/Chicago') + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 100.00 │ +└─────────────────────────────────────────────┘ + +=> DBGInvoke dag('select * from default.test where col_2 <= cast_string_datetime(\'2019-06-10 04:00:00.00000\')',4,'encode_type:default,tz_name:America/Chicago') +┌─test.col_1─┬────────────────test.col_2─┐ +│ 50 │ 2019-06-10 09:00:00.00000 │ +└────────────┴───────────────────────────┘ + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') 
+┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 0.00 │ +└─────────────────────────────────────────────┘ + +=> DBGInvoke dag('select * from default.test where col_2 > cast_string_datetime(\'2019-06-13 12:00:01.00000\')') + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 100.00 │ +└─────────────────────────────────────────────┘ + +=> DBGInvoke dag('select * from default.test where col_2 >= cast_string_datetime(\'2019-06-13 12:00:01.00000\')') +┌─test.col_1─┬────────────────test.col_2─┐ +│ 55 │ 2019-06-13 12:00:01.00000 │ +└────────────┴───────────────────────────┘ + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 0.00 │ +└─────────────────────────────────────────────┘ + +=> DBGInvoke dag('select * from default.test where col_2 > cast_string_datetime(\'2019-06-13 20:00:01.00000\')',4,'encode_type:default,tz_offset:28800') + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 100.00 │ +└─────────────────────────────────────────────┘ + +=> DBGInvoke dag('select * from default.test where col_2 >= cast_string_datetime(\'2019-06-13 20:00:01.00000\')',4,'encode_type:default,tz_offset:28800') +┌─test.col_1─┬────────────────test.col_2─┐ +│ 55 │ 2019-06-13 12:00:01.00000 │ +└────────────┴───────────────────────────┘ + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 0.00 │ +└─────────────────────────────────────────────┘ + +=> DBGInvoke dag('select * from default.test where col_2 > cast_string_datetime(\'2019-06-13 07:00:01.00000\')',4,'encode_type:default,tz_name:America/Chicago') + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 100.00 │ +└─────────────────────────────────────────────┘ + +=> DBGInvoke dag('select * from default.test where col_2 >= 
cast_string_datetime(\'2019-06-13 07:00:01.00000\')',4,'encode_type:default,tz_name:America/Chicago') +┌─test.col_1─┬────────────────test.col_2─┐ +│ 55 │ 2019-06-13 12:00:01.00000 │ +└────────────┴───────────────────────────┘ + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 0.00 │ +└─────────────────────────────────────────────┘ + +# Clean up. +=> DBGInvoke __drop_tidb_table(default, test) +=> drop table if exists default.test diff --git a/tests/fullstack-test-dt/expr/timestamp_filter.test b/tests/fullstack-test-dt/expr/timestamp_filter.test index d45312967ac..4b6c8959a30 100644 --- a/tests/fullstack-test-dt/expr/timestamp_filter.test +++ b/tests/fullstack-test-dt/expr/timestamp_filter.test @@ -1,3 +1,4 @@ +# Must enable DT rough set filter and open debug level log to run this test, otherwise disable this test mysql> drop table if exists test.t; # Test filter not-null timestamp filter @@ -29,6 +30,18 @@ mysql> SET time_zone = '+0:00'; set session tidb_isolation_read_engines='tiflash | 1 | 2000-01-01 10:00:00 | +----+---------------------+ +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 0.00 │ +└─────────────────────────────────────────────┘ + +mysql> SET time_zone = '+0:00'; set session tidb_isolation_read_engines='tiflash'; select * from test.t where ts != '2000-01-01 10:00:00'; + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 100.00 │ +└─────────────────────────────────────────────┘ + ## Tests using time_zone '+8:00' mysql> SET time_zone = '+8:00'; set session tidb_isolation_read_engines='tiflash'; select * from test.t where ts = '2000-01-01 18:00:00'; +----+---------------------+ @@ -37,6 +50,18 @@ mysql> SET time_zone = '+8:00'; set session tidb_isolation_read_engines='tiflash | 1 | 2000-01-01 18:00:00 | +----+---------------------+ +=> DBGInvoke search_log_for_key('RSFilter 
exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 0.00 │ +└─────────────────────────────────────────────┘ + +mysql> SET time_zone = '+8:00'; set session tidb_isolation_read_engines='tiflash'; select * from test.t where ts != '2000-01-01 18:00:00'; + +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 100.00 │ +└─────────────────────────────────────────────┘ + mysql> drop table if exists test.t; @@ -71,6 +96,11 @@ mysql> SET time_zone = '+0:00'; set session tidb_isolation_read_engines='tiflash | 1 | 2000-01-01 10:00:00 | +----+---------------------+ +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 0.00 │ +└─────────────────────────────────────────────┘ + ## Tests using time_zone '+8:00' mysql> SET time_zone = '+8:00'; set session tidb_isolation_read_engines='tiflash'; select * from test.t where ts = '2000-01-01 18:00:00'; +----+---------------------+ @@ -79,5 +109,10 @@ mysql> SET time_zone = '+8:00'; set session tidb_isolation_read_engines='tiflash | 1 | 2000-01-01 18:00:00 | +----+---------------------+ +=> DBGInvoke search_log_for_key('RSFilter exclude rate') +┌─search_log_for_key("RSFilter exclude rate")─┐ +│ 0.00 │ +└─────────────────────────────────────────────┘ + mysql> drop table if exists test.t; diff --git a/tests/run-test.py b/tests/run-test.py index 59e632dffda..d3c04df6a42 100644 --- a/tests/run-test.py +++ b/tests/run-test.py @@ -200,17 +200,17 @@ def matched(outputs, matches, fuzz): class Matcher: - def __init__(self, executor, executor_tidb, executor_func, curl_tidb_executor, fuzz): + def __init__(self, executor, executor_tidb, executor_func, executor_curl_tidb, fuzz): self.executor = executor self.executor_tidb = executor_tidb self.executor_func = executor_func + self.executor_curl_tidb = executor_curl_tidb self.query_line_number = 0 self.fuzz = fuzz self.query = None self.outputs = None self.matches = [] 
self.is_mysql = False - self.curl_tidb_executor = curl_tidb_executor def on_line(self, line, line_number): if line.startswith(SLEEP_PREFIX): @@ -236,7 +236,7 @@ def on_line(self, line, line_number): self.query_line_number = line_number self.is_mysql = True self.query = line[len(CURL_TIDB_STATUS_PREFIX):] - self.outputs = self.curl_tidb_executor.exe(self.query) + self.outputs = self.executor_curl_tidb.exe(self.query) self.matches = [] elif line.startswith(CMD_PREFIX) or line.startswith(CMD_PREFIX_ALTER): if verbose: print 'running', line @@ -272,12 +272,12 @@ def on_finish(self): return True -def parse_exe_match(path, executor, executor_tidb, executor_func, curl_tidb_executor, fuzz): +def parse_exe_match(path, executor, executor_tidb, executor_func, executor_curl_tidb, fuzz): todos = [] line_number = 0 line_number_cached = 0 with open(path) as file: - matcher = Matcher(executor, executor_tidb, executor_func, curl_tidb_executor, fuzz) + matcher = Matcher(executor, executor_tidb, executor_func, executor_curl_tidb, fuzz) cached = None for origin in file: line_number += 1 diff --git a/tests/run-test.sh b/tests/run-test.sh index d45787feacd..0b3b2a7593e 100755 --- a/tests/run-test.sh +++ b/tests/run-test.sh @@ -41,7 +41,7 @@ function run_file() local fuzz="$4" local skip_raw_test="$5" local mysql_client="$6" - local verbose="$7" + local verbose="$7" local ext=${path##*.} @@ -76,7 +76,7 @@ function run_dir() local fuzz="$4" local skip_raw_test="$5" local mysql_client="$6" - local verbose="$7" + local verbose="$7" find "$path" -maxdepth 1 -name "*.visual" -type f | sort | while read file; do if [ -f "$file" ]; then @@ -122,7 +122,7 @@ function run_path() local fuzz="$4" local skip_raw_test="$5" local mysql_client="$6" - local verbose="$7" + local verbose="$7" if [ -f "$path" ]; then run_file "$dbc" "$path" "$continue_on_error" "$fuzz" "$skip_raw_test" "$mysql_client" "$verbose"