From 1c142309a6e69a78c4a3db7a336c40ffccd69e47 Mon Sep 17 00:00:00 2001 From: zy-kkk Date: Fri, 15 Sep 2023 22:16:07 +0800 Subject: [PATCH 01/33] [refactor](jdbc catalog) refactor JdbcFunctionPushDownRule (#23826) 1. Change from using string matching function to using Expr matching 2. Replace the `nvl` function with `ifnull` when pushed down to MySQL 3. Adapt ClickHouse's `from_unixtime` function to push down 4. Non-function filtering can still be pushed down when `enable_func_pushdown` is set to false --- .../clickhouse/clickhouse.yaml.tpl | 2 +- .../clickhouse/init/03-create-table.sql | 8 ++ .../clickhouse/init/04-insert.sql | 3 + .../jdbc/JdbcFunctionPushDownRule.java | 123 ++++++++++++++++-- .../planner/external/jdbc/JdbcScanNode.java | 60 ++++++--- .../planner/external/odbc/OdbcScanNode.java | 15 ++- .../jdbc/test_clickhouse_jdbc_catalog.out | Bin 2766 -> 2782 bytes .../jdbc/test_mysql_jdbc_catalog.out | 46 +++---- .../jdbc/test_clickhouse_jdbc_catalog.groovy | 9 ++ .../jdbc/test_mysql_jdbc_catalog.groovy | 61 ++++++++- 10 files changed, 264 insertions(+), 63 deletions(-) diff --git a/docker/thirdparties/docker-compose/clickhouse/clickhouse.yaml.tpl b/docker/thirdparties/docker-compose/clickhouse/clickhouse.yaml.tpl index 3832680deae90e..6e34c459be3fcc 100644 --- a/docker/thirdparties/docker-compose/clickhouse/clickhouse.yaml.tpl +++ b/docker/thirdparties/docker-compose/clickhouse/clickhouse.yaml.tpl @@ -19,7 +19,7 @@ version: "2.1" services: doris--clickhouse: - image: "clickhouse/clickhouse-server:latest" + image: "clickhouse/clickhouse-server:23.3" restart: always environment: CLICKHOUSE_PASSWORD: 123456 diff --git a/docker/thirdparties/docker-compose/clickhouse/init/03-create-table.sql b/docker/thirdparties/docker-compose/clickhouse/init/03-create-table.sql index c13fac5cf98839..46d2e6ffc3e041 100644 --- a/docker/thirdparties/docker-compose/clickhouse/init/03-create-table.sql +++ b/docker/thirdparties/docker-compose/clickhouse/init/03-create-table.sql @@ -131,3 +131,11 @@ CREATE TABLE doris_test.final_test ) ENGINE = ReplacingMergeTree ORDER BY key; + +CREATE TABLE doris_test.ts +( + id Int64, + ts UInt64 +) +ENGINE = MergeTree +ORDER BY id; \ No newline at end of file diff --git a/docker/thirdparties/docker-compose/clickhouse/init/04-insert.sql b/docker/thirdparties/docker-compose/clickhouse/init/04-insert.sql index 69c2ebd25f5686..c5e83eefe0a4a3 100644 --- a/docker/thirdparties/docker-compose/clickhouse/init/04-insert.sql +++ b/docker/thirdparties/docker-compose/clickhouse/init/04-insert.sql @@ -39,3 +39,6 @@ INSERT INTO doris_test.json VALUES ('1','{"a": 1, "b": { "c": 2, "d": [1, 2, 3] INSERT INTO doris_test.final_test Values (1, 'first'); INSERT INTO doris_test.final_test Values (1, 'second'); +INSERT INTO doris_test.ts values (1,1694438743); + + diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/jdbc/JdbcFunctionPushDownRule.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/jdbc/JdbcFunctionPushDownRule.java index bac9d62dbe534b..d1a44cb70031fa 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/jdbc/JdbcFunctionPushDownRule.java +++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/jdbc/JdbcFunctionPushDownRule.java @@ -17,33 +17,130 @@ package org.apache.doris.planner.external.jdbc; +import org.apache.doris.analysis.Expr; +import org.apache.doris.analysis.FunctionCallExpr; +import org.apache.doris.analysis.FunctionName; import org.apache.doris.thrift.TOdbcTableType; +import 
com.google.common.base.Preconditions; +import com.google.common.collect.Maps; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.List; +import java.util.Map; import java.util.TreeSet; +import java.util.function.Predicate; public class JdbcFunctionPushDownRule { - private static final TreeSet UNSUPPORTED_MYSQL_FUNCTIONS = new TreeSet<>(String.CASE_INSENSITIVE_ORDER); + private static final Logger LOG = LogManager.getLogger(JdbcFunctionPushDownRule.class); + private static final TreeSet MYSQL_UNSUPPORTED_FUNCTIONS = new TreeSet<>(String.CASE_INSENSITIVE_ORDER); + + static { + MYSQL_UNSUPPORTED_FUNCTIONS.add("date_trunc"); + MYSQL_UNSUPPORTED_FUNCTIONS.add("money_format"); + } + + private static final TreeSet CLICKHOUSE_SUPPORTED_FUNCTIONS = new TreeSet<>(String.CASE_INSENSITIVE_ORDER); + + static { + CLICKHOUSE_SUPPORTED_FUNCTIONS.add("from_unixtime"); + } + + private static boolean isMySQLFunctionUnsupported(String functionName) { + return MYSQL_UNSUPPORTED_FUNCTIONS.contains(functionName.toLowerCase()); + } + + private static boolean isClickHouseFunctionUnsupported(String functionName) { + return !CLICKHOUSE_SUPPORTED_FUNCTIONS.contains(functionName.toLowerCase()); + } + + + private static final Map REPLACE_MYSQL_FUNCTIONS = Maps.newHashMap(); + + static { + REPLACE_MYSQL_FUNCTIONS.put("nvl", "ifnull"); + } + + private static boolean isReplaceMysqlFunctions(String functionName) { + return REPLACE_MYSQL_FUNCTIONS.containsKey(functionName.toLowerCase()); + } + + private static final Map REPLACE_CLICKHOUSE_FUNCTIONS = Maps.newHashMap(); static { - UNSUPPORTED_MYSQL_FUNCTIONS.add("date_trunc"); - UNSUPPORTED_MYSQL_FUNCTIONS.add("money_format"); + REPLACE_CLICKHOUSE_FUNCTIONS.put("from_unixtime", "FROM_UNIXTIME"); + } + + private static boolean isReplaceClickHouseFunctions(String functionName) { + return REPLACE_CLICKHOUSE_FUNCTIONS.containsKey(functionName.toLowerCase()); } - public static boolean isUnsupportedFunctions(TOdbcTableType tableType, String filter) { - if (tableType.equals(TOdbcTableType.MYSQL)) { - return isMySQLUnsupportedFunctions(filter); + public static Expr processFunctions(TOdbcTableType tableType, Expr expr, List errors) { + if (tableType == null || expr == null) { + return expr; + } + + Predicate checkFunction; + Predicate replaceFunction; + + if (TOdbcTableType.MYSQL.equals(tableType)) { + replaceFunction = JdbcFunctionPushDownRule::isReplaceMysqlFunctions; + checkFunction = JdbcFunctionPushDownRule::isMySQLFunctionUnsupported; + } else if (TOdbcTableType.CLICKHOUSE.equals(tableType)) { + replaceFunction = JdbcFunctionPushDownRule::isReplaceClickHouseFunctions; + checkFunction = JdbcFunctionPushDownRule::isClickHouseFunctionUnsupported; } else { - return false; + return expr; } + + return processFunctionsRecursively(expr, checkFunction, replaceFunction, errors, tableType); } - private static boolean isMySQLUnsupportedFunctions(String filter) { - for (String func : UNSUPPORTED_MYSQL_FUNCTIONS) { - if (filter.contains(func)) { - return true; + private static Expr processFunctionsRecursively(Expr expr, Predicate checkFunction, + Predicate replaceFunction, List errors, TOdbcTableType tableType) { + if (expr instanceof FunctionCallExpr) { + FunctionCallExpr functionCallExpr = (FunctionCallExpr) expr; + String func = functionCallExpr.getFnName().getFunction(); + + Preconditions.checkArgument(!func.isEmpty(), "function can not be empty"); + + func = replaceFunctionNameIfNecessary(func, replaceFunction, functionCallExpr, 
tableType); + + if (!func.isEmpty() && checkFunction.test(func)) { + String errMsg = "Unsupported function: " + func + " in expr: " + expr.toMySql() + + " in JDBC Table Type: " + tableType; + LOG.warn(errMsg); + errors.add(errMsg); } } - return false; + + List children = expr.getChildren(); + for (int i = 0; i < children.size(); i++) { + Expr child = children.get(i); + Expr newChild = processFunctionsRecursively(child, checkFunction, replaceFunction, errors, tableType); + expr.setChild(i, newChild); + } + + return expr; } -} + private static String replaceFunctionNameIfNecessary(String func, Predicate replaceFunction, + FunctionCallExpr functionCallExpr, TOdbcTableType tableType) { + if (replaceFunction.test(func)) { + String newFunc; + if (TOdbcTableType.MYSQL.equals(tableType)) { + newFunc = REPLACE_MYSQL_FUNCTIONS.get(func.toLowerCase()); + } else if (TOdbcTableType.CLICKHOUSE.equals(tableType)) { + newFunc = REPLACE_CLICKHOUSE_FUNCTIONS.get(func); + } else { + newFunc = null; + } + if (newFunc != null) { + functionCallExpr.setFnName(FunctionName.createBuiltinName(newFunc)); + func = functionCallExpr.getFnName().getFunction(); + } + } + return func; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/jdbc/JdbcScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/jdbc/JdbcScanNode.java index 94b6aeb16034f7..ccba1a165fe08f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/jdbc/JdbcScanNode.java +++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/jdbc/JdbcScanNode.java @@ -19,6 +19,7 @@ import org.apache.doris.analysis.Analyzer; import org.apache.doris.analysis.BinaryPredicate; +import org.apache.doris.analysis.BoolLiteral; import org.apache.doris.analysis.DateLiteral; import org.apache.doris.analysis.Expr; import org.apache.doris.analysis.ExprSubstitutionMap; @@ -113,7 +114,7 @@ private void getGraphQueryString() { break; } } - //clean conjusts cause graph sannnode no need conjuncts + // clean conjusts cause graph sannnode no need conjuncts conjuncts = Lists.newArrayList(); } @@ -133,19 +134,32 @@ private void createJdbcFilters() { } ArrayList conjunctsList = Expr.cloneList(conjuncts, sMap); + List errors = Lists.newArrayList(); + List pushDownConjuncts = collectConjunctsToPushDown(conjunctsList, errors); + + for (Expr individualConjunct : pushDownConjuncts) { + String filter = conjunctExprToString(jdbcType, individualConjunct); + filters.add(filter); + conjuncts.remove(individualConjunct); + } + } + + private List collectConjunctsToPushDown(List conjunctsList, List errors) { + List pushDownConjuncts = new ArrayList<>(); for (Expr p : conjunctsList) { if (shouldPushDownConjunct(jdbcType, p)) { - String filter = conjunctExprToString(jdbcType, p); - if (filter.equals("TRUE")) { - filter = "1 = 1"; - } - if (JdbcFunctionPushDownRule.isUnsupportedFunctions(jdbcType, filter)) { - continue; + List individualConjuncts = p.getConjuncts(); + for (Expr individualConjunct : individualConjuncts) { + Expr newp = JdbcFunctionPushDownRule.processFunctions(jdbcType, individualConjunct, errors); + if (!errors.isEmpty()) { + errors.clear(); + continue; + } + pushDownConjuncts.add(newp); } - filters.add(filter); - conjuncts.remove(p); } } + return pushDownConjuncts; } private void createJdbcColumns() { @@ -282,7 +296,7 @@ protected String debugString() { @Override public int getNumInstances() { return ConnectContext.get().getSessionVariable().getEnablePipelineEngine() - ? 
ConnectContext.get().getSessionVariable().getParallelExecInstanceNum() : 1; + ? ConnectContext.get().getSessionVariable().getParallelExecInstanceNum() : 1; } @Override @@ -292,17 +306,22 @@ public StatsDelta genStatsDelta() throws AnalysisException { tbl.getId(), -1L); } - // Now some database have different function call like doris, now doris do not - // push down the function call except MYSQL - public static boolean shouldPushDownConjunct(TOdbcTableType tableType, Expr expr) { - if (!tableType.equals(TOdbcTableType.MYSQL)) { - List fnExprList = Lists.newArrayList(); - expr.collect(FunctionCallExpr.class, fnExprList); - if (!fnExprList.isEmpty()) { + private static boolean shouldPushDownConjunct(TOdbcTableType tableType, Expr expr) { + if (containsFunctionCallExpr(expr)) { + if (tableType.equals(TOdbcTableType.MYSQL) || tableType.equals(TOdbcTableType.CLICKHOUSE)) { + return Config.enable_func_pushdown; + } else { return false; } + } else { + return true; } - return Config.enable_func_pushdown; + } + + private static boolean containsFunctionCallExpr(Expr expr) { + List fnExprList = Lists.newArrayList(); + expr.collect(FunctionCallExpr.class, fnExprList); + return !fnExprList.isEmpty(); } public static String conjunctExprToString(TOdbcTableType tableType, Expr expr) { @@ -338,6 +357,11 @@ public static String conjunctExprToString(TOdbcTableType tableType, Expr expr) { } } + // only for old planner + if (expr.contains(BoolLiteral.class) && expr.getStringValue().equals("1") && expr.getChildren().isEmpty()) { + return "1 = 1"; + } + return expr.toMySql(); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/odbc/OdbcScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/odbc/OdbcScanNode.java index 0f27e9da580bb4..bf4e835e4f1035 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/odbc/OdbcScanNode.java +++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/odbc/OdbcScanNode.java @@ -20,6 +20,7 @@ import org.apache.doris.analysis.Analyzer; import org.apache.doris.analysis.Expr; import org.apache.doris.analysis.ExprSubstitutionMap; +import org.apache.doris.analysis.FunctionCallExpr; import org.apache.doris.analysis.SlotDescriptor; import org.apache.doris.analysis.SlotRef; import org.apache.doris.analysis.TupleDescriptor; @@ -28,6 +29,7 @@ import org.apache.doris.catalog.JdbcTable; import org.apache.doris.catalog.OdbcTable; import org.apache.doris.common.AnalysisException; +import org.apache.doris.common.Config; import org.apache.doris.common.UserException; import org.apache.doris.planner.PlanNodeId; import org.apache.doris.planner.external.ExternalScanNode; @@ -181,7 +183,7 @@ private void createOdbcFilters(Analyzer analyzer) { } ArrayList odbcConjuncts = Expr.cloneList(conjuncts, sMap); for (Expr p : odbcConjuncts) { - if (JdbcScanNode.shouldPushDownConjunct(odbcType, p)) { + if (shouldPushDownConjunct(odbcType, p)) { String filter = JdbcScanNode.conjunctExprToString(odbcType, p); filters.add(filter); conjuncts.remove(p); @@ -224,4 +226,15 @@ public int getNumInstances() { return ConnectContext.get().getSessionVariable().getEnablePipelineEngine() ? 
ConnectContext.get().getSessionVariable().getParallelExecInstanceNum() : 1; } + + public static boolean shouldPushDownConjunct(TOdbcTableType tableType, Expr expr) { + if (!tableType.equals(TOdbcTableType.MYSQL)) { + List fnExprList = Lists.newArrayList(); + expr.collect(FunctionCallExpr.class, fnExprList); + if (!fnExprList.isEmpty()) { + return false; + } + } + return Config.enable_func_pushdown; + } } diff --git a/regression-test/data/external_table_p0/jdbc/test_clickhouse_jdbc_catalog.out b/regression-test/data/external_table_p0/jdbc/test_clickhouse_jdbc_catalog.out index 0b847974ef9387170a46fd50ad337775c4d5e6ba..05372e3f0af9d16c6158e62adee6b887b2a13358 100644 GIT binary patch delta 43 ycmX>ndQWu2AI{0#T!-a#brlrTO7oKA3rdSK6m)gD3^@(WEKN*|EzC`fxwrsFn+#t7 delta 20 ccmca7dQNo1A5M<=f};H7)Z*gFUpY?!0AKhCO#lD@ diff --git a/regression-test/data/external_table_p0/jdbc/test_mysql_jdbc_catalog.out b/regression-test/data/external_table_p0/jdbc/test_mysql_jdbc_catalog.out index 5037157a245a19..d5cc90fa80f0d0 100644 --- a/regression-test/data/external_table_p0/jdbc/test_mysql_jdbc_catalog.out +++ b/regression-test/data/external_table_p0/jdbc/test_mysql_jdbc_catalog.out @@ -238,29 +238,6 @@ VIEWS \N 2023-06-17T10:00 --- !filter1 -- -1 6 1 1 2099.18 3 8 1554296.82 68781940.49 d 8 5 0 d a 7 9 - --- !filter2 -- -1 6 1 1 2099.18 3 8 1554296.82 68781940.49 d 8 5 0 d a 7 9 -2 8 9 8 2900.42 1 6 97486621.73 59634489.39 c 3 2 0 a e 7 4 -3 5 7 3 6276.86 8 9 32758730.38 10260499.72 c 8 1 0 d c 9 2 -4 3 7 5 2449.00 6 3 91359059.28 64743145.92 e 7 8 0 b d 8 4 -5 6 4 5 9137.82 2 7 26526675.70 90098303.36 a 6 7 0 d e 4 1 -6 3 6 8 7601.25 4 9 49117098.47 46499188.80 c 3 3 0 c d 4 8 -7 3 2 8 5297.81 9 3 23753694.20 96930000.64 c 7 2 0 b e 1 5 -8 3 6 7 3683.85 5 7 26056250.91 1127755.43 b 7 6 0 d b 4 7 -9 3 9 1 4785.38 1 5 95199488.12 94869703.42 a 4 4 0 c d 2 4 - --- !filter3 -- -1 6 1 1 2099.18 3 8 1554296.82 68781940.49 d 8 5 0 d a 7 9 - --- !date_trunc -- -2023-06-17T10:00 - --- !money_format -- -1 - -- !test_insert1 -- doris1 18 @@ -305,3 +282,26 @@ sys -- !mysql_view -- 10086 4294967295 201 +-- !filter1 -- +1 6 1 1 2099.18 3 8 1554296.82 68781940.49 d 8 5 0 d a 7 9 + +-- !filter2 -- +1 6 1 1 2099.18 3 8 1554296.82 68781940.49 d 8 5 0 d a 7 9 +2 8 9 8 2900.42 1 6 97486621.73 59634489.39 c 3 2 0 a e 7 4 +3 5 7 3 6276.86 8 9 32758730.38 10260499.72 c 8 1 0 d c 9 2 +4 3 7 5 2449.00 6 3 91359059.28 64743145.92 e 7 8 0 b d 8 4 +5 6 4 5 9137.82 2 7 26526675.70 90098303.36 a 6 7 0 d e 4 1 +6 3 6 8 7601.25 4 9 49117098.47 46499188.80 c 3 3 0 c d 4 8 +7 3 2 8 5297.81 9 3 23753694.20 96930000.64 c 7 2 0 b e 1 5 +8 3 6 7 3683.85 5 7 26056250.91 1127755.43 b 7 6 0 d b 4 7 +9 3 9 1 4785.38 1 5 95199488.12 94869703.42 a 4 4 0 c d 2 4 + +-- !filter3 -- +1 6 1 1 2099.18 3 8 1554296.82 68781940.49 d 8 5 0 d a 7 9 + +-- !date_trunc -- +2023-06-17T10:00 + +-- !money_format -- +1 + diff --git a/regression-test/suites/external_table_p0/jdbc/test_clickhouse_jdbc_catalog.groovy b/regression-test/suites/external_table_p0/jdbc/test_clickhouse_jdbc_catalog.groovy index 9c2050ab70100e..b01f200574321f 100644 --- a/regression-test/suites/external_table_p0/jdbc/test_clickhouse_jdbc_catalog.groovy +++ b/regression-test/suites/external_table_p0/jdbc/test_clickhouse_jdbc_catalog.groovy @@ -68,6 +68,15 @@ suite("test_clickhouse_jdbc_catalog", "p0,external,clickhouse,external_docker,ex order_qt_final1 """select * from final_test""" sql "set jdbc_clickhouse_query_final = false;" order_qt_final2 """select * from final_test""" + order_qt_func_push 
"""select * from ts where from_unixtime(ts,'yyyyMMdd') >= '2022-01-01';""" + explain { + sql("select * from ts where from_unixtime(ts,'yyyyMMdd') >= '2022-01-01';") + contains """QUERY: SELECT "id", "ts" FROM "doris_test"."ts" WHERE (FROM_UNIXTIME(ts, '%Y%m%d') >= '2022-01-01')""" + } + explain { + sql("select * from ts where nvl(ts,null) >= '2022-01-01';") + contains """QUERY: SELECT "id", "ts" FROM "doris_test"."ts"""" + } sql """ drop catalog if exists ${catalog_name} """ } diff --git a/regression-test/suites/external_table_p0/jdbc/test_mysql_jdbc_catalog.groovy b/regression-test/suites/external_table_p0/jdbc/test_mysql_jdbc_catalog.groovy index cc8f4fd0fe49aa..3717f17bec9115 100644 --- a/regression-test/suites/external_table_p0/jdbc/test_mysql_jdbc_catalog.groovy +++ b/regression-test/suites/external_table_p0/jdbc/test_mysql_jdbc_catalog.groovy @@ -112,11 +112,6 @@ suite("test_mysql_jdbc_catalog", "p0,external,mysql,external_docker,external_doc order_qt_auto_default_t """insert into ${auto_default_t}(name) values('a'); """ order_qt_dt """select * from ${dt}; """ order_qt_dt_null """select * from ${dt_null} order by 1; """ - order_qt_filter1 """select * from ${ex_tb17} where id = 1; """ - order_qt_filter2 """select * from ${ex_tb17} where 1=1 order by 1; """ - order_qt_filter3 """select * from ${ex_tb17} where id = 1 and 1 = 1; """ - order_qt_date_trunc """ SELECT timestamp0 from dt where DATE_TRUNC(date_sub(timestamp0,INTERVAL 9 HOUR),'hour') > '2011-03-03 17:39:05'; """ - order_qt_money_format """ select k8 from test1 where money_format(k8) = '1.00'; """ // test insert String uuid1 = UUID.randomUUID().toString(); @@ -238,8 +233,60 @@ suite("test_mysql_jdbc_catalog", "p0,external,mysql,external_docker,external_doc "jdbc.driver_url" = "${driver_url}", "jdbc.driver_class" = "com.mysql.cj.jdbc.Driver"); """ - qt_mysql_view """ select * from view_catalog.doris_test.mysql_view order by col_1;""" - sql """ drop catalog if exists view_catalog; """ + qt_mysql_view """ select * from view_catalog.doris_test.mysql_view order by col_1;""" + sql """ drop catalog if exists view_catalog; """ + + sql """ drop catalog if exists mysql_fun_push_catalog """ + sql """ CREATE CATALOG mysql_fun_push_catalog PROPERTIES ( + "type"="jdbc", + "jdbc.user"="root", + "jdbc.password"="123456", + "jdbc.jdbc_url" = "jdbc:mysql://${externalEnvIp}:${mysql_port}/doris_test?useSSL=false", + "jdbc.driver_url" = "${driver_url}", + "jdbc.driver_class" = "com.mysql.cj.jdbc.Driver"); + """ + + sql """switch mysql_fun_push_catalog""" + sql """ use ${ex_db_name}""" + sql """ admin set frontend config ("enable_func_pushdown" = "true"); """ + order_qt_filter1 """select * from ${ex_tb17} where id = 1; """ + order_qt_filter2 """select * from ${ex_tb17} where 1=1 order by 1; """ + order_qt_filter3 """select * from ${ex_tb17} where id = 1 and 1 = 1; """ + order_qt_date_trunc """ SELECT timestamp0 from dt where DATE_TRUNC(date_sub(timestamp0,INTERVAL 9 HOUR),'hour') > '2011-03-03 17:39:05'; """ + order_qt_money_format """ select k8 from test1 where money_format(k8) = '1.00'; """ + explain { + sql("select k8 from test1 where money_format(k8) = '1.00';") + + contains "QUERY: SELECT `k8` FROM `doris_test`.`test1`" + } + explain { + sql ("SELECT timestamp0 from dt where DATE_TRUNC(date_sub(timestamp0,INTERVAL 9 HOUR),'hour') > '2011-03-03 17:39:05';") + + contains "QUERY: SELECT `timestamp0` FROM `doris_test`.`dt`" + } + explain { + sql ("SELECT timestamp0 from dt where DATE_TRUNC(date_sub(timestamp0,INTERVAL 9 HOUR),'hour') > '2011-03-03 
17:39:05' and timestamp0 > '2022-01-01';;") + + contains "QUERY: SELECT `timestamp0` FROM `doris_test`.`dt` WHERE (timestamp0 > '2022-01-01 00:00:00')" + } + explain { + sql ("select k6, k8 from test1 where nvl(k6, null) = 1;") + + contains "QUERY: SELECT `k6`, `k8` FROM `doris_test`.`test1` WHERE (ifnull(k6, NULL) = 1)" + } + explain { + sql ("select k6, k8 from test1 where nvl(nvl(k6, null),null) = 1;") + + contains "QUERY: SELECT `k6`, `k8` FROM `doris_test`.`test1` WHERE (ifnull(ifnull(k6, NULL), NULL) = 1)" + } + sql """ admin set frontend config ("enable_func_pushdown" = "false"); """ + explain { + sql ("select k6, k8 from test1 where nvl(k6, null) = 1 and k8 = 1;") + + contains "QUERY: SELECT `k6`, `k8` FROM `doris_test`.`test1` WHERE (k8 = 1)" + } + sql """ admin set frontend config ("enable_func_pushdown" = "true"); """ + sql """ drop catalog if exists mysql_fun_push_catalog; """ } } From 298bf0885d61e8c0ddbf96e25e2f22f7869a0d4d Mon Sep 17 00:00:00 2001 From: starocean999 <40539150+starocean999@users.noreply.github.com> Date: Fri, 15 Sep 2023 22:50:36 +0800 Subject: [PATCH 02/33] [fix](nereids) correlated anti join shouldn't be translated to null aware anti join (#24290) original SQL select t1.* from t1 where t1.k1 not in ( select t3.k1 from t3 where t1.k2 = t3.k2 ); rewrite SQL before (wrong): select t1.* from t1 null aware left anti join t2 on t1.k1 = t3.k1 and t1.k2 = t3.k2; now (correct): select t1.* from t1 left anti join t3 on t1.k2 = t3.k2 and (t1.k1 = t3.k1 or t3.k1 is null or t1.k1 is null); --- .../nereids/rules/rewrite/InApplyToJoin.java | 28 +++++++++++------ .../test_subquery_in_disjunction.groovy | 31 ++++++++++--------- .../sub_query_correlated.groovy | 7 +++-- 3 files changed, 38 insertions(+), 28 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/InApplyToJoin.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/InApplyToJoin.java index 2c753972badf97..5a1ba7e22e339b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/InApplyToJoin.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/InApplyToJoin.java @@ -24,6 +24,7 @@ import org.apache.doris.nereids.trees.expressions.EqualTo; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.InSubquery; +import org.apache.doris.nereids.trees.expressions.IsNull; import org.apache.doris.nereids.trees.expressions.NamedExpression; import org.apache.doris.nereids.trees.expressions.Not; import org.apache.doris.nereids.trees.expressions.functions.agg.BitmapUnion; @@ -92,26 +93,33 @@ select t1.k1 from bigtable t1 left semi join (select bitmap_union(k2) x from bit } //in-predicate to equal + InSubquery inSubquery = ((InSubquery) apply.getSubqueryExpr()); Expression predicate; - Expression left = ((InSubquery) apply.getSubqueryExpr()).getCompareExpr(); + Expression left = inSubquery.getCompareExpr(); // TODO: trick here, because when deep copy logical plan the apply right child // is not same with query plan in subquery expr, since the scan node copy twice - Expression right = apply.getSubqueryExpr().getSubqueryOutput((LogicalPlan) apply.right()); + Expression right = inSubquery.getSubqueryOutput((LogicalPlan) apply.right()); if (apply.isCorrelated()) { - predicate = ExpressionUtils.and(new EqualTo(left, right), - apply.getCorrelationFilter().get()); + if (inSubquery.isNot()) { + predicate = ExpressionUtils.and(ExpressionUtils.or(new EqualTo(left, right), + new IsNull(left), 
new IsNull(right)), + apply.getCorrelationFilter().get()); + } else { + predicate = ExpressionUtils.and(new EqualTo(left, right), + apply.getCorrelationFilter().get()); + } } else { predicate = new EqualTo(left, right); } List conjuncts = ExpressionUtils.extractConjunction(predicate); - if (((InSubquery) apply.getSubqueryExpr()).isNot()) { + if (inSubquery.isNot()) { return new LogicalJoin<>( - predicate.nullable() ? JoinType.NULL_AWARE_LEFT_ANTI_JOIN : JoinType.LEFT_ANTI_JOIN, - Lists.newArrayList(), - conjuncts, - JoinHint.NONE, apply.getMarkJoinSlotReference(), - apply.children()); + predicate.nullable() && !apply.isCorrelated() + ? JoinType.NULL_AWARE_LEFT_ANTI_JOIN + : JoinType.LEFT_ANTI_JOIN, + Lists.newArrayList(), conjuncts, JoinHint.NONE, + apply.getMarkJoinSlotReference(), apply.children()); } else { return new LogicalJoin<>(JoinType.LEFT_SEMI_JOIN, Lists.newArrayList(), conjuncts, diff --git a/regression-test/suites/correctness/test_subquery_in_disjunction.groovy b/regression-test/suites/correctness/test_subquery_in_disjunction.groovy index 2178fec89363e2..2decf0583f2800 100644 --- a/regression-test/suites/correctness/test_subquery_in_disjunction.groovy +++ b/regression-test/suites/correctness/test_subquery_in_disjunction.groovy @@ -97,21 +97,22 @@ suite("test_subquery_in_disjunction") { SELECT * FROM test_sq_dj1 WHERE c1 IN (SELECT c1 FROM test_sq_dj2 WHERE test_sq_dj1.c1 < test_sq_dj2.c2) OR c1 < 11 ORDER BY c1; """ - qt_hash_join_with_other_conjuncts5 """ - SELECT * FROM test_sq_dj1 WHERE c1 NOT IN (SELECT c1 FROM test_sq_dj2 WHERE test_sq_dj1.c1 > test_sq_dj2.c2) OR c1 < 10 ORDER BY c1; - """ - - qt_hash_join_with_other_conjuncts6 """ - SELECT * FROM test_sq_dj1 WHERE c1 NOT IN (SELECT c1 FROM test_sq_dj2 WHERE test_sq_dj1.c1 < test_sq_dj2.c2) OR c1 < 10 ORDER BY c1; - """ - - qt_hash_join_with_other_conjuncts7 """ - SELECT * FROM test_sq_dj1 WHERE c1 NOT IN (SELECT c1 FROM test_sq_dj2 WHERE test_sq_dj1.c1 > test_sq_dj2.c2) OR c1 < 11 ORDER BY c1; - """ - - qt_hash_join_with_other_conjuncts8 """ - SELECT * FROM test_sq_dj1 WHERE c1 NOT IN (SELECT c1 FROM test_sq_dj2 WHERE test_sq_dj1.c1 < test_sq_dj2.c2) OR c1 < 11 ORDER BY c1; - """ + // TODO: enable this after DORIS-7051 and DORIS-7052 is fixed + // qt_hash_join_with_other_conjuncts5 """ + // SELECT * FROM test_sq_dj1 WHERE c1 NOT IN (SELECT c1 FROM test_sq_dj2 WHERE test_sq_dj1.c1 > test_sq_dj2.c2) OR c1 < 10 ORDER BY c1; + // """ + + // qt_hash_join_with_other_conjuncts6 """ + // SELECT * FROM test_sq_dj1 WHERE c1 NOT IN (SELECT c1 FROM test_sq_dj2 WHERE test_sq_dj1.c1 < test_sq_dj2.c2) OR c1 < 10 ORDER BY c1; + // """ + + // qt_hash_join_with_other_conjuncts7 """ + // SELECT * FROM test_sq_dj1 WHERE c1 NOT IN (SELECT c1 FROM test_sq_dj2 WHERE test_sq_dj1.c1 > test_sq_dj2.c2) OR c1 < 11 ORDER BY c1; + // """ + + // qt_hash_join_with_other_conjuncts8 """ + // SELECT * FROM test_sq_dj1 WHERE c1 NOT IN (SELECT c1 FROM test_sq_dj2 WHERE test_sq_dj1.c1 < test_sq_dj2.c2) OR c1 < 11 ORDER BY c1; + // """ qt_same_subquery_in_conjuncts """ SELECT * FROM test_sq_dj1 WHERE c1 IN (SELECT c1 FROM test_sq_dj2) OR c1 IN (SELECT c1 FROM test_sq_dj2) OR c1 < 10 ORDER BY c1; diff --git a/regression-test/suites/nereids_syntax_p0/sub_query_correlated.groovy b/regression-test/suites/nereids_syntax_p0/sub_query_correlated.groovy index c7dcffb1bef572..30f93b857a6834 100644 --- a/regression-test/suites/nereids_syntax_p0/sub_query_correlated.groovy +++ b/regression-test/suites/nereids_syntax_p0/sub_query_correlated.groovy @@ -462,7 +462,8 @@ 
suite ("sub_query_correlated") { OR k1 < 10; """ - order_qt_doris_6937_2 """ - select * from sub_query_correlated_subquery1 where sub_query_correlated_subquery1.k1 not in (select sub_query_correlated_subquery3.k3 from sub_query_correlated_subquery3 where sub_query_correlated_subquery3.v2 > sub_query_correlated_subquery1.k2) or k1 < 10 order by k1, k2; - """ + // uncomment this after DORIS-7051 is fixed + // order_qt_doris_6937_2 """ + // select * from sub_query_correlated_subquery1 where sub_query_correlated_subquery1.k1 not in (select sub_query_correlated_subquery3.k3 from sub_query_correlated_subquery3 where sub_query_correlated_subquery3.v2 > sub_query_correlated_subquery1.k2) or k1 < 10 order by k1, k2; + // """ } From cac089c7cda186d74c359c4d4d788be1bee28ec1 Mon Sep 17 00:00:00 2001 From: yujun Date: Sat, 16 Sep 2023 09:52:20 +0800 Subject: [PATCH 03/33] [fix](compile) fix mac compile sort failed #24453 --- be/src/olap/storage_engine.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/be/src/olap/storage_engine.cpp b/be/src/olap/storage_engine.cpp index 6353efaec757f5..0c517bf42cf998 100644 --- a/be/src/olap/storage_engine.cpp +++ b/be/src/olap/storage_engine.cpp @@ -449,7 +449,7 @@ std::vector StorageEngine::get_stores_for_create_tablet( int tablet_num; - bool operator<(const DirInfo& other) { + bool operator<(const DirInfo& other) const { if (available_level != other.available_level) { return available_level < other.available_level; } From 4dad7c94da364498759b620798e0723c7ff788bf Mon Sep 17 00:00:00 2001 From: Mingyu Chen Date: Sat, 16 Sep 2023 09:57:39 +0800 Subject: [PATCH 04/33] [fix](orc) fix the count(*) pushdown issue in orc format (#24446) In previous, when querying hive table in orc format, and the file is splitted. the result of select count(*) may be multiple of the real row number. This is because the number of rows should be got after orc strip prune, otherwise, it may return wrong result --- be/src/apache-orc | 2 +- be/src/vec/exec/format/orc/vorc_reader.cpp | 5 +- be/src/vec/exec/format/orc/vorc_reader.h | 2 + .../HdfsTableValuedFunction.java | 2 +- .../tvf/test_hdfs_tvf_compression.out | 18 ++++++ .../tvf/test_hdfs_tvf_compression.groovy | 63 +++++++++++++++++++ 6 files changed, 88 insertions(+), 4 deletions(-) diff --git a/be/src/apache-orc b/be/src/apache-orc index 78bbe2e41f2140..a7c0af50f8ca8f 160000 --- a/be/src/apache-orc +++ b/be/src/apache-orc @@ -1 +1 @@ -Subproject commit 78bbe2e41f2140b803855d683fae5e1a4b734a37 +Subproject commit a7c0af50f8ca8ff7cddaf8675473a037f8b13143 diff --git a/be/src/vec/exec/format/orc/vorc_reader.cpp b/be/src/vec/exec/format/orc/vorc_reader.cpp index deb48bcc0b9d98..06f41a2edcd769 100644 --- a/be/src/vec/exec/format/orc/vorc_reader.cpp +++ b/be/src/vec/exec/format/orc/vorc_reader.cpp @@ -245,8 +245,6 @@ Status OrcReader::_create_file_reader() { } return Status::InternalError("Init OrcReader failed. reason = {}", _err_msg); } - _remaining_rows = _reader->getNumberOfRows(); - return Status::OK(); } @@ -789,6 +787,9 @@ Status OrcReader::set_fill_columns( auto& selected_type = _row_reader->getSelectedType(); int idx = 0; _init_select_types(selected_type, idx); + + _remaining_rows = _row_reader->getNumberOfRows(); + } catch (std::exception& e) { return Status::InternalError("Failed to create orc row reader. 
reason = {}", e.what()); } diff --git a/be/src/vec/exec/format/orc/vorc_reader.h b/be/src/vec/exec/format/orc/vorc_reader.h index a9b564f560629e..133c92e7d164c1 100644 --- a/be/src/vec/exec/format/orc/vorc_reader.h +++ b/be/src/vec/exec/format/orc/vorc_reader.h @@ -489,6 +489,8 @@ class OrcReader : public GenericReader { void set_remaining_rows(int64_t rows) { _remaining_rows = rows; } private: + // This is only for count(*) short circuit read. + // save the total number of rows in range int64_t _remaining_rows = 0; RuntimeProfile* _profile = nullptr; RuntimeState* _state = nullptr; diff --git a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/HdfsTableValuedFunction.java b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/HdfsTableValuedFunction.java index 385d9d11adf13b..55c898b29f944e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/HdfsTableValuedFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/HdfsTableValuedFunction.java @@ -70,7 +70,7 @@ public HdfsTableValuedFunction(Map params) throws AnalysisExcept // because HADOOP_FS_NAME contains upper and lower case locationProperties.put(HdfsResource.HADOOP_FS_NAME, params.get(key)); } else { - throw new AnalysisException(key + " is invalid property"); + locationProperties.put(key, params.get(key)); } } diff --git a/regression-test/data/external_table_p2/tvf/test_hdfs_tvf_compression.out b/regression-test/data/external_table_p2/tvf/test_hdfs_tvf_compression.out index a92e6f28cb418e..6d92ffffc2fa52 100644 --- a/regression-test/data/external_table_p2/tvf/test_hdfs_tvf_compression.out +++ b/regression-test/data/external_table_p2/tvf/test_hdfs_tvf_compression.out @@ -248,3 +248,21 @@ c133 TEXT Yes false \N NONE -- !plain_2 -- +-- !count_parquet_0 -- +1062734 + +-- !count_parquet_1 -- +1062734 + +-- !count_orc_0 -- +2777636 + +-- !count_orc_1 -- +2777636 + +-- !count_text_0 -- +144730 + +-- !count_text_1 -- +144730 + diff --git a/regression-test/suites/external_table_p2/tvf/test_hdfs_tvf_compression.groovy b/regression-test/suites/external_table_p2/tvf/test_hdfs_tvf_compression.groovy index 2f07106957eb8c..40dc3c244053fb 100644 --- a/regression-test/suites/external_table_p2/tvf/test_hdfs_tvf_compression.groovy +++ b/regression-test/suites/external_table_p2/tvf/test_hdfs_tvf_compression.groovy @@ -105,7 +105,70 @@ suite("test_hdfs_tvf_compression", "p2,external,tvf,external_remote,external_rem "column_separator" = '\001', "compress_type" = "plain") where c2="abc" order by c3,c4,c10 limit 5; """ + + // test count(*) push down + def test_data_dir = "hdfs://${nameNodeHost}:${hdfsPort}" + // parquet + sql """set file_split_size=0;""" + qt_count_parquet_0 """ + select count(*) from + HDFS( + "uri" = "${test_data_dir}/test_data/ckbench_hits.part-00000.snappy.parquet", + "fs.defaultFS" = "${baseFs}", + "format" = "parquet" + ); + """ + + sql """set file_split_size=388608;""" + qt_count_parquet_1 """ + select count(*) from + HDFS( + "uri" = "${test_data_dir}/test_data/ckbench_hits.part-00000.snappy.parquet", + "fs.defaultFS" = "${baseFs}", + "format" = "parquet" + ); + """ + + // orc + sql """set file_split_size=0;""" + qt_count_orc_0 """ + select count(*) from + HDFS( + "uri" = "${test_data_dir}/test_data/ckbench_hits.000000_0.orc", + "fs.defaultFS" = "${baseFs}", + "format" = "orc" + ); + """ + + sql """set file_split_size=388608;""" + qt_count_orc_1 """ + select count(*) from + HDFS( + "uri" = "${test_data_dir}/test_data/ckbench_hits.000000_0.orc", + "fs.defaultFS" = "${baseFs}", + "format" = 
"orc" + ); + """ + // text + sql """set file_split_size=0;""" + qt_count_text_0 """ + select count(*) from + HDFS( + "uri" = "${test_data_dir}/test_data/tpcds_catalog_returns_data-m-00000.txt", + "fs.defaultFS" = "${baseFs}", + "format" = "csv" + ); + """ + sql """set file_split_size=388608;""" + qt_count_text_1 """ + select count(*) from + HDFS( + "uri" = "${test_data_dir}/test_data/tpcds_catalog_returns_data-m-00000.txt", + "fs.defaultFS" = "${baseFs}", + "format" = "csv" + ); + """ } } From 81b6ab9b68e61e8033521069c8fb6441ec319cee Mon Sep 17 00:00:00 2001 From: lihangyu <15605149486@163.com> Date: Sat, 16 Sep 2023 10:01:36 +0800 Subject: [PATCH 05/33] [Fix](topn opt) only allow duplicate key or MOW model to use 2 phase read opt in nereids planner (#24485) The fetch phase is not support aggregation at present --- .../rewrite/DeferMaterializeTopNResult.java | 2 + .../suites/query_p0/sort/sort.groovy | 8 ++ .../suites/query_p0/sort/topn_2pr_rule.groovy | 74 +++++++++++++++++++ 3 files changed, 84 insertions(+) create mode 100644 regression-test/suites/query_p0/sort/topn_2pr_rule.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/DeferMaterializeTopNResult.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/DeferMaterializeTopNResult.java index 4cd41bd509afde..15516e0501f490 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/DeferMaterializeTopNResult.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/DeferMaterializeTopNResult.java @@ -60,6 +60,7 @@ public List buildRules() { .when(r -> r.child().getOrderKeys().stream().map(OrderKey::getExpr) .allMatch(Expression::isColumnFromTable)) .when(r -> r.child().child().getTable().getEnableLightSchemaChange()) + .when(r -> r.child().child().getTable().isDupKeysOrMergeOnWrite()) .then(r -> deferMaterialize(r, r.child(), Optional.empty(), r.child().child())) ), RuleType.DEFER_MATERIALIZE_TOP_N_RESULT.build( @@ -69,6 +70,7 @@ public List buildRules() { .when(r -> r.child().getOrderKeys().stream().map(OrderKey::getExpr) .allMatch(Expression::isColumnFromTable)) .when(r -> r.child().child().child().getTable().getEnableLightSchemaChange()) + .when(r -> r.child().child().child().getTable().isDupKeysOrMergeOnWrite()) .then(r -> { LogicalFilter filter = r.child().child(); return deferMaterialize(r, r.child(), Optional.of(filter), filter.child()); diff --git a/regression-test/suites/query_p0/sort/sort.groovy b/regression-test/suites/query_p0/sort/sort.groovy index daa1328ae8282c..5ae1beb317a8cc 100644 --- a/regression-test/suites/query_p0/sort/sort.groovy +++ b/regression-test/suites/query_p0/sort/sort.groovy @@ -101,6 +101,10 @@ suite("sort") { qt_sql_orderby_non_overlap_desc """ select * from sort_non_overlap order by time_period desc limit 4; """ + + // test topn 2phase opt with light schema change + sql """set topn_opt_limit_threshold = 1024""" + sql """set enable_two_phase_read_opt= true""" sql """ DROP TABLE if exists `sort_default_value`; """ sql """ CREATE TABLE `sort_default_value` ( `k1` int NOT NULL @@ -118,4 +122,8 @@ suite("sort") { sql "insert into sort_default_value values (3, 0)" sql "insert into sort_default_value values (4, null)" qt_sql "select * from sort_default_value order by k1 limit 10" + explain { + sql("select * from sort_default_value order by k1 limit 10") + contains "OPT TWO PHASE" + } } diff --git a/regression-test/suites/query_p0/sort/topn_2pr_rule.groovy b/regression-test/suites/query_p0/sort/topn_2pr_rule.groovy new file 
mode 100644 index 00000000000000..45b9d6f2500775 --- /dev/null +++ b/regression-test/suites/query_p0/sort/topn_2pr_rule.groovy @@ -0,0 +1,74 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("topn_2pr_rule") { + sql """set topn_opt_limit_threshold = 1024""" + sql """set enable_two_phase_read_opt= true""" + + def create_table = { table_name, key_type="DUPLICATE" -> + sql "DROP TABLE IF EXISTS ${table_name}" + value_type = "v string" + if ("${key_type}" == "AGGREGATE") { + value_type = "v string REPLACE_IF_NOT_NULL NULL" + } + sql """ + CREATE TABLE IF NOT EXISTS ${table_name} ( + k bigint, + ${value_type} + ) + ${key_type} KEY(`k`) + DISTRIBUTED BY HASH(k) BUCKETS 1 + properties("replication_num" = "1", "disable_auto_compaction" = "false"); + """ + } + def verify = { table_name, key_type-> + if("${key_type}" == "DUPLICATE") { + explain { + sql("select * from ${table_name} order by k limit 1;") + contains "OPT TWO PHASE" + } + explain { + sql("select * from ${table_name} where k = 1 order by k limit 1;") + contains "OPT TWO PHASE" + } + explain { + sql("select * from ${table_name} where k order by k + 1 limit 1;") + notContains "OPT TWO PHASE" + } + } else if("${key_type}" == "UNIQUE") { + explain { + sql("select * from ${table_name} order by k limit 1;") + notContains "OPT TWO PHASE" + } + } else if("${key_type}" == "AGGREGATE") { + explain { + sql("select * from ${table_name} order by k limit 1;") + notContains "OPT TWO PHASE" + } + } + } + + def key_types = ["DUPLICATE", "UNIQUE", "AGGREGATE"] + for (int i = 0; i < key_types.size(); i++) { + def table_name = "topn_2pr_rule_${key_types[i]}" + create_table.call(table_name, key_types[i]) + sql """insert into ${table_name} values(1, "1")""" + sql """insert into ${table_name} values(2, "2")""" + sql """insert into ${table_name} values(3, "3")""" + verify.call(table_name, key_types[i]) + } +} \ No newline at end of file From 8ce109bda7928815290e37fd84f0b3f77769bd9c Mon Sep 17 00:00:00 2001 From: Gabriel Date: Sat, 16 Sep 2023 14:28:20 +0800 Subject: [PATCH 06/33] [pipelineX](profile) phase 2: refine profile (#24467) --- .../exec/aggregation_sink_operator.cpp | 3 + .../exec/aggregation_source_operator.cpp | 59 +++++++++---------- .../pipeline/exec/analytic_sink_operator.cpp | 4 ++ .../exec/analytic_source_operator.cpp | 5 ++ .../exec/assert_num_rows_operator.cpp | 2 + .../pipeline/exec/exchange_sink_operator.cpp | 7 ++- .../exec/exchange_source_operator.cpp | 4 ++ be/src/pipeline/exec/hashjoin_build_sink.cpp | 19 +++--- be/src/pipeline/exec/hashjoin_build_sink.h | 1 - .../pipeline/exec/hashjoin_probe_operator.cpp | 21 ++++--- .../exec/join_build_sink_operator.cpp | 10 ++-- .../pipeline/exec/join_build_sink_operator.h | 1 - be/src/pipeline/exec/join_probe_operator.cpp | 10 ++-- 
be/src/pipeline/exec/join_probe_operator.h | 1 - .../exec/nested_loop_join_build_operator.cpp | 4 ++ .../exec/nested_loop_join_probe_operator.cpp | 7 +++ be/src/pipeline/exec/repeat_operator.cpp | 3 + be/src/pipeline/exec/repeat_operator.h | 1 + be/src/pipeline/exec/result_sink_operator.cpp | 7 +++ be/src/pipeline/exec/scan_operator.cpp | 29 +++++---- be/src/pipeline/exec/scan_operator.h | 1 - be/src/pipeline/exec/select_operator.h | 1 + be/src/pipeline/exec/sort_sink_operator.cpp | 4 ++ be/src/pipeline/exec/sort_source_operator.cpp | 7 ++- .../streaming_aggregation_sink_operator.cpp | 1 + be/src/pipeline/exec/union_sink_operator.cpp | 6 +- .../pipeline/exec/union_source_operator.cpp | 8 ++- be/src/pipeline/pipeline_x/operator.cpp | 14 +---- be/src/pipeline/pipeline_x/operator.h | 15 +++-- .../pipeline/pipeline_x/pipeline_x_task.cpp | 10 ++-- 30 files changed, 160 insertions(+), 105 deletions(-) diff --git a/be/src/pipeline/exec/aggregation_sink_operator.cpp b/be/src/pipeline/exec/aggregation_sink_operator.cpp index cc0eb3be1da372..a83d8bebd320cb 100644 --- a/be/src/pipeline/exec/aggregation_sink_operator.cpp +++ b/be/src/pipeline/exec/aggregation_sink_operator.cpp @@ -66,6 +66,7 @@ Status AggSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& info) { RETURN_IF_ERROR(Base::init(state, info)); SCOPED_TIMER(Base::profile()->total_time_counter()); + SCOPED_TIMER(Base::_open_timer); _agg_data = Base::_shared_state->agg_data.get(); _agg_arena_pool = Base::_shared_state->agg_arena_pool.get(); auto& p = Base::_parent->template cast(); @@ -160,6 +161,7 @@ Status AggSinkLocalState::init(RuntimeState* state, template Status AggSinkLocalState::open(RuntimeState* state) { SCOPED_TIMER(Base::profile()->total_time_counter()); + SCOPED_TIMER(Base::_open_timer); RETURN_IF_ERROR(Base::open(state)); _agg_data = Base::_shared_state->agg_data.get(); // move _create_agg_status to open not in during prepare, @@ -927,6 +929,7 @@ Status AggSinkOperatorX::sink(doris::RuntimeState* state, template Status AggSinkLocalState::close(RuntimeState* state) { SCOPED_TIMER(Base::profile()->total_time_counter()); + SCOPED_TIMER(Base::_close_timer); if (Base::_closed) { return Status::OK(); } diff --git a/be/src/pipeline/exec/aggregation_source_operator.cpp b/be/src/pipeline/exec/aggregation_source_operator.cpp index 16125c424a4301..542346dc2d2c20 100644 --- a/be/src/pipeline/exec/aggregation_source_operator.cpp +++ b/be/src/pipeline/exec/aggregation_source_operator.cpp @@ -39,14 +39,15 @@ AggLocalState::AggLocalState(RuntimeState* state, OperatorXBase* parent) Status AggLocalState::init(RuntimeState* state, LocalStateInfo& info) { RETURN_IF_ERROR(Base::init(state, info)); - SCOPED_TIMER(Base::profile()->total_time_counter()); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); _agg_data = _shared_state->agg_data.get(); - _get_results_timer = ADD_TIMER(Base::profile(), "GetResultsTime"); - _serialize_result_timer = ADD_TIMER(Base::profile(), "SerializeResultTime"); - _hash_table_iterate_timer = ADD_TIMER(Base::profile(), "HashTableIterateTime"); - _insert_keys_to_column_timer = ADD_TIMER(Base::profile(), "InsertKeysToColumnTime"); - _serialize_data_timer = ADD_TIMER(Base::profile(), "SerializeDataTime"); - _hash_table_size_counter = ADD_COUNTER(Base::profile(), "HashTableSize", TUnit::UNIT); + _get_results_timer = ADD_TIMER(profile(), "GetResultsTime"); + _serialize_result_timer = ADD_TIMER(profile(), "SerializeResultTime"); + _hash_table_iterate_timer = ADD_TIMER(profile(), 
"HashTableIterateTime"); + _insert_keys_to_column_timer = ADD_TIMER(profile(), "InsertKeysToColumnTime"); + _serialize_data_timer = ADD_TIMER(profile(), "SerializeDataTime"); + _hash_table_size_counter = ADD_COUNTER(profile(), "HashTableSize", TUnit::UNIT); auto& p = _parent->template cast(); if (p._without_key) { if (p._needs_finalize) { @@ -83,16 +84,16 @@ void AggLocalState::_close_with_serialized_key() { auto& data = agg_method.data; data.for_each_mapped([&](auto& mapped) { if (mapped) { - Base::_dependency->destroy_agg_status(mapped); + _dependency->destroy_agg_status(mapped); mapped = nullptr; } }); if (data.has_null_key_data()) { - Base::_dependency->destroy_agg_status(data.get_null_key_data()); + _dependency->destroy_agg_status(data.get_null_key_data()); } }, _agg_data->method_variant); - Base::_dependency->release_tracker(); + _dependency->release_tracker(); } void AggLocalState::_close_without_key() { @@ -100,10 +101,10 @@ void AggLocalState::_close_without_key() { //but finally call close to destory agg data, if agg data has bitmapValue //will be core dump, it's not initialized if (_agg_data_created_without_key) { - Base::_dependency->destroy_agg_status(_agg_data->without_key); + _dependency->destroy_agg_status(_agg_data->without_key); _agg_data_created_without_key = false; } - Base::_dependency->release_tracker(); + _dependency->release_tracker(); } Status AggLocalState::_serialize_with_serialized_key_result(RuntimeState* state, @@ -128,8 +129,8 @@ Status AggLocalState::_serialize_with_serialized_key_result_with_spilt_data( _shared_state->spill_partition_helper->partition_count) { break; } - RETURN_IF_ERROR(Base::_dependency->reset_hash_table()); - RETURN_IF_ERROR(Base::_dependency->merge_spilt_data()); + RETURN_IF_ERROR(_dependency->reset_hash_table()); + RETURN_IF_ERROR(_dependency->merge_spilt_data()); _shared_state->aggregate_data_container->init_once(); } @@ -154,7 +155,7 @@ Status AggLocalState::_serialize_with_serialized_key_result_non_spill(RuntimeSta vectorized::DataTypes value_data_types(agg_size); // non-nullable column(id in `_make_nullable_keys`) will be converted to nullable. - bool mem_reuse = Base::_dependency->make_nullable_keys().empty() && block->mem_reuse(); + bool mem_reuse = _dependency->make_nullable_keys().empty() && block->mem_reuse(); vectorized::MutableColumns key_columns; for (int i = 0; i < key_size; ++i) { @@ -233,8 +234,8 @@ Status AggLocalState::_serialize_with_serialized_key_result_non_spill(RuntimeSta } _shared_state->aggregate_evaluators[i]->function()->serialize_to_column( _shared_state->values, - Base::_dependency->offsets_of_aggregate_states()[i], - value_columns[i], num_rows); + _dependency->offsets_of_aggregate_states()[i], value_columns[i], + num_rows); } } }, @@ -278,8 +279,8 @@ Status AggLocalState::_get_result_with_spilt_data(RuntimeState* state, vectorize _shared_state->spill_partition_helper->partition_count) { break; } - RETURN_IF_ERROR(Base::_dependency->reset_hash_table()); - RETURN_IF_ERROR(Base::_dependency->merge_spilt_data()); + RETURN_IF_ERROR(_dependency->reset_hash_table()); + RETURN_IF_ERROR(_dependency->merge_spilt_data()); _shared_state->aggregate_data_container->init_once(); } @@ -298,7 +299,7 @@ Status AggLocalState::_get_result_with_serialized_key_non_spill(RuntimeState* st vectorized::Block* block, SourceState& source_state) { // non-nullable column(id in `_make_nullable_keys`) will be converted to nullable. 
- bool mem_reuse = Base::_dependency->make_nullable_keys().empty() && block->mem_reuse(); + bool mem_reuse = _dependency->make_nullable_keys().empty() && block->mem_reuse(); auto columns_with_schema = vectorized::VectorizedUtils::create_columns_with_type_and_name( _parent->cast()._row_descriptor); @@ -356,8 +357,7 @@ Status AggLocalState::_get_result_with_serialized_key_non_spill(RuntimeState* st for (size_t i = 0; i < _shared_state->aggregate_evaluators.size(); ++i) { _shared_state->aggregate_evaluators[i]->insert_result_info_vec( - _shared_state->values, - Base::_dependency->offsets_of_aggregate_states()[i], + _shared_state->values, _dependency->offsets_of_aggregate_states()[i], value_columns[i].get(), num_rows); } @@ -372,8 +372,7 @@ Status AggLocalState::_get_result_with_serialized_key_non_spill(RuntimeState* st auto mapped = agg_method.data.get_null_key_data(); for (size_t i = 0; i < _shared_state->aggregate_evaluators.size(); ++i) _shared_state->aggregate_evaluators[i]->insert_result_info( - mapped + - Base::_dependency->offsets_of_aggregate_states()[i], + mapped + _dependency->offsets_of_aggregate_states()[i], value_columns[i].get()); source_state = SourceState::FINISHED; } @@ -426,7 +425,7 @@ Status AggLocalState::_serialize_without_key(RuntimeState* state, vectorized::Bl for (int i = 0; i < _shared_state->aggregate_evaluators.size(); ++i) { _shared_state->aggregate_evaluators[i]->function()->serialize_without_key_to_column( - _agg_data->without_key + Base::_dependency->offsets_of_aggregate_states()[i], + _agg_data->without_key + _dependency->offsets_of_aggregate_states()[i], *value_columns[i]); } @@ -463,8 +462,7 @@ Status AggLocalState::_get_without_key_result(RuntimeState* state, vectorized::B for (int i = 0; i < _shared_state->aggregate_evaluators.size(); ++i) { auto column = columns[i].get(); _shared_state->aggregate_evaluators[i]->insert_result_info( - _agg_data->without_key + Base::_dependency->offsets_of_aggregate_states()[i], - column); + _agg_data->without_key + _dependency->offsets_of_aggregate_states()[i], column); } const auto& block_schema = block->get_columns_with_type_and_name(); @@ -516,7 +514,7 @@ Status AggSourceOperatorX::get_block(RuntimeState* state, vectorized::Block* blo void AggLocalState::make_nullable_output_key(vectorized::Block* block) { if (block->rows() != 0) { - for (auto cid : Base::_dependency->make_nullable_keys()) { + for (auto cid : _dependency->make_nullable_keys()) { block->get_by_position(cid).column = make_nullable(block->get_by_position(cid).column); block->get_by_position(cid).type = make_nullable(block->get_by_position(cid).type); } @@ -524,8 +522,9 @@ void AggLocalState::make_nullable_output_key(vectorized::Block* block) { } Status AggLocalState::close(RuntimeState* state) { - SCOPED_TIMER(Base::profile()->total_time_counter()); - if (Base::_closed) { + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_close_timer); + if (_closed) { return Status::OK(); } for (auto* aggregate_evaluator : _shared_state->aggregate_evaluators) { diff --git a/be/src/pipeline/exec/analytic_sink_operator.cpp b/be/src/pipeline/exec/analytic_sink_operator.cpp index ed0059946285a8..6d0fc9ced7e071 100644 --- a/be/src/pipeline/exec/analytic_sink_operator.cpp +++ b/be/src/pipeline/exec/analytic_sink_operator.cpp @@ -28,6 +28,8 @@ OPERATOR_CODE_GENERATOR(AnalyticSinkOperator, StreamingOperator) Status AnalyticSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& info) { RETURN_IF_ERROR(PipelineXSinkLocalState::init(state, info)); + 
SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); auto& p = _parent->cast(); _shared_state->partition_by_column_idxs.resize(p._partition_by_eq_expr_ctxs.size()); _shared_state->ordey_by_column_idxs.resize(p._order_by_eq_expr_ctxs.size()); @@ -138,6 +140,8 @@ Status AnalyticSinkOperatorX::open(RuntimeState* state) { Status AnalyticSinkOperatorX::sink(doris::RuntimeState* state, vectorized::Block* input_block, SourceState source_state) { auto& local_state = state->get_sink_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); + COUNTER_UPDATE(local_state.rows_input_counter(), (int64_t)input_block->rows()); local_state._shared_state->input_eos = source_state == SourceState::FINISHED; if (local_state._shared_state->input_eos && input_block->rows() == 0) { local_state._shared_state->need_more_input = false; diff --git a/be/src/pipeline/exec/analytic_source_operator.cpp b/be/src/pipeline/exec/analytic_source_operator.cpp index 0f0e85c2ee061b..b3f7ccde2b97b6 100644 --- a/be/src/pipeline/exec/analytic_source_operator.cpp +++ b/be/src/pipeline/exec/analytic_source_operator.cpp @@ -38,6 +38,8 @@ AnalyticLocalState::AnalyticLocalState(RuntimeState* state, OperatorXBase* paren Status AnalyticLocalState::init(RuntimeState* state, LocalStateInfo& info) { RETURN_IF_ERROR(PipelineXLocalState::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); _agg_arena_pool = std::make_unique(); auto& p = _parent->cast(); @@ -390,6 +392,7 @@ Status AnalyticSourceOperatorX::init(const TPlanNode& tnode, RuntimeState* state Status AnalyticSourceOperatorX::get_block(RuntimeState* state, vectorized::Block* block, SourceState& source_state) { auto& local_state = state->get_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); if (local_state._shared_state->input_eos && (local_state._output_block_index == local_state._shared_state->input_blocks.size() || local_state._shared_state->input_total_rows == 0)) { @@ -430,6 +433,8 @@ Dependency* AnalyticSourceOperatorX::wait_for_dependency(RuntimeState* state) { } Status AnalyticLocalState::close(RuntimeState* state) { + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_close_timer); if (_closed) { return Status::OK(); } diff --git a/be/src/pipeline/exec/assert_num_rows_operator.cpp b/be/src/pipeline/exec/assert_num_rows_operator.cpp index 315ecd3e51648e..70f2aeaa1bb233 100644 --- a/be/src/pipeline/exec/assert_num_rows_operator.cpp +++ b/be/src/pipeline/exec/assert_num_rows_operator.cpp @@ -38,6 +38,7 @@ AssertNumRowsOperatorX::AssertNumRowsOperatorX(ObjectPool* pool, const TPlanNode Status AssertNumRowsOperatorX::pull(doris::RuntimeState* state, vectorized::Block* block, SourceState& source_state) { auto& local_state = state->get_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); local_state.add_num_rows_returned(block->rows()); int64_t num_rows_returned = local_state.num_rows_returned(); bool assert_res = false; @@ -81,6 +82,7 @@ Status AssertNumRowsOperatorX::pull(doris::RuntimeState* state, vectorized::Bloc to_string_lambda(_assertion), _desired_num_rows, _subquery_string); } COUNTER_SET(local_state.rows_returned_counter(), local_state.num_rows_returned()); + COUNTER_UPDATE(local_state.blocks_returned_counter(), 1); return Status::OK(); } diff --git a/be/src/pipeline/exec/exchange_sink_operator.cpp b/be/src/pipeline/exec/exchange_sink_operator.cpp index 652cfdc5c417d5..1ded3eaa25775c 100644 --- 
a/be/src/pipeline/exec/exchange_sink_operator.cpp +++ b/be/src/pipeline/exec/exchange_sink_operator.cpp @@ -97,6 +97,8 @@ bool ExchangeSinkLocalState::transfer_large_data_by_brpc() const { Status ExchangeSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& info) { RETURN_IF_ERROR(PipelineXSinkLocalState<>::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); _sender_id = info.sender_id; _bytes_sent_counter = ADD_COUNTER(_profile, "BytesSent", TUnit::BYTES); @@ -144,8 +146,6 @@ Status ExchangeSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& inf channel_shared_ptrs[fragment_id_to_channel_index[fragment_instance_id.lo]]); } } - - SCOPED_TIMER(_profile->total_time_counter()); SCOPED_CONSUME_MEM_TRACKER(_mem_tracker.get()); int local_size = 0; @@ -273,6 +273,7 @@ Status ExchangeSinkOperatorX::sink(RuntimeState* state, vectorized::Block* block SourceState source_state) { auto& local_state = state->get_sink_local_state(id())->cast(); COUNTER_UPDATE(local_state.rows_input_counter(), (int64_t)block->rows()); + SCOPED_TIMER(local_state.profile()->total_time_counter()); local_state._peak_memory_usage_counter->set(_mem_tracker->peak_consumption()); bool all_receiver_eof = true; for (auto channel : local_state.channels) { @@ -533,6 +534,8 @@ Status ExchangeSinkOperatorX::try_close(RuntimeState* state) { } Status ExchangeSinkLocalState::close(RuntimeState* state) { + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_close_timer); if (_closed) { return Status::OK(); } diff --git a/be/src/pipeline/exec/exchange_source_operator.cpp b/be/src/pipeline/exec/exchange_source_operator.cpp index 5cbb97d247144b..50e51f06ea92ec 100644 --- a/be/src/pipeline/exec/exchange_source_operator.cpp +++ b/be/src/pipeline/exec/exchange_source_operator.cpp @@ -46,6 +46,7 @@ ExchangeLocalState::ExchangeLocalState(RuntimeState* state, OperatorXBase* paren Status ExchangeLocalState::init(RuntimeState* state, LocalStateInfo& info) { RETURN_IF_ERROR(PipelineXLocalState<>::init(state, info)); SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); auto& p = _parent->cast(); stream_recvr = state->exec_env()->vstream_mgr()->create_recvr( state, p.input_row_desc(), state->fragment_instance_id(), p.id(), p.num_senders(), @@ -72,6 +73,7 @@ Status ExchangeLocalState::init(RuntimeState* state, LocalStateInfo& info) { Status ExchangeLocalState::open(RuntimeState* state) { SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); RETURN_IF_ERROR(PipelineXLocalState<>::open(state)); return Status::OK(); } @@ -156,6 +158,7 @@ Status ExchangeSourceOperatorX::get_block(RuntimeState* state, vectorized::Block local_state.set_num_rows_returned(_limit); } COUNTER_SET(local_state.rows_returned_counter(), local_state.num_rows_returned()); + COUNTER_UPDATE(local_state.blocks_returned_counter(), 1); } if (eos) { source_state = SourceState::FINISHED; @@ -175,6 +178,7 @@ bool ExchangeSourceOperatorX::is_pending_finish(RuntimeState* /*state*/) const { Status ExchangeLocalState::close(RuntimeState* state) { SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_close_timer); if (_closed) { return Status::OK(); } diff --git a/be/src/pipeline/exec/hashjoin_build_sink.cpp b/be/src/pipeline/exec/hashjoin_build_sink.cpp index 4aa92cf0b3db2e..35614f2c6727d9 100644 --- a/be/src/pipeline/exec/hashjoin_build_sink.cpp +++ b/be/src/pipeline/exec/hashjoin_build_sink.cpp @@ -47,6 +47,8 @@ 
HashJoinBuildSinkLocalState::HashJoinBuildSinkLocalState(DataSinkOperatorXBase* Status HashJoinBuildSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& info) { RETURN_IF_ERROR(JoinBuildSinkLocalState::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); auto& p = _parent->cast(); _shared_state->join_op_variants = p._join_op_variants; _shared_state->probe_key_sz = p._build_key_sz; @@ -85,10 +87,9 @@ Status HashJoinBuildSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo profile()->AddHighWaterMarkCounter("BuildKeyArena", TUnit::BYTES, "MemoryUsage"); // Build phase - auto record_profile = _should_build_hash_table ? _build_phase_profile : faker_runtime_profile(); - _build_table_timer = ADD_CHILD_TIMER(_build_phase_profile, "BuildTableTime", "BuildTime"); - _build_side_merge_block_timer = - ADD_CHILD_TIMER(_build_phase_profile, "BuildSideMergeBlockTime", "BuildTime"); + auto record_profile = _should_build_hash_table ? profile() : faker_runtime_profile(); + _build_table_timer = ADD_TIMER(profile(), "BuildTableTime"); + _build_side_merge_block_timer = ADD_TIMER(profile(), "BuildSideMergeBlockTime"); _build_table_insert_timer = ADD_TIMER(record_profile, "BuildTableInsertTime"); _build_expr_call_timer = ADD_TIMER(record_profile, "BuildExprCallTime"); _build_table_expanse_timer = ADD_TIMER(record_profile, "BuildTableExpanseTime"); @@ -96,7 +97,6 @@ Status HashJoinBuildSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo _build_side_compute_hash_timer = ADD_TIMER(record_profile, "BuildSideHashComputingTime"); _build_runtime_filter_timer = ADD_TIMER(record_profile, "BuildRuntimeFilterTime"); - _open_timer = ADD_TIMER(profile(), "OpenTime"); _allocate_resource_timer = ADD_TIMER(profile(), "AllocateResourceTime"); _build_buckets_counter = ADD_COUNTER(profile(), "BuildBuckets", TUnit::UNIT); @@ -118,6 +118,8 @@ Status HashJoinBuildSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo } Status HashJoinBuildSinkLocalState::open(RuntimeState* state) { + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); RETURN_IF_ERROR(JoinBuildSinkLocalState::open(state)); auto& p = _parent->cast(); @@ -438,6 +440,8 @@ Status HashJoinBuildSinkOperatorX::open(RuntimeState* state) { Status HashJoinBuildSinkOperatorX::sink(RuntimeState* state, vectorized::Block* in_block, SourceState source_state) { auto& local_state = state->get_sink_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); + COUNTER_UPDATE(local_state.rows_input_counter(), (int64_t)in_block->rows()); SCOPED_TIMER(local_state._build_timer); // make one block for each 4 gigabytes @@ -539,13 +543,12 @@ Status HashJoinBuildSinkOperatorX::sink(RuntimeState* state, vectorized::Block* } else if (!local_state._should_build_hash_table) { DCHECK(_shared_hashtable_controller != nullptr); DCHECK(_shared_hash_table_context != nullptr); - auto wait_timer = ADD_CHILD_TIMER(local_state._build_phase_profile, - "WaitForSharedHashTableTime", "BuildTime"); + auto wait_timer = ADD_TIMER(local_state.profile(), "WaitForSharedHashTableTime"); SCOPED_TIMER(wait_timer); RETURN_IF_ERROR( _shared_hashtable_controller->wait_for_signal(state, _shared_hash_table_context)); - local_state._build_phase_profile->add_info_string( + local_state.profile()->add_info_string( "SharedHashTableFrom", print_id(_shared_hashtable_controller->get_builder_fragment_instance_id(id()))); local_state._short_circuit_for_null_in_probe_side = diff --git 
a/be/src/pipeline/exec/hashjoin_build_sink.h b/be/src/pipeline/exec/hashjoin_build_sink.h index 38c67b0898d9b9..04f80e72843894 100644 --- a/be/src/pipeline/exec/hashjoin_build_sink.h +++ b/be/src/pipeline/exec/hashjoin_build_sink.h @@ -97,7 +97,6 @@ class HashJoinBuildSinkLocalState final RuntimeProfile::Counter* _build_collisions_counter; - RuntimeProfile::Counter* _open_timer; RuntimeProfile::Counter* _allocate_resource_timer; RuntimeProfile::Counter* _memory_usage_counter; diff --git a/be/src/pipeline/exec/hashjoin_probe_operator.cpp b/be/src/pipeline/exec/hashjoin_probe_operator.cpp index be888307bed5a0..7e534cf9447081 100644 --- a/be/src/pipeline/exec/hashjoin_probe_operator.cpp +++ b/be/src/pipeline/exec/hashjoin_probe_operator.cpp @@ -31,6 +31,8 @@ HashJoinProbeLocalState::HashJoinProbeLocalState(RuntimeState* state, OperatorXB Status HashJoinProbeLocalState::init(RuntimeState* state, LocalStateInfo& info) { RETURN_IF_ERROR(JoinProbeLocalState::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); auto& p = _parent->cast(); _probe_ignore_null = p._probe_ignore_null; _probe_expr_ctxs.resize(p._probe_expr_ctxs.size()); @@ -51,17 +53,12 @@ Status HashJoinProbeLocalState::init(RuntimeState* state, LocalStateInfo& info) _probe_arena_memory_usage = profile()->AddHighWaterMarkCounter("ProbeKeyArena", TUnit::BYTES, "MemoryUsage"); // Probe phase - auto probe_phase_profile = _probe_phase_profile; - _probe_next_timer = ADD_TIMER(probe_phase_profile, "ProbeFindNextTime"); - _probe_expr_call_timer = ADD_TIMER(probe_phase_profile, "ProbeExprCallTime"); - _search_hashtable_timer = - ADD_CHILD_TIMER(probe_phase_profile, "ProbeWhenSearchHashTableTime", "ProbeTime"); - _build_side_output_timer = - ADD_CHILD_TIMER(probe_phase_profile, "ProbeWhenBuildSideOutputTime", "ProbeTime"); - _probe_side_output_timer = - ADD_CHILD_TIMER(probe_phase_profile, "ProbeWhenProbeSideOutputTime", "ProbeTime"); - _probe_process_hashtable_timer = - ADD_CHILD_TIMER(probe_phase_profile, "ProbeWhenProcessHashTableTime", "ProbeTime"); + _probe_next_timer = ADD_TIMER(profile(), "ProbeFindNextTime"); + _probe_expr_call_timer = ADD_TIMER(profile(), "ProbeExprCallTime"); + _search_hashtable_timer = ADD_TIMER(profile(), "ProbeWhenSearchHashTableTime"); + _build_side_output_timer = ADD_TIMER(profile(), "ProbeWhenBuildSideOutputTime"); + _probe_side_output_timer = ADD_TIMER(profile(), "ProbeWhenProbeSideOutputTime"); + _probe_process_hashtable_timer = ADD_TIMER(profile(), "ProbeWhenProcessHashTableTime"); _process_other_join_conjunct_timer = ADD_TIMER(profile(), "OtherJoinConjunctTime"); return Status::OK(); } @@ -73,6 +70,8 @@ void HashJoinProbeLocalState::prepare_for_next() { } Status HashJoinProbeLocalState::close(RuntimeState* state) { + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_close_timer); if (_closed) { return Status::OK(); } diff --git a/be/src/pipeline/exec/join_build_sink_operator.cpp b/be/src/pipeline/exec/join_build_sink_operator.cpp index f64f9128d76973..13cbcbcb03238d 100644 --- a/be/src/pipeline/exec/join_build_sink_operator.cpp +++ b/be/src/pipeline/exec/join_build_sink_operator.cpp @@ -32,11 +32,11 @@ Status JoinBuildSinkLocalState::init(RuntimeState* stat PipelineXSinkLocalState::profile()->add_info_string("JoinType", to_string(p._join_op)); - _build_phase_profile = PipelineXSinkLocalState::profile()->create_child( - "BuildPhase", true, true); - _build_get_next_timer = ADD_TIMER(_build_phase_profile, "BuildGetNextTime"); - _build_timer = 
ADD_TIMER(_build_phase_profile, "BuildTime"); - _build_rows_counter = ADD_COUNTER(_build_phase_profile, "BuildRows", TUnit::UNIT); + _build_get_next_timer = + ADD_TIMER(PipelineXSinkLocalState::profile(), "BuildGetNextTime"); + _build_timer = ADD_TIMER(PipelineXSinkLocalState::profile(), "BuildTime"); + _build_rows_counter = ADD_COUNTER(PipelineXSinkLocalState::profile(), + "BuildRows", TUnit::UNIT); _push_down_timer = ADD_TIMER(PipelineXSinkLocalState::profile(), "PublishRuntimeFilterTime"); diff --git a/be/src/pipeline/exec/join_build_sink_operator.h b/be/src/pipeline/exec/join_build_sink_operator.h index 3b1dbed3cbef6c..361982dd970cce 100644 --- a/be/src/pipeline/exec/join_build_sink_operator.h +++ b/be/src/pipeline/exec/join_build_sink_operator.h @@ -42,7 +42,6 @@ class JoinBuildSinkLocalState : public PipelineXSinkLocalState { bool _short_circuit_for_null_in_probe_side = false; - RuntimeProfile* _build_phase_profile; RuntimeProfile::Counter* _build_timer; RuntimeProfile::Counter* _build_get_next_timer; RuntimeProfile::Counter* _build_rows_counter; diff --git a/be/src/pipeline/exec/join_probe_operator.cpp b/be/src/pipeline/exec/join_probe_operator.cpp index 8cc8fbd4b24c14..c4776afe0190f6 100644 --- a/be/src/pipeline/exec/join_probe_operator.cpp +++ b/be/src/pipeline/exec/join_probe_operator.cpp @@ -38,12 +38,10 @@ Status JoinProbeLocalState::init(RuntimeState* state, RETURN_IF_ERROR(p._output_expr_ctxs[i]->clone(state, _output_expr_ctxs[i])); } - _probe_phase_profile = Base::profile()->create_child("ProbePhase", true, true); - _probe_timer = ADD_TIMER(_probe_phase_profile, "ProbeTime"); - _join_filter_timer = ADD_CHILD_TIMER(_probe_phase_profile, "JoinFilterTimer", "ProbeTime"); - _build_output_block_timer = - ADD_CHILD_TIMER(_probe_phase_profile, "BuildOutputBlock", "ProbeTime"); - _probe_rows_counter = ADD_COUNTER(_probe_phase_profile, "ProbeRows", TUnit::UNIT); + _probe_timer = ADD_TIMER(Base::profile(), "ProbeTime"); + _join_filter_timer = ADD_TIMER(Base::profile(), "JoinFilterTimer"); + _build_output_block_timer = ADD_TIMER(Base::profile(), "BuildOutputBlock"); + _probe_rows_counter = ADD_COUNTER(Base::profile(), "ProbeRows", TUnit::UNIT); return Status::OK(); } diff --git a/be/src/pipeline/exec/join_probe_operator.h b/be/src/pipeline/exec/join_probe_operator.h index 5debf92377c396..e3ed33e4d99748 100644 --- a/be/src/pipeline/exec/join_probe_operator.h +++ b/be/src/pipeline/exec/join_probe_operator.h @@ -53,7 +53,6 @@ class JoinProbeLocalState : public PipelineXLocalState { vectorized::MutableColumnPtr _tuple_is_null_left_flag_column; vectorized::MutableColumnPtr _tuple_is_null_right_flag_column; - RuntimeProfile* _probe_phase_profile; RuntimeProfile::Counter* _probe_timer; RuntimeProfile::Counter* _probe_rows_counter; RuntimeProfile::Counter* _join_filter_timer; diff --git a/be/src/pipeline/exec/nested_loop_join_build_operator.cpp b/be/src/pipeline/exec/nested_loop_join_build_operator.cpp index 103ae8a9435ffe..df00b850293a52 100644 --- a/be/src/pipeline/exec/nested_loop_join_build_operator.cpp +++ b/be/src/pipeline/exec/nested_loop_join_build_operator.cpp @@ -32,6 +32,8 @@ NestedLoopJoinBuildSinkLocalState::NestedLoopJoinBuildSinkLocalState(DataSinkOpe Status NestedLoopJoinBuildSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& info) { RETURN_IF_ERROR(JoinBuildSinkLocalState::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); auto& p = _parent->cast(); _filter_src_expr_ctxs.resize(p._filter_src_expr_ctxs.size()); for 
(size_t i = 0; i < _filter_src_expr_ctxs.size(); i++) { @@ -87,6 +89,8 @@ Status NestedLoopJoinBuildSinkOperatorX::sink(doris::RuntimeState* state, vector SourceState source_state) { auto& local_state = state->get_sink_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); + COUNTER_UPDATE(local_state.rows_input_counter(), (int64_t)block->rows()); SCOPED_TIMER(local_state._build_timer); auto rows = block->rows(); auto mem_usage = block->allocated_bytes(); diff --git a/be/src/pipeline/exec/nested_loop_join_probe_operator.cpp b/be/src/pipeline/exec/nested_loop_join_probe_operator.cpp index 050d5ae0b8e81d..5fc115969f2d34 100644 --- a/be/src/pipeline/exec/nested_loop_join_probe_operator.cpp +++ b/be/src/pipeline/exec/nested_loop_join_probe_operator.cpp @@ -51,6 +51,8 @@ NestedLoopJoinProbeLocalState::NestedLoopJoinProbeLocalState(RuntimeState* state Status NestedLoopJoinProbeLocalState::init(RuntimeState* state, LocalStateInfo& info) { RETURN_IF_ERROR(JoinProbeLocalState::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); auto& p = _parent->cast(); _join_conjuncts.resize(p._join_conjuncts.size()); for (size_t i = 0; i < _join_conjuncts.size(); i++) { @@ -61,6 +63,11 @@ Status NestedLoopJoinProbeLocalState::init(RuntimeState* state, LocalStateInfo& } Status NestedLoopJoinProbeLocalState::close(RuntimeState* state) { + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_close_timer); + if (_closed) { + return Status::OK(); + } _child_block->clear(); vectorized::Blocks tmp_build_blocks; diff --git a/be/src/pipeline/exec/repeat_operator.cpp b/be/src/pipeline/exec/repeat_operator.cpp index d2220febd49e68..ce9b0df1c1e37b 100644 --- a/be/src/pipeline/exec/repeat_operator.cpp +++ b/be/src/pipeline/exec/repeat_operator.cpp @@ -52,6 +52,8 @@ RepeatLocalState::RepeatLocalState(RuntimeState* state, OperatorXBase* parent) Status RepeatLocalState::init(RuntimeState* state, LocalStateInfo& info) { RETURN_IF_ERROR(Base::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); auto& p = _parent->cast(); _expr_ctxs.resize(p._expr_ctxs.size()); for (size_t i = 0; i < _expr_ctxs.size(); i++) { @@ -209,6 +211,7 @@ Status RepeatOperatorX::push(RuntimeState* state, vectorized::Block* input_block Status RepeatOperatorX::pull(doris::RuntimeState* state, vectorized::Block* output_block, SourceState& source_state) const { auto& local_state = state->get_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); auto& _repeat_id_idx = local_state._repeat_id_idx; auto& _child_block = *local_state._child_block; auto& _child_eos = local_state._child_eos; diff --git a/be/src/pipeline/exec/repeat_operator.h b/be/src/pipeline/exec/repeat_operator.h index 976b704ccd8e1e..08c6503e87092c 100644 --- a/be/src/pipeline/exec/repeat_operator.h +++ b/be/src/pipeline/exec/repeat_operator.h @@ -44,6 +44,7 @@ class RepeatOperator final : public StatefulOperator { Status close(RuntimeState* state) override; }; + class RepeatOperatorX; class RepeatLocalState final : public PipelineXLocalState { diff --git a/be/src/pipeline/exec/result_sink_operator.cpp b/be/src/pipeline/exec/result_sink_operator.cpp index 5cab5bededdb9c..02bf5bf99d457e 100644 --- a/be/src/pipeline/exec/result_sink_operator.cpp +++ b/be/src/pipeline/exec/result_sink_operator.cpp @@ -52,6 +52,8 @@ bool ResultSinkOperator::can_write() { Status ResultSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& info) { 
RETURN_IF_ERROR(PipelineXSinkLocalState<>::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); auto fragment_instance_id = state->fragment_instance_id(); // create sender std::shared_ptr sender = nullptr; @@ -62,6 +64,8 @@ Status ResultSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& info) } Status ResultSinkLocalState::open(RuntimeState* state) { + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); RETURN_IF_ERROR(PipelineXSinkLocalState<>::open(state)); auto& p = _parent->cast(); _output_vexpr_ctxs.resize(p._output_vexpr_ctxs.size()); @@ -120,6 +124,7 @@ Status ResultSinkOperatorX::open(RuntimeState* state) { Status ResultSinkOperatorX::sink(RuntimeState* state, vectorized::Block* block, SourceState source_state) { auto& local_state = state->get_sink_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); if (_fetch_option.use_two_phase_fetch && block->rows() > 0) { RETURN_IF_ERROR(_second_phase_fetch_data(state, block)); } @@ -148,6 +153,8 @@ Status ResultSinkOperatorX::_second_phase_fetch_data(RuntimeState* state, } Status ResultSinkLocalState::close(RuntimeState* state) { + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_close_timer); if (_closed) { return Status::OK(); } diff --git a/be/src/pipeline/exec/scan_operator.cpp b/be/src/pipeline/exec/scan_operator.cpp index 544bc3bc675fda..0b107d9a139545 100644 --- a/be/src/pipeline/exec/scan_operator.cpp +++ b/be/src/pipeline/exec/scan_operator.cpp @@ -114,6 +114,7 @@ template Status ScanLocalState::init(RuntimeState* state, LocalStateInfo& info) { RETURN_IF_ERROR(PipelineXLocalState<>::init(state, info)); SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); RETURN_IF_ERROR(RuntimeFilterConsumer::init(state)); _source_dependency = OrDependency::create_shared(PipelineXLocalState<>::_parent->id()); @@ -147,8 +148,6 @@ Status ScanLocalState::init(RuntimeState* state, LocalStateInfo& info) _get_next_timer = ADD_TIMER(_runtime_profile, "GetNextTime"); _prepare_rf_timer(_runtime_profile.get()); - - _open_timer = ADD_TIMER(_runtime_profile, "OpenTime"); _alloc_resource_timer = ADD_TIMER(_runtime_profile, "AllocateResourceTime"); static const std::string timer_name = "WaitForDependencyTime"; @@ -1338,23 +1337,31 @@ Status ScanOperatorX::try_close(RuntimeState* state) { template Status ScanLocalState::close(RuntimeState* state) { - SCOPED_TIMER(profile()->total_time_counter()); - if (_closed) { - return Status::OK(); - } - if (_scanner_ctx.get()) { - _scanner_ctx->clear_and_join(reinterpret_cast(this), state); - } + SCOPED_TIMER(_close_timer); if (_data_ready_dependency) { - COUNTER_SET(_wait_for_data_timer, _data_ready_dependency->read_watcher_elapse_time()); + COUNTER_UPDATE(_wait_for_data_timer, _data_ready_dependency->read_watcher_elapse_time()); + COUNTER_UPDATE(profile()->total_time_counter(), + _data_ready_dependency->read_watcher_elapse_time()); } if (_eos_dependency) { COUNTER_SET(_wait_for_eos_timer, _eos_dependency->read_watcher_elapse_time()); + COUNTER_UPDATE(profile()->total_time_counter(), + _eos_dependency->read_watcher_elapse_time()); } if (_scanner_done_dependency) { COUNTER_SET(_wait_for_scanner_done_timer, _scanner_done_dependency->read_watcher_elapse_time()); + COUNTER_UPDATE(profile()->total_time_counter(), + _scanner_done_dependency->read_watcher_elapse_time()); } + SCOPED_TIMER(profile()->total_time_counter()); + if (_closed) { + return Status::OK(); + } + if 
(_scanner_ctx.get()) { + _scanner_ctx->clear_and_join(reinterpret_cast(this), state); + } + return PipelineXLocalState<>::close(state); } @@ -1414,7 +1421,7 @@ Status ScanOperatorX::get_block(RuntimeState* state, vectorized: block->swap(*scan_block); local_state._scanner_ctx->return_free_block(std::move(scan_block)); - local_state.reached_limit(block, &eos); + local_state.reached_limit(block, source_state); if (eos) { source_state = SourceState::FINISHED; // reach limit, stop the scanners. diff --git a/be/src/pipeline/exec/scan_operator.h b/be/src/pipeline/exec/scan_operator.h index 1c2aaac332bfc7..9d94aaf9517db5 100644 --- a/be/src/pipeline/exec/scan_operator.h +++ b/be/src/pipeline/exec/scan_operator.h @@ -395,7 +395,6 @@ class ScanLocalState : public ScanLocalStateBase { std::vector _not_in_value_ranges; RuntimeProfile::Counter* _get_next_timer = nullptr; - RuntimeProfile::Counter* _open_timer = nullptr; RuntimeProfile::Counter* _alloc_resource_timer = nullptr; RuntimeProfile::Counter* _acquire_runtime_filter_timer = nullptr; diff --git a/be/src/pipeline/exec/select_operator.h b/be/src/pipeline/exec/select_operator.h index 16d6f0f3b31357..2c6a8721fc4227 100644 --- a/be/src/pipeline/exec/select_operator.h +++ b/be/src/pipeline/exec/select_operator.h @@ -60,6 +60,7 @@ class SelectOperatorX final : public StreamingOperatorX { Status pull(RuntimeState* state, vectorized::Block* block, SourceState& source_state) override { auto& local_state = state->get_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); RETURN_IF_CANCELLED(state); RETURN_IF_ERROR(vectorized::VExprContext::filter_block(local_state._conjuncts, block, block->columns())); diff --git a/be/src/pipeline/exec/sort_sink_operator.cpp b/be/src/pipeline/exec/sort_sink_operator.cpp index e96bff55ad5f5f..291a30ef2346e1 100644 --- a/be/src/pipeline/exec/sort_sink_operator.cpp +++ b/be/src/pipeline/exec/sort_sink_operator.cpp @@ -30,6 +30,8 @@ OPERATOR_CODE_GENERATOR(SortSinkOperator, StreamingOperator) Status SortSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& info) { RETURN_IF_ERROR(PipelineXSinkLocalState::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); auto& p = _parent->cast(); RETURN_IF_ERROR(p._vsort_exec_exprs.clone(state, _vsort_exec_exprs)); @@ -143,6 +145,8 @@ Status SortSinkOperatorX::open(RuntimeState* state) { Status SortSinkOperatorX::sink(doris::RuntimeState* state, vectorized::Block* in_block, SourceState source_state) { auto& local_state = state->get_sink_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); + COUNTER_UPDATE(local_state.rows_input_counter(), (int64_t)in_block->rows()); if (in_block->rows() > 0) { RETURN_IF_ERROR(local_state._shared_state->sorter->append_block(in_block)); RETURN_IF_CANCELLED(state); diff --git a/be/src/pipeline/exec/sort_source_operator.cpp b/be/src/pipeline/exec/sort_source_operator.cpp index 89929ea35151fd..e52b995b1dd25d 100644 --- a/be/src/pipeline/exec/sort_source_operator.cpp +++ b/be/src/pipeline/exec/sort_source_operator.cpp @@ -30,6 +30,8 @@ SortLocalState::SortLocalState(RuntimeState* state, OperatorXBase* parent) Status SortLocalState::init(RuntimeState* state, LocalStateInfo& info) { RETURN_IF_ERROR(PipelineXLocalState::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); _get_next_timer = ADD_TIMER(profile(), "GetResultTime"); return Status::OK(); } @@ -41,14 +43,15 @@ 
SortSourceOperatorX::SortSourceOperatorX(ObjectPool* pool, const TPlanNode& tnod Status SortSourceOperatorX::get_block(RuntimeState* state, vectorized::Block* block, SourceState& source_state) { auto& local_state = state->get_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); SCOPED_TIMER(local_state._get_next_timer); bool eos; RETURN_IF_ERROR_OR_CATCH_EXCEPTION( local_state._shared_state->sorter->get_next(state, block, &eos)); - local_state.reached_limit(block, &eos); if (eos) { source_state = SourceState::FINISHED; } + local_state.reached_limit(block, source_state); return Status::OK(); } @@ -58,6 +61,8 @@ Dependency* SortSourceOperatorX::wait_for_dependency(RuntimeState* state) { } Status SortLocalState::close(RuntimeState* state) { + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_close_timer); if (_closed) { return Status::OK(); } diff --git a/be/src/pipeline/exec/streaming_aggregation_sink_operator.cpp b/be/src/pipeline/exec/streaming_aggregation_sink_operator.cpp index e6915dd4e6415f..b58cd4fdc506c5 100644 --- a/be/src/pipeline/exec/streaming_aggregation_sink_operator.cpp +++ b/be/src/pipeline/exec/streaming_aggregation_sink_operator.cpp @@ -369,6 +369,7 @@ Status StreamingAggSinkOperatorX::init(const TPlanNode& tnode, RuntimeState* sta Status StreamingAggSinkOperatorX::sink(RuntimeState* state, vectorized::Block* in_block, SourceState source_state) { auto& local_state = state->get_sink_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); COUNTER_UPDATE(local_state.rows_input_counter(), (int64_t)in_block->rows()); local_state._shared_state->input_num_rows += in_block->rows(); Status ret = Status::OK(); diff --git a/be/src/pipeline/exec/union_sink_operator.cpp b/be/src/pipeline/exec/union_sink_operator.cpp index c1fd75d820fef7..824decd790e2d4 100644 --- a/be/src/pipeline/exec/union_sink_operator.cpp +++ b/be/src/pipeline/exec/union_sink_operator.cpp @@ -96,6 +96,8 @@ Status UnionSinkOperator::close(RuntimeState* state) { Status UnionSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& info) { RETURN_IF_ERROR(Base::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); auto& p = _parent->cast(); _child_expr.resize(p._child_expr.size()); for (size_t i = 0; i < p._child_expr.size(); i++) { @@ -144,6 +146,8 @@ Status UnionSinkOperatorX::open(RuntimeState* state) { Status UnionSinkOperatorX::sink(RuntimeState* state, vectorized::Block* in_block, SourceState source_state) { auto& local_state = state->get_sink_local_state(id())->cast(); + SCOPED_TIMER(local_state.profile()->total_time_counter()); + COUNTER_UPDATE(local_state.rows_input_counter(), (int64_t)in_block->rows()); if (local_state._output_block == nullptr) { local_state._output_block = local_state._shared_state->_data_queue->get_free_block(_cur_child_id); @@ -184,4 +188,4 @@ Status UnionSinkOperatorX::sink(RuntimeState* state, vectorized::Block* in_block return Status::OK(); } -} // namespace doris::pipeline \ No newline at end of file +} // namespace doris::pipeline diff --git a/be/src/pipeline/exec/union_source_operator.cpp b/be/src/pipeline/exec/union_source_operator.cpp index 815972e5e01060..041038bd166b48 100644 --- a/be/src/pipeline/exec/union_source_operator.cpp +++ b/be/src/pipeline/exec/union_source_operator.cpp @@ -100,6 +100,8 @@ Status UnionSourceOperator::get_block(RuntimeState* state, vectorized::Block* bl Status UnionSourceLocalState::init(RuntimeState* state, LocalStateInfo& 
info) { RETURN_IF_ERROR(Base::init(state, info)); + SCOPED_TIMER(profile()->total_time_counter()); + SCOPED_TIMER(_open_timer); auto& p = _parent->cast(); std::shared_ptr data_queue = std::make_shared(p._child_size, nullptr, _dependency); @@ -110,7 +112,7 @@ Status UnionSourceLocalState::init(RuntimeState* state, LocalStateInfo& info) { Status UnionSourceOperatorX::get_block(RuntimeState* state, vectorized::Block* block, SourceState& source_state) { auto& local_state = state->get_local_state(id())->cast(); - bool eos = false; + SCOPED_TIMER(local_state.profile()->total_time_counter()); std::unique_ptr output_block = vectorized::Block::create_unique(); int child_idx = 0; local_state._shared_state->_data_queue->get_block_from_queue(&output_block, &child_idx); @@ -121,7 +123,7 @@ Status UnionSourceOperatorX::get_block(RuntimeState* state, vectorized::Block* b output_block->clear_column_data(row_desc().num_materialized_slots()); local_state._shared_state->_data_queue->push_free_block(std::move(output_block), child_idx); - local_state.reached_limit(block, &eos); + local_state.reached_limit(block, source_state); //have exectue const expr, queue have no data any more, and child could be colsed if ((!_has_data(state) && local_state._shared_state->_data_queue->is_all_finish())) { source_state = SourceState::FINISHED; @@ -134,4 +136,4 @@ Status UnionSourceOperatorX::get_block(RuntimeState* state, vectorized::Block* b } } // namespace pipeline -} // namespace doris \ No newline at end of file +} // namespace doris diff --git a/be/src/pipeline/pipeline_x/operator.cpp b/be/src/pipeline/pipeline_x/operator.cpp index 3452588bad03fd..64e5fc5d4619ab 100644 --- a/be/src/pipeline/pipeline_x/operator.cpp +++ b/be/src/pipeline/pipeline_x/operator.cpp @@ -196,16 +196,6 @@ bool PipelineXLocalStateBase::reached_limit() const { return _parent->_limit != -1 && _num_rows_returned >= _parent->_limit; } -void PipelineXLocalStateBase::reached_limit(vectorized::Block* block, bool* eos) { - if (_parent->_limit != -1 and _num_rows_returned + block->rows() >= _parent->_limit) { - block->set_num_rows(_parent->_limit - _num_rows_returned); - *eos = true; - } - - _num_rows_returned += block->rows(); - COUNTER_SET(_rows_returned_counter, _num_rows_returned); -} - void PipelineXLocalStateBase::reached_limit(vectorized::Block* block, SourceState& source_state) { if (_parent->_limit != -1 and _num_rows_returned + block->rows() >= _parent->_limit) { block->set_num_rows(_parent->_limit - _num_rows_returned); @@ -213,6 +203,7 @@ void PipelineXLocalStateBase::reached_limit(vectorized::Block* block, SourceStat } _num_rows_returned += block->rows(); + COUNTER_UPDATE(_blocks_returned_counter, 1); COUNTER_SET(_rows_returned_counter, _num_rows_returned); } @@ -281,8 +272,6 @@ Status StreamingOperatorX::get_block(RuntimeState* state, vector SourceState& source_state) { RETURN_IF_ERROR(OperatorX::_child_x->get_next_after_projects(state, block, source_state)); - COUNTER_UPDATE(state->get_local_state(OperatorX::id())->rows_input_counter(), - (int64_t)block->rows()); return pull(state, block, source_state); } @@ -295,7 +284,6 @@ Status StatefulOperatorX::get_block(RuntimeState* state, vectori local_state._child_block->clear_column_data(); RETURN_IF_ERROR(OperatorX::_child_x->get_next_after_projects( state, local_state._child_block.get(), local_state._child_source_state)); - COUNTER_UPDATE(local_state.rows_input_counter(), (int64_t)local_state._child_block->rows()); source_state = local_state._child_source_state; if 
(local_state._child_block->rows() == 0 && local_state._child_source_state != SourceState::FINISHED) { diff --git a/be/src/pipeline/pipeline_x/operator.h b/be/src/pipeline/pipeline_x/operator.h index 13cf56c65d638c..60e034508af0ea 100644 --- a/be/src/pipeline/pipeline_x/operator.h +++ b/be/src/pipeline/pipeline_x/operator.h @@ -74,7 +74,6 @@ class PipelineXLocalStateBase { void clear_origin_block(); bool reached_limit() const; - void reached_limit(vectorized::Block* block, bool* eos); void reached_limit(vectorized::Block* block, SourceState& source_state); RuntimeProfile* profile() { return _runtime_profile.get(); } @@ -84,7 +83,7 @@ class PipelineXLocalStateBase { RuntimeProfile::Counter* memory_used_counter() { return _memory_used_counter; } RuntimeProfile::Counter* projection_timer() { return _projection_timer; } RuntimeProfile::Counter* wait_for_dependency_timer() { return _wait_for_dependency_timer; } - RuntimeProfile::Counter* rows_input_counter() { return _rows_input_counter; } + RuntimeProfile::Counter* blocks_returned_counter() { return _rows_returned_counter; } OperatorXBase* parent() { return _parent; } RuntimeState* state() { return _state; } @@ -109,7 +108,7 @@ class PipelineXLocalStateBase { std::unique_ptr _mem_tracker; RuntimeProfile::Counter* _rows_returned_counter; - RuntimeProfile::Counter* _rows_input_counter; + RuntimeProfile::Counter* _blocks_returned_counter; RuntimeProfile::Counter* _rows_returned_rate; RuntimeProfile::Counter* _wait_for_dependency_timer; // Account for peak memory used by this node @@ -117,6 +116,8 @@ class PipelineXLocalStateBase { RuntimeProfile::Counter* _projection_timer; // Account for peak memory used by this node RuntimeProfile::Counter* _peak_memory_usage_counter; + RuntimeProfile::Counter* _open_timer = nullptr; + RuntimeProfile::Counter* _close_timer = nullptr; OpentelemetrySpan _span; OperatorXBase* _parent; @@ -314,8 +315,10 @@ class PipelineXLocalState : public PipelineXLocalStateBase { RETURN_IF_ERROR(_parent->_projections[i]->clone(state, _projections[i])); } _rows_returned_counter = ADD_COUNTER(_runtime_profile, "RowsReturned", TUnit::UNIT); - _rows_input_counter = ADD_COUNTER(_runtime_profile, "InputRows", TUnit::UNIT); + _blocks_returned_counter = ADD_COUNTER(_runtime_profile, "BlocksReturned", TUnit::UNIT); _projection_timer = ADD_TIMER(_runtime_profile, "ProjectionTime"); + _open_timer = ADD_TIMER(_runtime_profile, "OpenTime"); + _close_timer = ADD_TIMER(_runtime_profile, "CloseTime"); _rows_returned_rate = profile()->add_derived_counter( doris::ExecNode::ROW_THROUGHPUT_COUNTER, TUnit::UNIT_PER_SECOND, std::bind(&RuntimeProfile::units_per_second, _rows_returned_counter, @@ -412,6 +415,8 @@ class PipelineXSinkLocalStateBase { std::make_unique("faker profile"); RuntimeProfile::Counter* _rows_input_counter; + RuntimeProfile::Counter* _open_timer = nullptr; + RuntimeProfile::Counter* _close_timer = nullptr; }; class DataSinkOperatorXBase : public OperatorBase { @@ -541,6 +546,8 @@ class PipelineXSinkLocalState : public PipelineXSinkLocalStateBase { _profile = state->obj_pool()->add(new RuntimeProfile( _parent->get_name() + " (id=" + std::to_string(_parent->id()) + ")")); _rows_input_counter = ADD_COUNTER(_profile, "InputRows", TUnit::UNIT); + _open_timer = ADD_TIMER(_profile, "OpenTime"); + _close_timer = ADD_TIMER(_profile, "CloseTime"); info.parent_profile->add_child(_profile, true, nullptr); _mem_tracker = std::make_unique(_parent->get_name()); return Status::OK(); diff --git a/be/src/pipeline/pipeline_x/pipeline_x_task.cpp 
b/be/src/pipeline/pipeline_x/pipeline_x_task.cpp index a4f195c20c5881..ae37c233adc434 100644 --- a/be/src/pipeline/pipeline_x/pipeline_x_task.cpp +++ b/be/src/pipeline/pipeline_x/pipeline_x_task.cpp @@ -62,6 +62,10 @@ Status PipelineXTask::prepare(RuntimeState* state, const TPipelineInstanceParams SCOPED_CPU_TIMER(_task_cpu_timer); SCOPED_TIMER(_prepare_timer); + LocalSinkStateInfo sink_info {_pipeline->pipeline_profile(), local_params.sender_id, + get_downstream_dependency().get()}; + RETURN_IF_ERROR(_sink->setup_local_state(state, sink_info)); + std::vector no_scan_ranges; auto scan_ranges = find_with_default(local_params.per_node_scan_ranges, _operators.front()->id(), no_scan_ranges); @@ -74,10 +78,6 @@ Status PipelineXTask::prepare(RuntimeState* state, const TPipelineInstanceParams RETURN_IF_ERROR(_operators[op_idx]->setup_local_state(state, info)); } - LocalSinkStateInfo info {_pipeline->pipeline_profile(), local_params.sender_id, - get_downstream_dependency().get()}; - RETURN_IF_ERROR(_sink->setup_local_state(state, info)); - _block = doris::vectorized::Block::create_unique(); // We should make sure initial state for task are runnable so that we can do some preparation jobs (e.g. initialize runtime filters). @@ -105,7 +105,6 @@ void PipelineXTask::_init_profile() { _finalize_timer = ADD_CHILD_TIMER(_task_profile, "FinalizeTime", exec_time); _close_timer = ADD_CHILD_TIMER(_task_profile, "CloseTime", exec_time); - _wait_source_timer = ADD_TIMER(_task_profile, "WaitSourceTime"); _wait_bf_timer = ADD_TIMER(_task_profile, "WaitBfTime"); _wait_sink_timer = ADD_TIMER(_task_profile, "WaitSinkTime"); _wait_worker_timer = ADD_TIMER(_task_profile, "WaitWorkerTime"); @@ -119,7 +118,6 @@ void PipelineXTask::_init_profile() { } void PipelineXTask::_fresh_profile_counter() { - COUNTER_SET(_wait_source_timer, (int64_t)_wait_source_watcher.elapsed_time()); COUNTER_SET(_wait_bf_timer, (int64_t)_wait_bf_watcher.elapsed_time()); COUNTER_SET(_schedule_counts, (int64_t)_schedule_time); COUNTER_SET(_wait_sink_timer, (int64_t)_wait_sink_watcher.elapsed_time()); From 8012ac7661ac07318dfa98dfd8b602376b44516c Mon Sep 17 00:00:00 2001 From: Lei Zhang <27994433+SWJTU-ZhangLei@users.noreply.github.com> Date: Sat, 16 Sep 2023 14:52:27 +0800 Subject: [PATCH 07/33] [fix](bdbje) Remove improper check for journalId (#24464) * Introduced by https://github.com/apache/doris/pull/24259 --- .../src/main/java/org/apache/doris/catalog/Env.java | 8 -------- 1 file changed, 8 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java index b186c61847dfa9..d648776980b280 100755 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java @@ -2653,14 +2653,6 @@ public synchronized boolean replayJournal(long toJournalId) { LOG.warn("replay journal cost too much time: {} replayedJournalId: {}", cost, replayedJournalId); } - if (replayedJournalId.get() != newToJournalId) { - String msg = "replayedJournalId:" + replayedJournalId.get() + " not equal with newToJournalId:" - + newToJournalId + " , will exit"; - LOG.error(msg); - Util.stdoutWithTime(msg); - System.exit(-1); - } - return hasLog; } From 76ac77b1d0a237727117cefb8534d028befd059c Mon Sep 17 00:00:00 2001 From: huanghaibin <284824253@qq.com> Date: Sat, 16 Sep 2023 16:25:55 +0800 Subject: [PATCH 08/33] [fix](compaction) compaction should catch exception when read next block (#24484) --- be/src/vec/olap/vcollect_iterator.cpp 
| 11 +++-- be/src/vec/olap/vertical_merge_iterator.cpp | 52 +++++++++++---------- be/src/vec/olap/vertical_merge_iterator.h | 4 +- 3 files changed, 37 insertions(+), 30 deletions(-) diff --git a/be/src/vec/olap/vcollect_iterator.cpp b/be/src/vec/olap/vcollect_iterator.cpp index e62b9ca4ce3fb1..b34893b6309f40 100644 --- a/be/src/vec/olap/vcollect_iterator.cpp +++ b/be/src/vec/olap/vcollect_iterator.cpp @@ -795,10 +795,13 @@ Status VCollectIterator::Level1Iterator::_merge_next(Block* block) { // clear block column data if (pre_row_ref.row_pos + continuous_row_in_block == pre_row_ref.block->rows()) { const auto& src_block = pre_row_ref.block; - for (size_t i = 0; i < column_count; ++i) { - target_columns[i]->insert_range_from(*(src_block->get_by_position(i).column), - pre_row_ref.row_pos, continuous_row_in_block); - } + RETURN_IF_CATCH_EXCEPTION({ + for (size_t i = 0; i < column_count; ++i) { + target_columns[i]->insert_range_from(*(src_block->get_by_position(i).column), + pre_row_ref.row_pos, + continuous_row_in_block); + } + }); continuous_row_in_block = 0; pre_row_ref.reset(); } diff --git a/be/src/vec/olap/vertical_merge_iterator.cpp b/be/src/vec/olap/vertical_merge_iterator.cpp index 6e84c5f2b1c7b9..3ff1282fd26844 100644 --- a/be/src/vec/olap/vertical_merge_iterator.cpp +++ b/be/src/vec/olap/vertical_merge_iterator.cpp @@ -277,45 +277,49 @@ bool VerticalMergeIteratorContext::compare(const VerticalMergeIteratorContext& r return result; } -void VerticalMergeIteratorContext::copy_rows(Block* block, size_t count) { +Status VerticalMergeIteratorContext::copy_rows(Block* block, size_t count) { Block& src = *_block; Block& dst = *block; DCHECK(count > 0); auto start = _index_in_block; _index_in_block += count - 1; + RETURN_IF_CATCH_EXCEPTION({ + for (size_t i = 0; i < _ori_return_cols; ++i) { + auto& s_col = src.get_by_position(i); + auto& d_col = dst.get_by_position(i); - for (size_t i = 0; i < _ori_return_cols; ++i) { - auto& s_col = src.get_by_position(i); - auto& d_col = dst.get_by_position(i); + ColumnPtr& s_cp = s_col.column; + ColumnPtr& d_cp = d_col.column; - ColumnPtr& s_cp = s_col.column; - ColumnPtr& d_cp = d_col.column; - - d_cp->assume_mutable()->insert_range_from(*s_cp, start, count); - } + d_cp->assume_mutable()->insert_range_from(*s_cp, start, count); + } + }); + return Status::OK(); } // `advanced = false` when current block finished -void VerticalMergeIteratorContext::copy_rows(Block* block, bool advanced) { +Status VerticalMergeIteratorContext::copy_rows(Block* block, bool advanced) { Block& src = *_block; Block& dst = *block; if (_cur_batch_num == 0) { - return; + return Status::OK(); } // copy a row to dst block column by column size_t start = _index_in_block - _cur_batch_num + 1 - advanced; + RETURN_IF_CATCH_EXCEPTION({ + for (size_t i = 0; i < _ori_return_cols; ++i) { + auto& s_col = src.get_by_position(i); + auto& d_col = dst.get_by_position(i); - for (size_t i = 0; i < _ori_return_cols; ++i) { - auto& s_col = src.get_by_position(i); - auto& d_col = dst.get_by_position(i); + ColumnPtr& s_cp = s_col.column; + ColumnPtr& d_cp = d_col.column; - ColumnPtr& s_cp = s_col.column; - ColumnPtr& d_cp = d_col.column; - - d_cp->assume_mutable()->insert_range_from(*s_cp, start, _cur_batch_num); - } + d_cp->assume_mutable()->insert_range_from(*s_cp, start, _cur_batch_num); + } + }); _cur_batch_num = 0; + return Status::OK(); } Status VerticalMergeIteratorContext::init(const StorageReadOptions& opts) { @@ -426,14 +430,14 @@ Status VerticalHeapMergeIterator::next_batch(Block* block) { 
// skip cur row, copy pre ctx ++_merged_rows; if (pre_ctx) { - pre_ctx->copy_rows(block); + RETURN_IF_ERROR(pre_ctx->copy_rows(block)); pre_ctx = nullptr; } } else { ctx->add_cur_batch(); if (pre_ctx != ctx) { if (pre_ctx) { - pre_ctx->copy_rows(block); + RETURN_IF_ERROR(pre_ctx->copy_rows(block)); } pre_ctx = ctx; } @@ -444,7 +448,7 @@ Status VerticalHeapMergeIterator::next_batch(Block* block) { if (ctx->is_cur_block_finished() || row_idx >= _block_row_max) { // current block finished, ctx not advance // so copy start_idx = (_index_in_block - _cur_batch_num + 1) - ctx->copy_rows(block, false); + RETURN_IF_ERROR(ctx->copy_rows(block, false)); pre_ctx = nullptr; } } @@ -547,7 +551,7 @@ Status VerticalFifoMergeIterator::next_batch(Block* block) { if (_cur_iter_ctx->is_cur_block_finished() || row_idx >= _block_row_max) { // current block finished, ctx not advance // so copy start_idx = (_index_in_block - _cur_batch_num + 1) - _cur_iter_ctx->copy_rows(block, false); + RETURN_IF_ERROR(_cur_iter_ctx->copy_rows(block, false)); } RETURN_IF_ERROR(_cur_iter_ctx->advance()); @@ -727,7 +731,7 @@ Status VerticalMaskMergeIterator::next_batch(Block* block) { auto same_source_cnt = _row_sources_buf->same_source_count(order, limit); _row_sources_buf->advance(same_source_cnt); // copy rows to block - ctx->copy_rows(block, same_source_cnt); + RETURN_IF_ERROR(ctx->copy_rows(block, same_source_cnt)); RETURN_IF_ERROR(ctx->advance()); rows += same_source_cnt; st = _row_sources_buf->has_remaining(); diff --git a/be/src/vec/olap/vertical_merge_iterator.h b/be/src/vec/olap/vertical_merge_iterator.h index b587b6f2cdb11c..70a452b2b6ddb3 100644 --- a/be/src/vec/olap/vertical_merge_iterator.h +++ b/be/src/vec/olap/vertical_merge_iterator.h @@ -164,8 +164,8 @@ class VerticalMergeIteratorContext { Status block_reset(const std::shared_ptr& block); Status init(const StorageReadOptions& opts); bool compare(const VerticalMergeIteratorContext& rhs) const; - void copy_rows(Block* block, bool advanced = true); - void copy_rows(Block* block, size_t count); + Status copy_rows(Block* block, bool advanced = true); + Status copy_rows(Block* block, size_t count); Status advance(); From 643d09de0669aa525240b00ab51d3318c1a9c2f6 Mon Sep 17 00:00:00 2001 From: qiye Date: Sat, 16 Sep 2023 16:42:39 +0800 Subject: [PATCH 09/33] [fix](index compaction)skip index compaction when no output segment (#24468) --- be/src/olap/compaction.cpp | 105 ++++++++++++++++++++----------------- 1 file changed, 57 insertions(+), 48 deletions(-) diff --git a/be/src/olap/compaction.cpp b/be/src/olap/compaction.cpp index 08feb6b04863e4..fdcf5a76c58859 100644 --- a/be/src/olap/compaction.cpp +++ b/be/src/olap/compaction.cpp @@ -417,56 +417,65 @@ Status Compaction::do_compaction_impl(int64_t permits) { auto src_segment_num = src_seg_to_id_map.size(); auto dest_segment_num = dest_segment_num_rows.size(); - // src index files - // format: rowsetId_segmentId - std::vector src_index_files(src_segment_num); - for (auto m : src_seg_to_id_map) { - std::pair p = m.first; - src_index_files[m.second] = p.first.to_string() + "_" + std::to_string(p.second); - } - - // dest index files - // format: rowsetId_segmentId - std::vector dest_index_files(dest_segment_num); - for (int i = 0; i < dest_segment_num; ++i) { - auto prefix = dest_rowset_id.to_string() + "_" + std::to_string(i); - dest_index_files[i] = prefix; - } + if (dest_segment_num > 0) { + // src index files + // format: rowsetId_segmentId + std::vector src_index_files(src_segment_num); + for (auto m : 
src_seg_to_id_map) { + std::pair p = m.first; + src_index_files[m.second] = p.first.to_string() + "_" + std::to_string(p.second); + } - // create index_writer to compaction indexes - auto& fs = _output_rowset->rowset_meta()->fs(); - auto& tablet_path = _tablet->tablet_path(); - - DCHECK(dest_index_files.size() > 0); - // we choose the first destination segment name as the temporary index writer path - // Used to distinguish between different index compaction - auto index_writer_path = tablet_path + "/" + dest_index_files[0]; - LOG(INFO) << "start index compaction" - << ". tablet=" << _tablet->full_name() - << ", source index size=" << src_segment_num - << ", destination index size=" << dest_segment_num << "."; - std::for_each( - ctx.skip_inverted_index.cbegin(), ctx.skip_inverted_index.cend(), - [&src_segment_num, &dest_segment_num, &index_writer_path, &src_index_files, - &dest_index_files, &fs, &tablet_path, &trans_vec, &dest_segment_num_rows, - this](int32_t column_uniq_id) { - auto st = compact_column( - _cur_tablet_schema->get_inverted_index(column_uniq_id)->index_id(), - src_segment_num, dest_segment_num, src_index_files, dest_index_files, - fs, index_writer_path, tablet_path, trans_vec, dest_segment_num_rows); - if (!st.ok()) { - LOG(ERROR) << "failed to do index compaction" - << ". tablet=" << _tablet->full_name() - << ". column uniq id=" << column_uniq_id << ". index_id= " - << _cur_tablet_schema->get_inverted_index(column_uniq_id) - ->index_id(); - } - }); + // dest index files + // format: rowsetId_segmentId + std::vector dest_index_files(dest_segment_num); + for (int i = 0; i < dest_segment_num; ++i) { + auto prefix = dest_rowset_id.to_string() + "_" + std::to_string(i); + dest_index_files[i] = prefix; + } - LOG(INFO) << "succeed to do index compaction" - << ". tablet=" << _tablet->full_name() << ", input row number=" << _input_row_num - << ", output row number=" << _output_rowset->num_rows() - << ". elapsed time=" << inverted_watch.get_elapse_second() << "s."; + // create index_writer to compaction indexes + auto& fs = _output_rowset->rowset_meta()->fs(); + auto& tablet_path = _tablet->tablet_path(); + + // we choose the first destination segment name as the temporary index writer path + // Used to distinguish between different index compaction + auto index_writer_path = tablet_path + "/" + dest_index_files[0]; + LOG(INFO) << "start index compaction" + << ". tablet=" << _tablet->full_name() + << ", source index size=" << src_segment_num + << ", destination index size=" << dest_segment_num << "."; + std::for_each( + ctx.skip_inverted_index.cbegin(), ctx.skip_inverted_index.cend(), + [&src_segment_num, &dest_segment_num, &index_writer_path, &src_index_files, + &dest_index_files, &fs, &tablet_path, &trans_vec, &dest_segment_num_rows, + this](int32_t column_uniq_id) { + auto st = compact_column( + _cur_tablet_schema->get_inverted_index(column_uniq_id)->index_id(), + src_segment_num, dest_segment_num, src_index_files, + dest_index_files, fs, index_writer_path, tablet_path, trans_vec, + dest_segment_num_rows); + if (!st.ok()) { + LOG(ERROR) << "failed to do index compaction" + << ". tablet=" << _tablet->full_name() + << ". column uniq id=" << column_uniq_id << ". index_id= " + << _cur_tablet_schema->get_inverted_index(column_uniq_id) + ->index_id(); + } + }); + + LOG(INFO) << "succeed to do index compaction" + << ". tablet=" << _tablet->full_name() + << ", input row number=" << _input_row_num + << ", output row number=" << _output_rowset->num_rows() + << ". 
elapsed time=" << inverted_watch.get_elapse_second() << "s."; + } else { + LOG(INFO) << "skip doing index compaction due to no output segments" + << ". tablet=" << _tablet->full_name() + << ", input row number=" << _input_row_num + << ", output row number=" << _output_rowset->num_rows() + << ". elapsed time=" << inverted_watch.get_elapse_second() << "s."; + } } // 4. modify rowsets in memory From ed8db3727cfde8c9732e6ee75ee9135b44f976df Mon Sep 17 00:00:00 2001 From: bobhan1 Date: Sat, 16 Sep 2023 17:11:59 +0800 Subject: [PATCH 10/33] [feature](partial update) support MOW partial update for insert statement (#21597) --- docs/en/docs/advanced/variables.md | 6 + .../Manipulation/INSERT.md | 3 + docs/zh-CN/docs/advanced/variables.md | 6 + .../Manipulation/INSERT.md | 2 + .../org/apache/doris/analysis/DeleteStmt.java | 1 + .../doris/analysis/NativeInsertStmt.java | 59 ++++- .../org/apache/doris/analysis/UpdateStmt.java | 1 + .../analyzer/UnboundOlapTableSink.java | 30 ++- .../translator/PhysicalPlanTranslator.java | 25 +- .../nereids/parser/LogicalPlanBuilder.java | 2 + .../nereids/rules/analysis/BindSink.java | 1 + ...lOlapTableSinkToPhysicalOlapTableSink.java | 1 + .../commands/InsertIntoTableCommand.java | 2 + .../plans/logical/LogicalOlapTableSink.java | 29 ++- .../plans/physical/PhysicalOlapTableSink.java | 36 ++- .../org/apache/doris/qe/SessionVariable.java | 13 + .../insert_into_table/partial_update.out | 66 +++++ .../partial_update_complex.out | 37 +++ ...test_partial_update_native_insert_stmt.out | 66 +++++ ...tial_update_native_insert_stmt_complex.out | 37 +++ .../insert_into_table/partial_update.groovy | 227 ++++++++++++++++++ .../partial_update_complex.groovy | 117 +++++++++ ...t_partial_update_native_insert_stmt.groovy | 227 ++++++++++++++++++ ...l_update_native_insert_stmt_complex.groovy | 117 +++++++++ 24 files changed, 1078 insertions(+), 33 deletions(-) create mode 100644 regression-test/data/nereids_p0/insert_into_table/partial_update.out create mode 100644 regression-test/data/nereids_p0/insert_into_table/partial_update_complex.out create mode 100644 regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt.out create mode 100644 regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt_complex.out create mode 100644 regression-test/suites/nereids_p0/insert_into_table/partial_update.groovy create mode 100644 regression-test/suites/nereids_p0/insert_into_table/partial_update_complex.groovy create mode 100644 regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt.groovy create mode 100644 regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt_complex.groovy diff --git a/docs/en/docs/advanced/variables.md b/docs/en/docs/advanced/variables.md index 162744b25de50e..0fc218a1faa6ae 100644 --- a/docs/en/docs/advanced/variables.md +++ b/docs/en/docs/advanced/variables.md @@ -692,6 +692,12 @@ Translated with www.DeepL.com/Translator (free version) Build MemTable on DataSink node, and send segments to other backends through brpc streaming. It reduces duplicate work among replicas, and saves time in data serialization & deserialization. +* `enable_unique_key_partial_update` + + + Whether to enable partial columns update semantics for native insert into statement, default is false. 
+ + *** #### Supplementary instructions on statement execution timeout control diff --git a/docs/en/docs/sql-manual/sql-reference/Data-Manipulation-Statements/Manipulation/INSERT.md b/docs/en/docs/sql-manual/sql-reference/Data-Manipulation-Statements/Manipulation/INSERT.md index 2fa97eae012e87..dd11abb99cd5e4 100644 --- a/docs/en/docs/sql-manual/sql-reference/Data-Manipulation-Statements/Manipulation/INSERT.md +++ b/docs/en/docs/sql-manual/sql-reference/Data-Manipulation-Statements/Manipulation/INSERT.md @@ -64,6 +64,9 @@ INSERT [IGNORE] INTO table_name > 2. SHUFFLE: When the target table is a partition table, enabling this hint will do repartiiton. > 3. NOSHUFFLE: Even if the target table is a partition table, repartiiton will not be performed, but some other operations will be performed to ensure that the data is correctly dropped into each partition. +For a Unique table with merge-on-write enabled, you can also perform partial columns updates using the insert statement. To perform partial column updates with the insert statement, you need to set the session variable enable_unique_key_partial_update to true (the default value for this variable is false, meaning partial columns updates with the insert statement are not allowed by default). When performing partial columns updates, the columns being inserted must contain at least all the Key columns and specify the columns you want to update. If the Key column values for the inserted row already exist in the original table, the data in the row with the same key column values will be updated. If the Key column values for the inserted row do not exist in the original table, a new row will be inserted into the table. In this case, columns not specified in the insert statement must either have default values or be nullable. These missing columns will first attempt to be populated with default values, and if a column has no default value, it will be filled with null. If a column cannot be null, the insert operation will fail. + + Notice: When executing the `INSERT` statement, the default behavior is to filter the data that does not conform to the target table format, such as the string is too long. However, for business scenarios that require data not to be filtered, you can set the session variable `enable_insert_strict` to `true` to ensure that `INSERT` will not be executed successfully when data is filtered out. diff --git a/docs/zh-CN/docs/advanced/variables.md b/docs/zh-CN/docs/advanced/variables.md index 881ded17ea55f5..bea3836ede9435 100644 --- a/docs/zh-CN/docs/advanced/variables.md +++ b/docs/zh-CN/docs/advanced/variables.md @@ -679,6 +679,12 @@ try (Connection conn = DriverManager.getConnection("jdbc:mysql://127.0.0.1:9030/ 在 DataSink 节点上构建 MemTable,并通过 brpc streaming 发送 segment 到其他 BE。 该方法减少了多副本之间的重复工作,并且节省了数据序列化和反序列化的时间。 +* `enable_unique_key_partial_update` + + + 是否在对insert into语句启用部分列更新的语义,默认为 false + + *** #### 关于语句执行超时控制的补充说明 diff --git a/docs/zh-CN/docs/sql-manual/sql-reference/Data-Manipulation-Statements/Manipulation/INSERT.md b/docs/zh-CN/docs/sql-manual/sql-reference/Data-Manipulation-Statements/Manipulation/INSERT.md index d1ec9f0efe7d37..0f914407f6b979 100644 --- a/docs/zh-CN/docs/sql-manual/sql-reference/Data-Manipulation-Statements/Manipulation/INSERT.md +++ b/docs/zh-CN/docs/sql-manual/sql-reference/Data-Manipulation-Statements/Manipulation/INSERT.md @@ -65,6 +65,8 @@ INSERT [IGNORE] INTO table_name > 2. SHUFFLE:当目标表是分区表,开启这个 hint 会进行 repartiiton。 > 3. 
NOSHUFFLE:即使目标表是分区表,也不会进行 repartiiton,但会做一些其他操作以保证数据正确落到各个分区中。 +对于开启了merge-on-write的Unique表,还可以使用insert语句进行部分列更新的操作。要使用insert语句进行部分列更新,需要将会话变量enable_uniuqe_key_partial_update的值设置为true(该变量默认值为false,即默认无法通过insert语句进行部分列更新)。进行部分列更新时,插入的列必须至少包含所有的Key列,同时指定需要更新的列。如果插入行Key列的值在原表中存在,则将更新具有相同key列值那一行的数据。如果插入行Key列的值在原表中不存在,则将向表中插入一条新的数据,此时insert语句中没有指定的列必须有默认值或可以为null,这些缺失列会首先尝试用默认值填充,如果该列没有默认值,则尝试使用null值填充,如果该列不能为null,则本次插入失败。 + 注意: 当前执行 `INSERT` 语句时,对于有不符合目标表格式的数据,默认的行为是过滤,比如字符串超长等。但是对于有要求数据不能够被过滤的业务场景,可以通过设置会话变量 `enable_insert_strict` 为 `true` 来确保当有数据被过滤掉的时候,`INSERT` 不会被执行成功。 diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DeleteStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DeleteStmt.java index a43d339be852ab..2a282be2778eef 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DeleteStmt.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DeleteStmt.java @@ -203,6 +203,7 @@ private void constructInsertStmt() throws AnalysisException { null, isPartialUpdate, false); + ((NativeInsertStmt) insertStmt).setIsFromDeleteOrUpdateStmt(true); } private void analyzeTargetTable(Analyzer analyzer) throws UserException { diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java index 96962c6dccf2ae..68076b9e7a237c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java @@ -167,6 +167,7 @@ public class NativeInsertStmt extends InsertStmt { public boolean isInnerGroupCommit = false; private boolean isInsertIgnore = false; + private boolean isFromDeleteOrUpdateStmt = false; public NativeInsertStmt(InsertTarget target, String label, List cols, InsertSource source, List hints) { @@ -405,8 +406,10 @@ public void analyze(Analyzer analyzer) throws UserException { OlapTableSink sink = (OlapTableSink) dataSink; TUniqueId loadId = analyzer.getContext().queryId(); int sendBatchParallelism = analyzer.getContext().getSessionVariable().getSendBatchParallelism(); + boolean isInsertStrict = analyzer.getContext().getSessionVariable().getEnableInsertStrict() + && !isFromDeleteOrUpdateStmt; sink.init(loadId, transactionId, db.getId(), timeoutSecond, - sendBatchParallelism, false, false, isInsertIgnore); + sendBatchParallelism, false, isInsertStrict, isInsertIgnore); } } @@ -618,6 +621,10 @@ private void analyzeSubquery(Analyzer analyzer, boolean skipCheck) throws UserEx ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_VALUE_COUNT); } + if (analyzer.getContext().getSessionVariable().isEnableUniqueKeyPartialUpdate()) { + trySetPartialUpdate(); + } + // Check if all columns mentioned is enough checkColumnCoverage(mentionedColumns, targetTable.getBaseSchema()); @@ -1098,4 +1105,54 @@ public int getBaseSchemaVersion() { public ByteString getRangeBytes() { return rangeBytes; } + + public void setIsFromDeleteOrUpdateStmt(boolean isFromDeleteOrUpdateStmt) { + this.isFromDeleteOrUpdateStmt = isFromDeleteOrUpdateStmt; + } + + private void trySetPartialUpdate() throws UserException { + if (isFromDeleteOrUpdateStmt || isPartialUpdate || !(targetTable instanceof OlapTable)) { + return; + } + OlapTable olapTable = (OlapTable) targetTable; + if (!olapTable.getEnableUniqueKeyMergeOnWrite()) { + throw new UserException("Partial update is only allowed in unique table with merge-on-write enabled."); + } + for (Column col : olapTable.getFullSchema()) { + boolean exists = false; + 
for (Column insertCol : targetColumns) { + if (insertCol.getName() != null && insertCol.getName().equals(col.getName())) { + if (!col.isVisible() && !Column.DELETE_SIGN.equals(col.getName())) { + throw new UserException("Partial update should not include invisible column except" + + " delete sign column: " + col.getName()); + } + exists = true; + break; + } + } + if (col.isKey() && !exists) { + throw new UserException("Partial update should include all key columns, missing: " + col.getName()); + } + } + + isPartialUpdate = true; + partialUpdateCols.addAll(targetColumnNames); + if (isPartialUpdate && olapTable.hasSequenceCol() && olapTable.getSequenceMapCol() != null + && partialUpdateCols.contains(olapTable.getSequenceMapCol())) { + partialUpdateCols.add(Column.SEQUENCE_COL); + } + // we should re-generate olapTuple + DescriptorTable descTable = analyzer.getDescTbl(); + olapTuple = descTable.createTupleDescriptor(); + for (Column col : olapTable.getFullSchema()) { + if (!partialUpdateCols.contains(col.getName())) { + continue; + } + SlotDescriptor slotDesc = descTable.addSlotDescriptor(olapTuple); + slotDesc.setIsMaterialized(true); + slotDesc.setType(col.getType()); + slotDesc.setColumn(col); + slotDesc.setIsNullable(col.isAllowNull()); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/UpdateStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/UpdateStmt.java index 315c9a1fbb74ec..0decc606ef2a67 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/UpdateStmt.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/UpdateStmt.java @@ -127,6 +127,7 @@ private void constructInsertStmt() { null, isPartialUpdate, false); + ((NativeInsertStmt) insertStmt).setIsFromDeleteOrUpdateStmt(true); } private void analyzeTargetTable(Analyzer analyzer) throws UserException { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundOlapTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundOlapTableSink.java index a385f7164c028a..1880581143ad37 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundOlapTableSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundOlapTableSink.java @@ -47,29 +47,39 @@ public class UnboundOlapTableSink extends LogicalSink hints; private final List partitions; private final boolean isPartialUpdate; + private final boolean isFromNativeInsertStmt; public UnboundOlapTableSink(List nameParts, List colNames, List hints, List partitions, CHILD_TYPE child) { - this(nameParts, colNames, hints, partitions, false, Optional.empty(), Optional.empty(), child); + this(nameParts, colNames, hints, partitions, false, false, Optional.empty(), Optional.empty(), child); } public UnboundOlapTableSink(List nameParts, List colNames, List hints, List partitions, boolean isPartialUpdate, CHILD_TYPE child) { - this(nameParts, colNames, hints, partitions, isPartialUpdate, Optional.empty(), Optional.empty(), child); + this(nameParts, colNames, hints, partitions, isPartialUpdate, false, + Optional.empty(), Optional.empty(), child); + } + + public UnboundOlapTableSink(List nameParts, List colNames, List hints, + List partitions, boolean isPartialUpdate, boolean isFromNativeInsertStmt, CHILD_TYPE child) { + this(nameParts, colNames, hints, partitions, isPartialUpdate, isFromNativeInsertStmt, + Optional.empty(), Optional.empty(), child); } /** * constructor */ public UnboundOlapTableSink(List nameParts, List colNames, List hints, - List partitions, boolean 
isPartialUpdate, Optional groupExpression, - Optional logicalProperties, CHILD_TYPE child) { + List partitions, boolean isPartialUpdate, boolean isFromNativeInsertStmt, + Optional groupExpression, Optional logicalProperties, + CHILD_TYPE child) { super(PlanType.LOGICAL_UNBOUND_OLAP_TABLE_SINK, ImmutableList.of(), groupExpression, logicalProperties, child); this.nameParts = Utils.copyRequiredList(nameParts); this.colNames = Utils.copyRequiredList(colNames); this.hints = Utils.copyRequiredList(hints); this.partitions = Utils.copyRequiredList(partitions); this.isPartialUpdate = isPartialUpdate; + this.isFromNativeInsertStmt = isFromNativeInsertStmt; } public List getColNames() { @@ -92,11 +102,15 @@ public boolean isPartialUpdate() { return isPartialUpdate; } + public boolean isFromNativeInsertStmt() { + return isFromNativeInsertStmt; + } + @Override public Plan withChildren(List children) { Preconditions.checkArgument(children.size() == 1, "UnboundOlapTableSink only accepts one child"); return new UnboundOlapTableSink<>(nameParts, colNames, hints, partitions, isPartialUpdate, - groupExpression, Optional.of(getLogicalProperties()), children.get(0)); + isFromNativeInsertStmt, groupExpression, Optional.of(getLogicalProperties()), children.get(0)); } @Override @@ -131,15 +145,15 @@ public int hashCode() { @Override public Plan withGroupExpression(Optional groupExpression) { - return new UnboundOlapTableSink<>(nameParts, colNames, hints, partitions, - isPartialUpdate, groupExpression, Optional.of(getLogicalProperties()), child()); + return new UnboundOlapTableSink<>(nameParts, colNames, hints, partitions, isPartialUpdate, + isFromNativeInsertStmt, groupExpression, Optional.of(getLogicalProperties()), child()); } @Override public Plan withGroupExprLogicalPropChildren(Optional groupExpression, Optional logicalProperties, List children) { return new UnboundOlapTableSink<>(nameParts, colNames, hints, partitions, - isPartialUpdate, groupExpression, logicalProperties, children.get(0)); + isPartialUpdate, isFromNativeInsertStmt, groupExpression, logicalProperties, children.get(0)); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java index bb71fa59d81de2..045082befdd403 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java @@ -367,11 +367,34 @@ public PlanFragment visitPhysicalOlapTableSink(PhysicalOlapTableSink partialUpdateCols = new HashSet<>(); + for (Column col : olapTable.getFullSchema()) { + boolean exists = false; + for (Column insertCol : olapTableSink.getCols()) { + if (insertCol.getName() != null && insertCol.getName().equals(col.getName())) { + exists = true; + break; + } + } + if (col.isKey() && !exists) { + throw new AnalysisException("Partial update should include all key columns, missing: " + + col.getName()); + } + } for (Column col : olapTableSink.getCols()) { partialUpdateCols.add(col.getName()); } + if (olapTable.hasSequenceCol() && olapTable.getSequenceMapCol() != null + && partialUpdateCols.contains(olapTable.getSequenceMapCol())) { + partialUpdateCols.add(Column.SEQUENCE_COL); + } sink.setPartialUpdateInputColumns(true, partialUpdateCols); } rootFragment.setSink(sink); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index 19a9a244bee8b5..5183b0c4b0456d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -411,6 +411,8 @@ public LogicalPlan visitInsertIntoQuery(InsertIntoQueryContext ctx) { colNames, ImmutableList.of(), partitions, + false, + true, visitQuery(ctx.query())); if (ctx.explain() != null) { return withExplain(sink, ctx.explain()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java index 459723e466a0a0..7632a10f5d2071 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java @@ -84,6 +84,7 @@ public List buildRules() { .map(NamedExpression.class::cast) .collect(ImmutableList.toImmutableList()), sink.isPartialUpdate(), + sink.isFromNativeInsertStmt(), sink.child()); // we need to insert all the columns of the target table diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalOlapTableSinkToPhysicalOlapTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalOlapTableSinkToPhysicalOlapTableSink.java index 8927b8bd137134..e8cfaa9f804d22 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalOlapTableSinkToPhysicalOlapTableSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalOlapTableSinkToPhysicalOlapTableSink.java @@ -40,6 +40,7 @@ public Rule build() { sink.getCols(), ctx.connectContext.getSessionVariable().isEnableSingleReplicaInsert(), sink.isPartialUpdate(), + sink.isFromNativeInsertStmt(), Optional.empty(), sink.getLogicalProperties(), sink.child()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/InsertIntoTableCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/InsertIntoTableCommand.java index 9563fd32ffd83a..9d47aa2ecb21d8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/InsertIntoTableCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/InsertIntoTableCommand.java @@ -250,6 +250,8 @@ private boolean insertInto(ConnectContext ctx, StmtExecutor executor, List extends LogicalSink cols; private final List partitionIds; private final boolean isPartialUpdate; + private final boolean isFromNativeInsertStmt; public LogicalOlapTableSink(Database database, OlapTable targetTable, List cols, List partitionIds, - List outputExprs, boolean isPartialUpdate, CHILD_TYPE child) { - this(database, targetTable, cols, partitionIds, outputExprs, isPartialUpdate, + List outputExprs, boolean isPartialUpdate, boolean isFromNativeInsertStmt, + CHILD_TYPE child) { + this(database, targetTable, cols, partitionIds, outputExprs, isPartialUpdate, isFromNativeInsertStmt, Optional.empty(), Optional.empty(), child); } @@ -58,13 +60,14 @@ public LogicalOlapTableSink(Database database, OlapTable targetTable, List cols, List partitionIds, List outputExprs, boolean isPartialUpdate, - Optional groupExpression, Optional logicalProperties, - CHILD_TYPE child) { + boolean isFromNativeInsertStmt, Optional groupExpression, + Optional logicalProperties, CHILD_TYPE child) { 
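+        // The new isFromNativeInsertStmt flag is stored here, carried unchanged through the
+        // withChildren/withGroupExpression copies below, and included in equals/hashCode. It appears
+        // to mark sinks built directly from a native INSERT statement rather than inserts generated
+        // internally (for example by UPDATE or DELETE).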
super(PlanType.LOGICAL_OLAP_TABLE_SINK, outputExprs, groupExpression, logicalProperties, child); this.database = Objects.requireNonNull(database, "database != null in LogicalOlapTableSink"); this.targetTable = Objects.requireNonNull(targetTable, "targetTable != null in LogicalOlapTableSink"); this.cols = Utils.copyRequiredList(cols); this.isPartialUpdate = isPartialUpdate; + this.isFromNativeInsertStmt = isFromNativeInsertStmt; this.partitionIds = Utils.copyRequiredList(partitionIds); } @@ -73,14 +76,14 @@ public Plan withChildAndUpdateOutput(Plan child) { .map(NamedExpression.class::cast) .collect(ImmutableList.toImmutableList()); return new LogicalOlapTableSink<>(database, targetTable, cols, partitionIds, output, isPartialUpdate, - Optional.empty(), Optional.empty(), child); + isFromNativeInsertStmt, Optional.empty(), Optional.empty(), child); } @Override public Plan withChildren(List children) { Preconditions.checkArgument(children.size() == 1, "LogicalOlapTableSink only accepts one child"); return new LogicalOlapTableSink<>(database, targetTable, cols, partitionIds, outputExprs, isPartialUpdate, - Optional.empty(), Optional.empty(), children.get(0)); + isFromNativeInsertStmt, Optional.empty(), Optional.empty(), children.get(0)); } public Database getDatabase() { @@ -103,6 +106,10 @@ public boolean isPartialUpdate() { return isPartialUpdate; } + public boolean isFromNativeInsertStmt() { + return isFromNativeInsertStmt; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -115,14 +122,16 @@ public boolean equals(Object o) { return false; } LogicalOlapTableSink that = (LogicalOlapTableSink) o; - return isPartialUpdate == that.isPartialUpdate && Objects.equals(database, that.database) + return isPartialUpdate == that.isPartialUpdate && isFromNativeInsertStmt == that.isFromNativeInsertStmt + && Objects.equals(database, that.database) && Objects.equals(targetTable, that.targetTable) && Objects.equals(cols, that.cols) && Objects.equals(partitionIds, that.partitionIds); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), database, targetTable, cols, partitionIds, isPartialUpdate); + return Objects.hash(super.hashCode(), database, targetTable, cols, partitionIds, + isPartialUpdate, isFromNativeInsertStmt); } @Override @@ -133,13 +142,13 @@ public R accept(PlanVisitor visitor, C context) { @Override public Plan withGroupExpression(Optional groupExpression) { return new LogicalOlapTableSink<>(database, targetTable, cols, partitionIds, outputExprs, isPartialUpdate, - groupExpression, Optional.of(getLogicalProperties()), child()); + isFromNativeInsertStmt, groupExpression, Optional.of(getLogicalProperties()), child()); } @Override public Plan withGroupExprLogicalPropChildren(Optional groupExpression, Optional logicalProperties, List children) { return new LogicalOlapTableSink<>(database, targetTable, cols, partitionIds, outputExprs, isPartialUpdate, - groupExpression, logicalProperties, children.get(0)); + isFromNativeInsertStmt, groupExpression, logicalProperties, children.get(0)); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java index ac3ce8d4ac55df..ed97e1e5c1d245 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java @@ -59,14 +59,15 
@@ public class PhysicalOlapTableSink extends PhysicalSink private final List partitionIds; private final boolean singleReplicaLoad; private final boolean isPartialUpdate; + private final boolean isFromNativeInsertStmt; /** * Constructor */ public PhysicalOlapTableSink(Database database, OlapTable targetTable, List partitionIds, List cols, - boolean singleReplicaLoad, boolean isPartialUpdate, Optional groupExpression, - LogicalProperties logicalProperties, CHILD_TYPE child) { - this(database, targetTable, partitionIds, cols, singleReplicaLoad, isPartialUpdate, + boolean singleReplicaLoad, boolean isPartialUpdate, boolean isFromNativeInsertStmt, + Optional groupExpression, LogicalProperties logicalProperties, CHILD_TYPE child) { + this(database, targetTable, partitionIds, cols, singleReplicaLoad, isPartialUpdate, isFromNativeInsertStmt, groupExpression, logicalProperties, PhysicalProperties.GATHER, null, child); } @@ -74,9 +75,9 @@ public PhysicalOlapTableSink(Database database, OlapTable targetTable, List partitionIds, List cols, - boolean singleReplicaLoad, boolean isPartialUpdate, Optional groupExpression, - LogicalProperties logicalProperties, PhysicalProperties physicalProperties, Statistics statistics, - CHILD_TYPE child) { + boolean singleReplicaLoad, boolean isPartialUpdate, boolean isFromNativeInsertStmt, + Optional groupExpression, LogicalProperties logicalProperties, + PhysicalProperties physicalProperties, Statistics statistics, CHILD_TYPE child) { super(PlanType.PHYSICAL_OLAP_TABLE_SINK, groupExpression, logicalProperties, physicalProperties, statistics, child); this.database = Objects.requireNonNull(database, "database != null in PhysicalOlapTableSink"); @@ -85,6 +86,7 @@ public PhysicalOlapTableSink(Database database, OlapTable targetTable, List children) { Preconditions.checkArgument(children.size() == 1, "PhysicalOlapTableSink only accepts one child"); return new PhysicalOlapTableSink<>(database, targetTable, partitionIds, cols, - singleReplicaLoad, isPartialUpdate, groupExpression, getLogicalProperties(), physicalProperties, - statistics, children.get(0)); + singleReplicaLoad, isPartialUpdate, isFromNativeInsertStmt, groupExpression, + getLogicalProperties(), physicalProperties, statistics, children.get(0)); } @Override @@ -130,6 +136,7 @@ public boolean equals(Object o) { PhysicalOlapTableSink that = (PhysicalOlapTableSink) o; return singleReplicaLoad == that.singleReplicaLoad && isPartialUpdate == that.isPartialUpdate + && isFromNativeInsertStmt == that.isFromNativeInsertStmt && Objects.equals(database, that.database) && Objects.equals(targetTable, that.targetTable) && Objects.equals(cols, that.cols) @@ -138,7 +145,8 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(database, targetTable, cols, partitionIds, singleReplicaLoad, isPartialUpdate); + return Objects.hash(database, targetTable, cols, partitionIds, singleReplicaLoad, + isPartialUpdate, isFromNativeInsertStmt); } @Override @@ -172,20 +180,21 @@ public List getExpressions() { @Override public Plan withGroupExpression(Optional groupExpression) { return new PhysicalOlapTableSink<>(database, targetTable, partitionIds, cols, singleReplicaLoad, - isPartialUpdate, groupExpression, getLogicalProperties(), child()); + isPartialUpdate, isFromNativeInsertStmt, groupExpression, getLogicalProperties(), child()); } @Override public Plan withGroupExprLogicalPropChildren(Optional groupExpression, Optional logicalProperties, List children) { return new PhysicalOlapTableSink<>(database, 
targetTable, partitionIds, cols, singleReplicaLoad, - isPartialUpdate, groupExpression, logicalProperties.get(), children.get(0)); + isPartialUpdate, isFromNativeInsertStmt, groupExpression, logicalProperties.get(), children.get(0)); } @Override public PhysicalPlan withPhysicalPropertiesAndStats(PhysicalProperties physicalProperties, Statistics statistics) { return new PhysicalOlapTableSink<>(database, targetTable, partitionIds, cols, singleReplicaLoad, - isPartialUpdate, groupExpression, getLogicalProperties(), physicalProperties, statistics, child()); + isPartialUpdate, isFromNativeInsertStmt, groupExpression, getLogicalProperties(), + physicalProperties, statistics, child()); } /** @@ -225,6 +234,7 @@ public PhysicalProperties getRequirePhysicalProperties() { @Override public PhysicalOlapTableSink resetLogicalProperties() { return new PhysicalOlapTableSink<>(database, targetTable, partitionIds, cols, singleReplicaLoad, - isPartialUpdate, groupExpression, null, physicalProperties, statistics, child()); + isPartialUpdate, isFromNativeInsertStmt, groupExpression, + null, physicalProperties, statistics, child()); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java index bb1aabf4cf9b1d..590a87b3273284 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java @@ -400,6 +400,8 @@ public class SessionVariable implements Serializable, Writable { public static final String ENABLE_MEMTABLE_ON_SINK_NODE = "enable_memtable_on_sink_node"; + public static final String ENABLE_UNIQUE_KEY_PARTIAL_UPDATE = "enable_unique_key_partial_update"; + public static final String INVERTED_INDEX_CONJUNCTION_OPT_THRESHOLD = "inverted_index_conjunction_opt_threshold"; public static final String FULL_AUTO_ANALYZE_START_TIME = "full_auto_analyze_start_time"; @@ -1192,6 +1194,9 @@ public void setMaxJoinNumberOfReorder(int maxJoinNumberOfReorder) { flag = VariableMgr.GLOBAL) public String fullAutoAnalyzeEndTime = ""; + @VariableMgr.VarAttr(name = ENABLE_UNIQUE_KEY_PARTIAL_UPDATE, needForward = false) + public boolean enableUniqueKeyPartialUpdate = false; + // If this fe is in fuzzy mode, then will use initFuzzyModeVariables to generate some variables, // not the default value set in the code. public void initFuzzyModeVariables() { @@ -2198,6 +2203,14 @@ public void setTruncateCharOrVarcharColumns(boolean truncateCharOrVarcharColumns this.truncateCharOrVarcharColumns = truncateCharOrVarcharColumns; } + public boolean isEnableUniqueKeyPartialUpdate() { + return enableUniqueKeyPartialUpdate; + } + + public void setEnableUniqueKeyPartialUpdate(boolean enableUniqueKeyPartialUpdate) { + this.enableUniqueKeyPartialUpdate = enableUniqueKeyPartialUpdate; + } + /** * Serialize to thrift object. * Used for rest api. diff --git a/regression-test/data/nereids_p0/insert_into_table/partial_update.out b/regression-test/data/nereids_p0/insert_into_table/partial_update.out new file mode 100644 index 00000000000000..d3362b159a1431 --- /dev/null +++ b/regression-test/data/nereids_p0/insert_into_table/partial_update.out @@ -0,0 +1,66 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !1 -- +1 doris 1000 123 1 +2 doris2 2000 223 1 + +-- !1 -- +1 doris 200 123 1 +2 doris2 400 223 1 +4 yixiu 400 \N 4321 + +-- !2 -- +1 doris 1000 123 1 2023-01-01 +2 doris2 2000 223 1 2023-01-01 + +-- !2 -- +1 doris 1000 123 1 2023-01-01 +2 doris2 2600 223 1 2023-07-20 +3 unknown 2500 \N 4321 2022-07-18 + +-- !3 -- +1 doris 1000 123 1 +2 doris2 2000 223 1 + +-- !3 -- +1 doris 1000 123 1 +2 doris2 2000 223 1 + +-- !4 -- +1 doris 1000 123 1 +2 doris2 2000 223 1 +3 doris3 5000 34 345 + +-- !4 -- +1 doris 1000 123 1 +3 doris3 5000 34 345 + +-- !5 -- +1 kevin 18 shenzhen 400 2023-07-01T12:00 + +-- !5 -- +1 kevin 18 shenzhen 400 2023-07-01T12:00 + +-- !6 -- +1 1 3 4 +2 2 4 5 +3 3 2 3 +4 4 1 2 + +-- !6 -- +1 2 3 4 +2 3 4 5 +3 4 2 3 +4 5 1 2 + +-- !7 -- +1 1 1 3 4 +2 2 2 4 5 +3 3 3 2 3 +4 4 4 1 2 + +-- !7 -- +1 1 1 3 4 +2 2 2 4 5 +3 3 3 2 3 +4 4 4 1 2 + diff --git a/regression-test/data/nereids_p0/insert_into_table/partial_update_complex.out b/regression-test/data/nereids_p0/insert_into_table/partial_update_complex.out new file mode 100644 index 00000000000000..b79e8a8436b434 --- /dev/null +++ b/regression-test/data/nereids_p0/insert_into_table/partial_update_complex.out @@ -0,0 +1,37 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !tbl1 -- +1 1 1 1.0 2000-01-01 +2 2 2 2.0 2000-01-02 +3 3 3 3.0 2000-01-03 + +-- !tbl2 -- +1 10 10 10.0 2000-01-10 +2 20 20 20.0 2000-01-20 +3 30 30 30.0 2000-01-30 +4 4 4 4.0 2000-01-04 +5 5 5 5.0 2000-01-05 + +-- !tbl3 -- +1 +3 +5 + +-- !select_result -- +1 10 1000.0 +3 30 3000.0 +5 5 500.0 + +-- !complex_update -- +1 10 1 1000.0 2000-01-01 +2 2 2 2.0 2000-01-02 +3 30 3 3000.0 2000-01-03 +5 5 \N 500.0 \N + +-- !select_result -- +1 1 +3 1 +5 1 + +-- !complex_delete -- +2 2 2 2.0 2000-01-02 + diff --git a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt.out b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt.out new file mode 100644 index 00000000000000..d3362b159a1431 --- /dev/null +++ b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt.out @@ -0,0 +1,66 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !1 -- +1 doris 1000 123 1 +2 doris2 2000 223 1 + +-- !1 -- +1 doris 200 123 1 +2 doris2 400 223 1 +4 yixiu 400 \N 4321 + +-- !2 -- +1 doris 1000 123 1 2023-01-01 +2 doris2 2000 223 1 2023-01-01 + +-- !2 -- +1 doris 1000 123 1 2023-01-01 +2 doris2 2600 223 1 2023-07-20 +3 unknown 2500 \N 4321 2022-07-18 + +-- !3 -- +1 doris 1000 123 1 +2 doris2 2000 223 1 + +-- !3 -- +1 doris 1000 123 1 +2 doris2 2000 223 1 + +-- !4 -- +1 doris 1000 123 1 +2 doris2 2000 223 1 +3 doris3 5000 34 345 + +-- !4 -- +1 doris 1000 123 1 +3 doris3 5000 34 345 + +-- !5 -- +1 kevin 18 shenzhen 400 2023-07-01T12:00 + +-- !5 -- +1 kevin 18 shenzhen 400 2023-07-01T12:00 + +-- !6 -- +1 1 3 4 +2 2 4 5 +3 3 2 3 +4 4 1 2 + +-- !6 -- +1 2 3 4 +2 3 4 5 +3 4 2 3 +4 5 1 2 + +-- !7 -- +1 1 1 3 4 +2 2 2 4 5 +3 3 3 2 3 +4 4 4 1 2 + +-- !7 -- +1 1 1 3 4 +2 2 2 4 5 +3 3 3 2 3 +4 4 4 1 2 + diff --git a/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt_complex.out b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt_complex.out new file mode 100644 index 00000000000000..b79e8a8436b434 --- /dev/null +++ b/regression-test/data/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt_complex.out @@ -0,0 +1,37 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !tbl1 -- +1 1 1 1.0 2000-01-01 +2 2 2 2.0 2000-01-02 +3 3 3 3.0 2000-01-03 + +-- !tbl2 -- +1 10 10 10.0 2000-01-10 +2 20 20 20.0 2000-01-20 +3 30 30 30.0 2000-01-30 +4 4 4 4.0 2000-01-04 +5 5 5 5.0 2000-01-05 + +-- !tbl3 -- +1 +3 +5 + +-- !select_result -- +1 10 1000.0 +3 30 3000.0 +5 5 500.0 + +-- !complex_update -- +1 10 1 1000.0 2000-01-01 +2 2 2 2.0 2000-01-02 +3 30 3 3000.0 2000-01-03 +5 5 \N 500.0 \N + +-- !select_result -- +1 1 +3 1 +5 1 + +-- !complex_delete -- +2 2 2 2.0 2000-01-02 + diff --git a/regression-test/suites/nereids_p0/insert_into_table/partial_update.groovy b/regression-test/suites/nereids_p0/insert_into_table/partial_update.groovy new file mode 100644 index 00000000000000..472acfc0c5e02f --- /dev/null +++ b/regression-test/suites/nereids_p0/insert_into_table/partial_update.groovy @@ -0,0 +1,227 @@ + +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
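+// Scope of this suite: partial-column updates issued through native INSERT statements under the
+// Nereids planner, covering non-strict and strict modes, a sequence column
+// (function_column.sequence_col), the __DORIS_DELETE_SIGN__ column, and the error paths for
+// missing key columns and for unmentioned columns that are neither nullable nor defaulted.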
+ +suite("nereids_partial_update_native_insert_stmt", "p0") { + sql "set enable_nereids_dml=true;" + sql "set experimental_enable_nereids_planner=true;" + sql "set enable_fallback_to_original_planner=false;" + sql "sync;" + + // sql 'set enable_fallback_to_original_planner=false' + def tableName = "nereids_partial_update_native_insert_stmt" + sql """ DROP TABLE IF EXISTS ${tableName} """ + sql """ + CREATE TABLE ${tableName} ( + `id` int(11) NOT NULL COMMENT "用户 ID", + `name` varchar(65533) NOT NULL DEFAULT "yixiu" COMMENT "用户姓名", + `score` int(11) NOT NULL COMMENT "用户得分", + `test` int(11) NULL COMMENT "null test", + `dft` int(11) DEFAULT "4321") + UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1 + PROPERTIES("replication_num" = "1", "enable_unique_key_merge_on_write" = "true") + """ + sql """insert into ${tableName} values(2, "doris2", 2000, 223, 1),(1, "doris", 1000, 123, 1)""" + qt_1 """ select * from ${tableName} order by id; """ + sql "set enable_unique_key_partial_update=true;" + sql "set enable_insert_strict = false;" + sql "sync;" + // partial update using insert stmt in non-strict mode, + // existing rows should be updated and new rows should be inserted with unmentioned columns filled with default or null value + sql """insert into ${tableName}(id,score) values(2,400),(1,200),(4,400)""" + qt_1 """ select * from ${tableName} order by id; """ + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + sql """ DROP TABLE IF EXISTS ${tableName} """ + + + def tableName2 = "nereids_partial_update_native_insert_stmt2" + sql """ DROP TABLE IF EXISTS ${tableName2} """ + sql """ + CREATE TABLE ${tableName2} ( + `id` int(11) NOT NULL COMMENT "用户 ID", + `name` varchar(65533) DEFAULT "unknown" COMMENT "用户姓名", + `score` int(11) NOT NULL COMMENT "用户得分", + `test` int(11) NULL COMMENT "null test", + `dft` int(11) DEFAULT "4321", + `update_time` date NULL) + UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1 + PROPERTIES( + "replication_num" = "1", + "enable_unique_key_merge_on_write" = "true", + "function_column.sequence_col" = "update_time" + )""" + sql """ insert into ${tableName2} values + (2, "doris2", 2000, 223, 1, '2023-01-01'), + (1, "doris", 1000, 123, 1, '2023-01-01');""" + qt_2 "select * from ${tableName2} order by id;" + sql "set enable_unique_key_partial_update=true;" + sql "set enable_insert_strict = false;" + sql "sync;" + // partial update with seq col + sql """ insert into ${tableName2}(id,score,update_time) values + (2,2500,"2023-07-19"), + (2,2600,"2023-07-20"), + (1,1300,"2022-07-19"), + (3,1500,"2022-07-20"), + (3,2500,"2022-07-18"); """ + qt_2 "select * from ${tableName2} order by id;" + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + sql """ DROP TABLE IF EXISTS ${tableName2}; """ + + + def tableName3 = "nereids_partial_update_native_insert_stmt3" + sql """ DROP TABLE IF EXISTS ${tableName3}; """ + sql """ + CREATE TABLE ${tableName3} ( + `id` int(11) NOT NULL COMMENT "用户 ID", + `name` varchar(65533) NOT NULL COMMENT "用户姓名", + `score` int(11) NOT NULL COMMENT "用户得分", + `test` int(11) NULL COMMENT "null test", + `dft` int(11) DEFAULT "4321") + UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1 + PROPERTIES("replication_num" = "1", "enable_unique_key_merge_on_write" = "true") + """ + sql """insert into ${tableName3} values(2, "doris2", 2000, 223, 1),(1, "doris", 1000, 123, 1);""" + qt_3 """ select * from ${tableName3} order by id; """ + sql "set enable_unique_key_partial_update=true;" + sql "sync;" + // in partial update, the unmentioned 
columns should have default values or be nullable + // but field `name` is not nullable and doesn't have default value + test { + sql """insert into ${tableName3}(id,score) values(2,400),(1,200),(4,400)""" + exception "INTERNAL_ERROR" + } + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + qt_3 """ select * from ${tableName3} order by id; """ + sql """ DROP TABLE IF EXISTS ${tableName3} """ + + + def tableName4 = "nereids_partial_update_native_insert_stmt4" + sql """ DROP TABLE IF EXISTS ${tableName4} """ + sql """ + CREATE TABLE ${tableName4} ( + `id` int(11) NOT NULL COMMENT "用户 ID", + `name` varchar(65533) NOT NULL DEFAULT "yixiu" COMMENT "用户姓名", + `score` int(11) NULL COMMENT "用户得分", + `test` int(11) NULL COMMENT "null test", + `dft` int(11) DEFAULT "4321") + UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1 + PROPERTIES("replication_num" = "1", "enable_unique_key_merge_on_write" = "true") + """ + sql """insert into ${tableName4} values(2, "doris2", 2000, 223, 1),(1, "doris", 1000, 123, 1),(3,"doris3",5000,34,345);""" + qt_4 """ select * from ${tableName4} order by id; """ + sql "set enable_unique_key_partial_update=true;" + sql "set enable_insert_strict = false;" + sql "sync;" + // partial update with delete sign + sql "insert into ${tableName4}(id,__DORIS_DELETE_SIGN__) values(2,1);" + qt_4 """ select * from ${tableName4} order by id; """ + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + sql """ DROP TABLE IF EXISTS ${tableName4} """ + + + def tableName5 = "nereids_partial_update_native_insert_stmt5" + sql """ DROP TABLE IF EXISTS ${tableName5} """ + sql """ + CREATE TABLE ${tableName5} ( + `id` int(11) NULL, + `name` varchar(10) NULL, + `age` int(11) NULL DEFAULT "20", + `city` varchar(10) NOT NULL DEFAULT "beijing", + `balance` decimalv3(9, 0) NULL, + `last_access_time` datetime NULL + ) ENGINE = OLAP UNIQUE KEY(`id`) + COMMENT 'OLAP' DISTRIBUTED BY HASH(`id`) + BUCKETS AUTO PROPERTIES ( + "replication_allocation" = "tag.location.default: 1", + "storage_format" = "V2", + "enable_unique_key_merge_on_write" = "true", + "light_schema_change" = "true", + "disable_auto_compaction" = "false", + "enable_single_replica_compaction" = "false" + ); + """ + sql """insert into ${tableName5} values(1,"kevin",18,"shenzhen",400,"2023-07-01 12:00:00");""" + qt_5 """select * from ${tableName5} order by id;""" + sql "set enable_insert_strict = true;" + sql "set enable_unique_key_partial_update=true;" + sql "sync;" + // partial update using insert stmt in strict mode, the max_filter_ratio is always 0 + test { + sql """ insert into ${tableName5}(id,balance,last_access_time) values(1,500,"2023-07-03 12:00:01"),(3,23,"2023-07-03 12:00:02"),(18,9999999,"2023-07-03 12:00:03"); """ + exception "Insert has filtered data in strict mode" + } + qt_5 """select * from ${tableName5} order by id;""" + sql "set enable_unique_key_partial_update=false;" + sql "set enable_insert_strict = false;" + sql "sync;" + sql """ DROP TABLE IF EXISTS ${tableName5}; """ + + def tableName6 = "nereids_partial_update_native_insert_stmt6" + sql """ DROP TABLE IF EXISTS ${tableName6} """ + sql """create table ${tableName6} ( + k int null, + v int null, + v2 int null, + v3 int null + ) unique key (k) distributed by hash(k) buckets 1 + properties("replication_num" = "1", + "enable_unique_key_merge_on_write"="true", + "disable_auto_compaction"="true"); """ + sql "insert into ${tableName6} values(1,1,3,4),(2,2,4,5),(3,3,2,3),(4,4,1,2);" + qt_6 "select * from ${tableName6} order by k;" + sql "set 
enable_unique_key_partial_update=true;" + sql "sync;" + sql "insert into ${tableName6}(k,v) select v2,v3 from ${tableName6};" + qt_6 "select * from ${tableName6} order by k;" + sql "set enable_unique_key_partial_update=false;" + sql "set enable_insert_strict = false;" + sql "sync;" + sql """ DROP TABLE IF EXISTS ${tableName6}; """ + + def tableName7 = "nereids_partial_update_native_insert_stmt7" + sql """ DROP TABLE IF EXISTS ${tableName7} """ + sql """create table ${tableName7} ( + k1 int null, + k2 int null, + k3 int null, + v1 int null, + v2 int null + ) unique key (k1,k2,k3) distributed by hash(k1,k2) buckets 4 + properties("replication_num" = "1", + "enable_unique_key_merge_on_write"="true", + "disable_auto_compaction"="true"); """ + sql "insert into ${tableName7} values(1,1,1,3,4),(2,2,2,4,5),(3,3,3,2,3),(4,4,4,1,2);" + qt_7 "select * from ${tableName7} order by k1;" + sql "set enable_unique_key_partial_update=true;" + sql "sync;" + test { + sql "insert into ${tableName7}(k1,k2,v2) select k2,k3,v1 from ${tableName7};" + exception "Partial update should include all key columns, missing: k3" + } + qt_7 "select * from ${tableName7} order by k1;" + sql """ DROP TABLE IF EXISTS ${tableName7}; """ + + sql "set enable_unique_key_partial_update=false;" + sql "set enable_insert_strict = false;" + sql "set enable_fallback_to_original_planner=true;" + sql "sync;" +} diff --git a/regression-test/suites/nereids_p0/insert_into_table/partial_update_complex.groovy b/regression-test/suites/nereids_p0/insert_into_table/partial_update_complex.groovy new file mode 100644 index 00000000000000..66945fed053f38 --- /dev/null +++ b/regression-test/suites/nereids_p0/insert_into_table/partial_update_complex.groovy @@ -0,0 +1,117 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
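+// Scope of this suite: partial updates driven by INSERT ... SELECT over joined tables under the
+// Nereids planner, updating a subset of columns from a join result and deleting rows by selecting
+// a constant into __DORIS_DELETE_SIGN__.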
+ +suite("nereids_partial_update_native_insert_stmt_complex", "p0") { + sql "set enable_nereids_dml=true;" + sql "set experimental_enable_nereids_planner=true;" + sql "set enable_fallback_to_original_planner=false;" + sql "sync;" + + // test complex partial update + def tbName1 = "nereids_partial_update_native_insert_stmt_complex1" + def tbName2 = "nereids_partial_update_native_insert_stmt_complex2" + def tbName3 = "nereids_partial_update_native_insert_stmt_complex3" + + sql "DROP TABLE IF EXISTS ${tbName1}" + sql "DROP TABLE IF EXISTS ${tbName2}" + sql "DROP TABLE IF EXISTS ${tbName3}" + + sql """create table ${tbName1} ( + id int, + c1 bigint, + c2 string, + c3 double, + c4 date) unique key (id) distributed by hash(id) + properties('replication_num'='1', 'enable_unique_key_merge_on_write' = 'true');""" + + sql """create table ${tbName2} ( + id int, + c1 bigint, + c2 string, + c3 double, + c4 date) unique key (id) distributed by hash(id) + properties('replication_num'='1', 'enable_unique_key_merge_on_write' = 'true');""" + + sql """create table ${tbName3} (id int) distributed by hash (id) properties('replication_num'='1');""" + + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + sql """insert into ${tbName1} values + (1, 1, '1', 1.0, '2000-01-01'), + (2, 2, '2', 2.0, '2000-01-02'), + (3, 3, '3', 3.0, '2000-01-03');""" + sql """insert into ${tbName2} values + (1, 10, '10', 10.0, '2000-01-10'), + (2, 20, '20', 20.0, '2000-01-20'), + (3, 30, '30', 30.0, '2000-01-30'), + (4, 4, '4', 4.0, '2000-01-04'), + (5, 5, '5', 5.0, '2000-01-05');""" + sql """insert into ${tbName3} values(1), (3), (5);""" + + qt_tbl1 "select * from ${tbName1} order by id;" + qt_tbl2 "select * from ${tbName2} order by id;" + qt_tbl3 "select * from ${tbName3} order by id;" + + qt_select_result """select ${tbName2}.id, ${tbName2}.c1, ${tbName2}.c3 * 100 + from ${tbName2} inner join ${tbName3} on ${tbName2}.id = ${tbName3}.id order by ${tbName2}.id;""" + + sql "set enable_unique_key_partial_update=true;" + sql "set enable_insert_strict = false;" + sql "sync;" + sql """insert into ${tbName1}(id, c1, c3) + select ${tbName2}.id, ${tbName2}.c1, ${tbName2}.c3 * 100 + from ${tbName2} inner join ${tbName3} on ${tbName2}.id = ${tbName3}.id; """ + + qt_complex_update """select * from ${tbName1} order by id;""" + + sql "truncate table ${tbName1};" + sql "truncate table ${tbName2};" + sql "truncate table ${tbName3};" + + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + sql """insert into ${tbName1} values + (1, 1, '1', 1.0, '2000-01-01'), + (2, 2, '2', 2.0, '2000-01-02'), + (3, 3, '3', 3.0, '2000-01-03');""" + sql """insert into ${tbName2} values + (1, 10, '10', 10.0, '2000-01-10'), + (2, 20, '20', 20.0, '2000-01-20'), + (3, 30, '30', 30.0, '2000-01-30'), + (4, 4, '4', 4.0, '2000-01-04'), + (5, 5, '5', 5.0, '2000-01-05');""" + sql """insert into ${tbName3} values(1), (3), (5);""" + + qt_select_result "select ${tbName2}.id,1 from ${tbName2} inner join ${tbName3} on ${tbName2}.id = ${tbName3}.id order by ${tbName2}.id;" + + sql "set enable_unique_key_partial_update=true;" + sql "set enable_insert_strict = false;" + sql "sync;" + sql """ insert into ${tbName1}(id, __DORIS_DELETE_SIGN__) + select ${tbName2}.id,1 from ${tbName2} inner join ${tbName3} on ${tbName2}.id = ${tbName3}.id;""" + + qt_complex_delete """select * from ${tbName1} order by id;""" + + sql "DROP TABLE IF EXISTS ${tbName1}" + sql "DROP TABLE IF EXISTS ${tbName2}" + sql "DROP TABLE IF EXISTS ${tbName3}" + + sql "set 
enable_unique_key_partial_update=false;" + sql "set enable_insert_strict = false;" + sql "set enable_fallback_to_original_planner=true;" + sql "sync;" +} diff --git a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt.groovy b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt.groovy new file mode 100644 index 00000000000000..23a44c846e09a1 --- /dev/null +++ b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt.groovy @@ -0,0 +1,227 @@ + +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("test_partial_update_native_insert_stmt", "p0") { + sql "set enable_nereids_dml=false;" + sql "set experimental_enable_nereids_planner=false;" + sql "set enable_fallback_to_original_planner=true;" + sql "sync;" + + // sql 'set enable_fallback_to_original_planner=false' + def tableName = "test_partial_update_native_insert_stmt" + sql """ DROP TABLE IF EXISTS ${tableName} """ + sql """ + CREATE TABLE ${tableName} ( + `id` int(11) NOT NULL COMMENT "用户 ID", + `name` varchar(65533) NOT NULL DEFAULT "yixiu" COMMENT "用户姓名", + `score` int(11) NOT NULL COMMENT "用户得分", + `test` int(11) NULL COMMENT "null test", + `dft` int(11) DEFAULT "4321") + UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1 + PROPERTIES("replication_num" = "1", "enable_unique_key_merge_on_write" = "true") + """ + sql """insert into ${tableName} values(2, "doris2", 2000, 223, 1),(1, "doris", 1000, 123, 1)""" + qt_1 """ select * from ${tableName} order by id; """ + sql "set enable_unique_key_partial_update=true;" + sql "set enable_insert_strict = false;" + sql "sync;" + // partial update using insert stmt in non-strict mode, + // existing rows should be updated and new rows should be inserted with unmentioned columns filled with default or null value + sql """insert into ${tableName}(id,score) values(2,400),(1,200),(4,400)""" + qt_1 """ select * from ${tableName} order by id; """ + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + sql """ DROP TABLE IF EXISTS ${tableName} """ + + + def tableName2 = "test_partial_update_native_insert_stmt2" + sql """ DROP TABLE IF EXISTS ${tableName2} """ + sql """ + CREATE TABLE ${tableName2} ( + `id` int(11) NOT NULL COMMENT "用户 ID", + `name` varchar(65533) DEFAULT "unknown" COMMENT "用户姓名", + `score` int(11) NOT NULL COMMENT "用户得分", + `test` int(11) NULL COMMENT "null test", + `dft` int(11) DEFAULT "4321", + `update_time` date NULL) + UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1 + PROPERTIES( + "replication_num" = "1", + "enable_unique_key_merge_on_write" = "true", + "function_column.sequence_col" = "update_time" + )""" + sql """ insert into ${tableName2} values + (2, "doris2", 2000, 223, 1, 
'2023-01-01'), + (1, "doris", 1000, 123, 1, '2023-01-01');""" + qt_2 "select * from ${tableName2} order by id;" + sql "set enable_unique_key_partial_update=true;" + sql "set enable_insert_strict = false;" + sql "sync;" + // partial update with seq col + sql """ insert into ${tableName2}(id,score,update_time) values + (2,2500,"2023-07-19"), + (2,2600,"2023-07-20"), + (1,1300,"2022-07-19"), + (3,1500,"2022-07-20"), + (3,2500,"2022-07-18"); """ + qt_2 "select * from ${tableName2} order by id;" + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + sql """ DROP TABLE IF EXISTS ${tableName2}; """ + + + def tableName3 = "test_partial_update_native_insert_stmt3" + sql """ DROP TABLE IF EXISTS ${tableName3}; """ + sql """ + CREATE TABLE ${tableName3} ( + `id` int(11) NOT NULL COMMENT "用户 ID", + `name` varchar(65533) NOT NULL COMMENT "用户姓名", + `score` int(11) NOT NULL COMMENT "用户得分", + `test` int(11) NULL COMMENT "null test", + `dft` int(11) DEFAULT "4321") + UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1 + PROPERTIES("replication_num" = "1", "enable_unique_key_merge_on_write" = "true") + """ + sql """insert into ${tableName3} values(2, "doris2", 2000, 223, 1),(1, "doris", 1000, 123, 1);""" + qt_3 """ select * from ${tableName3} order by id; """ + sql "set enable_unique_key_partial_update=true;" + sql "sync;" + // in partial update, the unmentioned columns should have default values or be nullable + // but field `name` is not nullable and doesn't have default value + test { + sql """insert into ${tableName3}(id,score) values(2,400),(1,200),(4,400)""" + exception "INTERNAL_ERROR" + } + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + qt_3 """ select * from ${tableName3} order by id; """ + sql """ DROP TABLE IF EXISTS ${tableName3} """ + + + def tableName4 = "test_partial_update_native_insert_stmt4" + sql """ DROP TABLE IF EXISTS ${tableName4} """ + sql """ + CREATE TABLE ${tableName4} ( + `id` int(11) NOT NULL COMMENT "用户 ID", + `name` varchar(65533) NOT NULL DEFAULT "yixiu" COMMENT "用户姓名", + `score` int(11) NULL COMMENT "用户得分", + `test` int(11) NULL COMMENT "null test", + `dft` int(11) DEFAULT "4321") + UNIQUE KEY(`id`) DISTRIBUTED BY HASH(`id`) BUCKETS 1 + PROPERTIES("replication_num" = "1", "enable_unique_key_merge_on_write" = "true") + """ + sql """insert into ${tableName4} values(2, "doris2", 2000, 223, 1),(1, "doris", 1000, 123, 1),(3,"doris3",5000,34,345);""" + qt_4 """ select * from ${tableName4} order by id; """ + sql "set enable_unique_key_partial_update=true;" + sql "set enable_insert_strict = false;" + sql "sync;" + // partial update with delete sign + sql "insert into ${tableName4}(id,__DORIS_DELETE_SIGN__) values(2,1);" + qt_4 """ select * from ${tableName4} order by id; """ + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + sql """ DROP TABLE IF EXISTS ${tableName4} """ + + + def tableName5 = "test_partial_update_native_insert_stmt5" + sql """ DROP TABLE IF EXISTS ${tableName5} """ + sql """ + CREATE TABLE ${tableName5} ( + `id` int(11) NULL, + `name` varchar(10) NULL, + `age` int(11) NULL DEFAULT "20", + `city` varchar(10) NOT NULL DEFAULT "beijing", + `balance` decimalv3(9, 0) NULL, + `last_access_time` datetime NULL + ) ENGINE = OLAP UNIQUE KEY(`id`) + COMMENT 'OLAP' DISTRIBUTED BY HASH(`id`) + BUCKETS AUTO PROPERTIES ( + "replication_allocation" = "tag.location.default: 1", + "storage_format" = "V2", + "enable_unique_key_merge_on_write" = "true", + "light_schema_change" = "true", + "disable_auto_compaction" = "false", + 
"enable_single_replica_compaction" = "false" + ); + """ + sql """insert into ${tableName5} values(1,"kevin",18,"shenzhen",400,"2023-07-01 12:00:00");""" + qt_5 """select * from ${tableName5} order by id;""" + sql "set enable_insert_strict = true;" + sql "set enable_unique_key_partial_update=true;" + sql "sync;" + // partial update using insert stmt in strict mode, the max_filter_ratio is always 0 + test { + sql """ insert into ${tableName5}(id,balance,last_access_time) values(1,500,"2023-07-03 12:00:01"),(3,23,"2023-07-03 12:00:02"),(18,9999999,"2023-07-03 12:00:03"); """ + exception "Insert has filtered data in strict mode" + } + qt_5 """select * from ${tableName5} order by id;""" + sql "set enable_unique_key_partial_update=false;" + sql "set enable_insert_strict = false;" + sql "sync;" + sql """ DROP TABLE IF EXISTS ${tableName5}; """ + + def tableName6 = "test_partial_update_native_insert_stmt6" + sql """ DROP TABLE IF EXISTS ${tableName6} """ + sql """create table ${tableName6} ( + k int null, + v int null, + v2 int null, + v3 int null + ) unique key (k) distributed by hash(k) buckets 1 + properties("replication_num" = "1", + "enable_unique_key_merge_on_write"="true", + "disable_auto_compaction"="true"); """ + sql "insert into ${tableName6} values(1,1,3,4),(2,2,4,5),(3,3,2,3),(4,4,1,2);" + qt_6 "select * from ${tableName6} order by k;" + sql "set enable_unique_key_partial_update=true;" + sql "sync;" + sql "insert into ${tableName6}(k,v) select v2,v3 from ${tableName6};" + qt_6 "select * from ${tableName6} order by k;" + sql "set enable_unique_key_partial_update=false;" + sql "set enable_insert_strict = false;" + sql "sync;" + sql """ DROP TABLE IF EXISTS ${tableName6}; """ + + def tableName7 = "test_partial_update_native_insert_stmt7" + sql """ DROP TABLE IF EXISTS ${tableName7} """ + sql """create table ${tableName7} ( + k1 int null, + k2 int null, + k3 int null, + v1 int null, + v2 int null + ) unique key (k1,k2,k3) distributed by hash(k1,k2) buckets 4 + properties("replication_num" = "1", + "enable_unique_key_merge_on_write"="true", + "disable_auto_compaction"="true"); """ + sql "insert into ${tableName7} values(1,1,1,3,4),(2,2,2,4,5),(3,3,3,2,3),(4,4,4,1,2);" + qt_7 "select * from ${tableName7} order by k1;" + sql "set enable_unique_key_partial_update=true;" + sql "sync;" + test { + sql "insert into ${tableName7}(k1,k2,v2) select k2,k3,v1 from ${tableName7};" + exception "Partial update should include all key columns, missing: k3" + } + qt_7 "select * from ${tableName7} order by k1;" + sql """ DROP TABLE IF EXISTS ${tableName7}; """ + + sql "set enable_unique_key_partial_update=false;" + sql "set enable_insert_strict = false;" + sql "set experimental_enable_nereids_planner=true;" + sql "sync;" +} diff --git a/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt_complex.groovy b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt_complex.groovy new file mode 100644 index 00000000000000..bdb3a12dc56192 --- /dev/null +++ b/regression-test/suites/unique_with_mow_p0/partial_update/test_partial_update_native_insert_stmt_complex.groovy @@ -0,0 +1,117 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("test_partial_update_native_insert_stmt_complex", "p0") { + sql "set enable_nereids_dml=false;" + sql "set experimental_enable_nereids_planner=false;" + sql "set enable_fallback_to_original_planner=true;" + sql "sync;" + + // test complex partial update + def tbName1 = "test_partial_update_native_insert_stmt_complex1" + def tbName2 = "test_partial_update_native_insert_stmt_complex2" + def tbName3 = "test_partial_update_native_insert_stmt_complex3" + + sql "DROP TABLE IF EXISTS ${tbName1}" + sql "DROP TABLE IF EXISTS ${tbName2}" + sql "DROP TABLE IF EXISTS ${tbName3}" + + sql """create table ${tbName1} ( + id int, + c1 bigint, + c2 string, + c3 double, + c4 date) unique key (id) distributed by hash(id) + properties('replication_num'='1', 'enable_unique_key_merge_on_write' = 'true');""" + + sql """create table ${tbName2} ( + id int, + c1 bigint, + c2 string, + c3 double, + c4 date) unique key (id) distributed by hash(id) + properties('replication_num'='1', 'enable_unique_key_merge_on_write' = 'true');""" + + sql """create table ${tbName3} (id int) distributed by hash (id) properties('replication_num'='1');""" + + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + sql """insert into ${tbName1} values + (1, 1, '1', 1.0, '2000-01-01'), + (2, 2, '2', 2.0, '2000-01-02'), + (3, 3, '3', 3.0, '2000-01-03');""" + sql """insert into ${tbName2} values + (1, 10, '10', 10.0, '2000-01-10'), + (2, 20, '20', 20.0, '2000-01-20'), + (3, 30, '30', 30.0, '2000-01-30'), + (4, 4, '4', 4.0, '2000-01-04'), + (5, 5, '5', 5.0, '2000-01-05');""" + sql """insert into ${tbName3} values(1), (3), (5);""" + + qt_tbl1 "select * from ${tbName1} order by id;" + qt_tbl2 "select * from ${tbName2} order by id;" + qt_tbl3 "select * from ${tbName3} order by id;" + + qt_select_result """select ${tbName2}.id, ${tbName2}.c1, ${tbName2}.c3 * 100 + from ${tbName2} inner join ${tbName3} on ${tbName2}.id = ${tbName3}.id order by ${tbName2}.id;""" + + sql "set enable_unique_key_partial_update=true;" + sql "set enable_insert_strict = false;" + sql "sync;" + sql """insert into ${tbName1}(id, c1, c3) + select ${tbName2}.id, ${tbName2}.c1, ${tbName2}.c3 * 100 + from ${tbName2} inner join ${tbName3} on ${tbName2}.id = ${tbName3}.id; """ + + qt_complex_update """select * from ${tbName1} order by id;""" + + sql "truncate table ${tbName1};" + sql "truncate table ${tbName2};" + sql "truncate table ${tbName3};" + + sql "set enable_unique_key_partial_update=false;" + sql "sync;" + sql """insert into ${tbName1} values + (1, 1, '1', 1.0, '2000-01-01'), + (2, 2, '2', 2.0, '2000-01-02'), + (3, 3, '3', 3.0, '2000-01-03');""" + sql """insert into ${tbName2} values + (1, 10, '10', 10.0, '2000-01-10'), + (2, 20, '20', 20.0, '2000-01-20'), + (3, 30, '30', 30.0, '2000-01-30'), + (4, 4, '4', 4.0, '2000-01-04'), + (5, 5, '5', 5.0, '2000-01-05');""" + sql """insert into ${tbName3} values(1), (3), (5);""" + + qt_select_result "select ${tbName2}.id,1 from 
${tbName2} inner join ${tbName3} on ${tbName2}.id = ${tbName3}.id order by ${tbName2}.id;" + + sql "set enable_unique_key_partial_update=true;" + sql "set enable_insert_strict = false;" + sql "sync;" + sql """ insert into ${tbName1}(id, __DORIS_DELETE_SIGN__) + select ${tbName2}.id,1 from ${tbName2} inner join ${tbName3} on ${tbName2}.id = ${tbName3}.id;""" + + qt_complex_delete """select * from ${tbName1} order by id;""" + + sql "DROP TABLE IF EXISTS ${tbName1}" + sql "DROP TABLE IF EXISTS ${tbName2}" + sql "DROP TABLE IF EXISTS ${tbName3}" + + sql "set enable_unique_key_partial_update=false;" + sql "set enable_insert_strict = false;" + sql "set experimental_enable_nereids_planner=true;" + sql "sync;" +} From 0ccb032d795ac4d38e3d3c444d76d9e25f2b2603 Mon Sep 17 00:00:00 2001 From: yiguolei <676222867@qq.com> Date: Sat, 16 Sep 2023 18:17:58 +0800 Subject: [PATCH 11/33] [parameter](query timeout) change default query timeout to 15min (#24480) Co-authored-by: yiguolei --- docs/en/docs/advanced/variables.md | 2 +- docs/en/docs/data-table/advance-usage.md | 2 +- docs/zh-CN/docs/advanced/variables.md | 2 +- .../src/main/java/org/apache/doris/qe/SessionVariable.java | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/en/docs/advanced/variables.md b/docs/en/docs/advanced/variables.md index 0fc218a1faa6ae..63052a8230cd59 100644 --- a/docs/en/docs/advanced/variables.md +++ b/docs/en/docs/advanced/variables.md @@ -365,7 +365,7 @@ Translated with www.DeepL.com/Translator (free version) * `query_timeout` - Used to set the query timeout. This variable applies to all query statements in the current connection. Particularly, timeout of INSERT statements is recommended to be managed by the insert_timeout below. The default is 5 minutes, in seconds. + Used to set the query timeout. This variable applies to all query statements in the current connection. Particularly, timeout of INSERT statements is recommended to be managed by the insert_timeout below. The default is 15 minutes, in seconds. 
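  As an illustration only (the table name `lineitem` below is a placeholder), the value can be raised for the whole session or overridden for a single statement with the `SET_VAR` hint:

  ```sql
  -- session level: allow statements in this connection to run for up to one hour
  SET query_timeout = 3600;

  -- statement level: override the timeout for this query only
  SELECT /*+ SET_VAR(query_timeout = 1800) */ COUNT(*) FROM lineitem;
  ```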
* `insert_timeout` diff --git a/docs/en/docs/data-table/advance-usage.md b/docs/en/docs/data-table/advance-usage.md index 8f63c833e90624..aa8adfd147a98a 100644 --- a/docs/en/docs/data-table/advance-usage.md +++ b/docs/en/docs/data-table/advance-usage.md @@ -207,7 +207,7 @@ mysql> SHOW VARIABLES LIKE "%query_timeout%"; +---------------+-------+ | Variable_name | Value | +---------------+-------+ -| QUERY_TIMEOUT | 300 | +| QUERY_TIMEOUT | 900 | +---------------+-------+ 1 row in set (0.00 sec) ``` diff --git a/docs/zh-CN/docs/advanced/variables.md b/docs/zh-CN/docs/advanced/variables.md index bea3836ede9435..59d5ddad44a717 100644 --- a/docs/zh-CN/docs/advanced/variables.md +++ b/docs/zh-CN/docs/advanced/variables.md @@ -362,7 +362,7 @@ SELECT /*+ SET_VAR(query_timeout = 1, enable_partition_cache=true) */ sleep(3); - `query_timeout` - 用于设置查询超时。该变量会作用于当前连接中所有的查询语句,对于 INSERT 语句推荐使用insert_timeout。默认为 5 分钟,单位为秒。 + 用于设置查询超时。该变量会作用于当前连接中所有的查询语句,对于 INSERT 语句推荐使用insert_timeout。默认为 15 分钟,单位为秒。 - `insert_timeout` 用于设置针对 INSERT 语句的超时。该变量仅作用于 INSERT 语句,建议在 INSERT 行为易持续较长时间的场景下设置。默认为 4 小时,单位为秒。由于旧版本用户会通过延长 query_timeout 来防止 INSERT 语句超时,insert_timeout 在 query_timeout 大于自身的情况下将会失效, 以兼容旧版本用户的习惯。 diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java index 590a87b3273284..5b9cb1b8075374 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java @@ -451,7 +451,7 @@ public class SessionVariable implements Serializable, Writable { // query timeout in second. @VariableMgr.VarAttr(name = QUERY_TIMEOUT) - public int queryTimeoutS = 300; + public int queryTimeoutS = 900; // The global max_execution_time value provides the default for the session value for new connections. 
// The session value applies to SELECT executions executed within the session that include From 990d6c02ec37ba4c57e15d598242c5669419db62 Mon Sep 17 00:00:00 2001 From: zhiqqqq Date: Sat, 16 Sep 2023 18:26:13 +0800 Subject: [PATCH 12/33] [Feature](new function) Add a uuid-numeric function, returns uuid in largerint type, 20x faster than uuid (#24395) --- .../vec/functions/simple_function_factory.h | 2 + be/src/vec/functions/uuid_numeric.cpp | 158 ++++++++++++++++++ .../numeric-functions/uuid_numeric.md | 51 ++++++ .../numeric-functions/uuid_numeric.md | 49 ++++++ .../doris/catalog/BuiltinScalarFunctions.java | 2 + .../functions/Nondeterministic.java | 1 - .../functions/scalar/UuidNumeric.java | 58 +++++++ .../visitor/ScalarFunctionVisitor.java | 5 + gensrc/script/doris_builtins_functions.py | 3 +- 9 files changed, 327 insertions(+), 2 deletions(-) create mode 100644 be/src/vec/functions/uuid_numeric.cpp create mode 100644 docs/en/docs/sql-manual/sql-functions/numeric-functions/uuid_numeric.md create mode 100644 docs/zh-CN/docs/sql-manual/sql-functions/numeric-functions/uuid_numeric.md create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UuidNumeric.java diff --git a/be/src/vec/functions/simple_function_factory.h b/be/src/vec/functions/simple_function_factory.h index b5e72bdde631db..e0962b67187ad8 100644 --- a/be/src/vec/functions/simple_function_factory.h +++ b/be/src/vec/functions/simple_function_factory.h @@ -77,6 +77,7 @@ void register_function_like(SimpleFunctionFactory& factory); void register_function_regexp(SimpleFunctionFactory& factory); void register_function_random(SimpleFunctionFactory& factory); void register_function_uuid(SimpleFunctionFactory& factory); +void register_function_uuid_numeric(SimpleFunctionFactory& factory); void register_function_coalesce(SimpleFunctionFactory& factory); void register_function_grouping(SimpleFunctionFactory& factory); void register_function_datetime_floor_ceil(SimpleFunctionFactory& factory); @@ -253,6 +254,7 @@ class SimpleFunctionFactory { register_function_regexp(instance); register_function_random(instance); register_function_uuid(instance); + register_function_uuid_numeric(instance); register_function_coalesce(instance); register_function_grouping(instance); register_function_datetime_floor_ceil(instance); diff --git a/be/src/vec/functions/uuid_numeric.cpp b/be/src/vec/functions/uuid_numeric.cpp new file mode 100644 index 00000000000000..d41912b20e86aa --- /dev/null +++ b/be/src/vec/functions/uuid_numeric.cpp @@ -0,0 +1,158 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
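+// uuid_numeric(): returns a pseudo-random 128-bit value as Int128 (LARGEINT) rather than a
+// formatted UUID string; per the commit message it is intended to be roughly 20x faster than the
+// string-producing uuid() function. The randomness comes from four linear congruential generators
+// seeded per execution (see UuidNumeric below).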
+ +#include +#include +#include + +#include "common/status.h" +#include "runtime/large_int_value.h" +#include "vec/columns/column_vector.h" +#include "vec/columns/columns_number.h" +#include "vec/common/hash_table/hash.h" +#include "vec/common/sip_hash.h" +#include "vec/common/uint128.h" +#include "vec/core/block.h" +#include "vec/core/types.h" +#include "vec/data_types/data_type_number.h" +#include "vec/functions/function.h" +#include "vec/functions/simple_function_factory.h" + +namespace doris { +class FunctionContext; +} // namespace doris + +namespace doris::vectorized { + +// NOTE: +// The implementation of the random generator is inspired by RandImpl::execute of ClickHouse. +// The ClickHouse RandImpl::execute function provided valuable insights and ideas for the development process. + +struct LinearCongruentialGenerator { + /// Constants from `man lrand48_r`. + static constexpr UInt64 a = 0x5DEECE66D; + static constexpr UInt64 c = 0xB; + + /// And this is from `head -c8 /dev/urandom | xxd -p` + UInt64 current = 0xbcabbed75dfe77cdLL; + + void seed(UInt64 value) { current = value; } + + UInt32 next() { + current = current * a + c; + return static_cast<UInt32>(current >> 16); + } +}; + +UInt64 calcSeed(UInt64 rand_seed, UInt64 additional_seed) { + return int_hash64(rand_seed ^ int_hash64(additional_seed)); +} + +void seed(LinearCongruentialGenerator& generator, UInt64 rand_seed, intptr_t additional_seed) { + generator.seed(calcSeed(rand_seed, additional_seed)); +} + +/// The array of random numbers from 'head -c8 /dev/urandom | xxd -p'. +/// Can be used for creating seeds for random generators. +constexpr std::array random_numbers = { + 0x62224b4e764e1560ULL, 0xa79ec6fdbb2ef873ULL, 0xe2862f147d1c0649ULL, 0xc8d47f9a38554cb2ULL, + 0x62b0dd532dcd8a43ULL, 0xef3128a01e7a28bcULL, 0x32e4eb5461fc0f6ULL, 0xd3377ce32d3d9579ULL, + 0x6f129aa32529a57cULL, 0x98dd0ba25301a5a3ULL, 0x457bd29769afabf1ULL, 0x3bb886ea86263d9dULL, + 0xec3e9514dc0bb543ULL, 0x84282031a89ce23eULL, 0x55212b07d1a9a765ULL, 0xe9de69f882aa48afULL, + 0x13a71c9baa9babbbULL, 0x3b7be8b0dd9cb586ULL, 0x1375e8cb773f3e35ULL, 0x9f841693b13e615fULL, + 0xab62458b90fd9aefULL, 0xa9d9fdd187f8e941ULL, 0xca1851150f831eeaULL, 0xa43f586f9078e918ULL, + 0xe336c2883038a257ULL, 0xfebaffc035561545ULL, 0x27c2436d2607840eULL, 0x21bab1489b0ff552ULL, + 0x22ca273c2756bb6cULL, 0x4b6260e129af35f1ULL, 0xeb42b6c0d4322c6fULL, 0xfea0f49cc4e68339ULL, +}; + +class UuidNumeric : public IFunction { +public: + static constexpr auto name = "uuid_numeric"; + static constexpr size_t uuid_length = 16; // Int128 + + static FunctionPtr create() { return std::make_shared<UuidNumeric>(); } + + String get_name() const override { return name; } + + bool use_default_implementation_for_constants() const override { return false; } + + size_t get_number_of_arguments() const override { return 0; } + + bool is_variadic() const override { return false; } + + // uuid_numeric is an Int128 (maybe UInt128 is better, but we do not support it now) + DataTypePtr get_return_type_impl(const DataTypes& arguments) const override { + return std::make_shared<DataTypeInt128>(); + } + + // TODO(zhiqiang): Maybe override the open function? 
+ + Status execute_impl(FunctionContext* /*context*/, Block& block, + const ColumnNumbers& /*arguments*/, size_t result, + size_t input_rows_count) override { + auto col_res = ColumnInt128::create(); + col_res->resize(input_rows_count); + + GenerateUUIDs(reinterpret_cast<char*>(col_res->get_data().data()), + uuid_length * input_rows_count); + + block.replace_by_position(result, std::move(col_res)); + return Status::OK(); + } + +private: + void GenerateUUIDs(char* output, size_t size) { + LinearCongruentialGenerator generator0; + LinearCongruentialGenerator generator1; + LinearCongruentialGenerator generator2; + LinearCongruentialGenerator generator3; + + UInt64 rand_seed = randomSeed(); + + seed(generator0, rand_seed, random_numbers[0] + reinterpret_cast<intptr_t>(output)); + seed(generator1, rand_seed, random_numbers[1] + reinterpret_cast<intptr_t>(output)); + seed(generator2, rand_seed, random_numbers[2] + reinterpret_cast<intptr_t>(output)); + seed(generator3, rand_seed, random_numbers[3] + reinterpret_cast<intptr_t>(output)); + + for (const char* end = output + size; output < end; output += 16) { + unaligned_store<UInt32>(output, generator0.next()); + unaligned_store<UInt32>(output + 4, generator1.next()); + unaligned_store<UInt32>(output + 8, generator2.next()); + unaligned_store<UInt32>(output + 12, generator3.next()); + } + /// It is guaranteed (by PaddedPODArray) that we can overwrite up to 15 bytes after end. + } + + UInt64 randomSeed() { + struct timespec times; + + /// Not cryptographically secure as time, pid and stack address can be predictable. + + SipHash hash; + hash.update(times.tv_nsec); + hash.update(times.tv_sec); + hash.update((uintptr_t)pthread_self()); + + return hash.get64(); + } +}; + +void register_function_uuid_numeric(SimpleFunctionFactory& factory) { + factory.register_function<UuidNumeric>(); +} + +} // namespace doris::vectorized diff --git a/docs/en/docs/sql-manual/sql-functions/numeric-functions/uuid_numeric.md b/docs/en/docs/sql-manual/sql-functions/numeric-functions/uuid_numeric.md new file mode 100644 index 00000000000000..07d7ec3f9c58a4 --- /dev/null +++ b/docs/en/docs/sql-manual/sql-functions/numeric-functions/uuid_numeric.md @@ -0,0 +1,51 @@ +--- +{ + "title": "uuid_numeric", + "language": "en" +} +--- + + + +## uuid_numeric +### description +#### Syntax + +`LARGEINT uuid_numeric()` + +Returns a UUID of type `LARGEINT`. + +Note that `LARGEINT` is an Int128 type, so `uuid_numeric()` may return a negative number. 
+ +### example + +``` + +mysql> select uuid_numeric(); ++----------------------------------------+ +| uuid_numeric() | ++----------------------------------------+ +| 82218484683747862468445277894131281464 | ++----------------------------------------+ +``` + +### keywords + + UUID UUID-NUMERIC diff --git a/docs/zh-CN/docs/sql-manual/sql-functions/numeric-functions/uuid_numeric.md b/docs/zh-CN/docs/sql-manual/sql-functions/numeric-functions/uuid_numeric.md new file mode 100644 index 00000000000000..227bd6b7d35d06 --- /dev/null +++ b/docs/zh-CN/docs/sql-manual/sql-functions/numeric-functions/uuid_numeric.md @@ -0,0 +1,49 @@ +--- +{ + "title": "uuid_numeric", + "language": "zh-CN" +} +--- + + + +## uuid_numeric +### description +#### Syntax + +`LARGEINT uuid_numeric()` + +返回一个 `LARGEINT` 类型的 `uuid`。注意 `LARGEINT` 是一个 Int128,所以 `uuid_numeric()` 可能会得到负值。 + +### example + +``` + +mysql> select uuid_numeric(); ++----------------------------------------+ +| uuid_numeric() | ++----------------------------------------+ +| 82218484683747862468445277894131281464 | ++----------------------------------------+ +``` + +### keywords + + UUID UUID-NUMERIC diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/BuiltinScalarFunctions.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/BuiltinScalarFunctions.java index d65f84d8763ac9..1a2d064d2b16f4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/BuiltinScalarFunctions.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/BuiltinScalarFunctions.java @@ -345,6 +345,7 @@ import org.apache.doris.nereids.trees.expressions.functions.scalar.User; import org.apache.doris.nereids.trees.expressions.functions.scalar.UtcTimestamp; import org.apache.doris.nereids.trees.expressions.functions.scalar.Uuid; +import org.apache.doris.nereids.trees.expressions.functions.scalar.UuidNumeric; import org.apache.doris.nereids.trees.expressions.functions.scalar.Version; import org.apache.doris.nereids.trees.expressions.functions.scalar.Week; import org.apache.doris.nereids.trees.expressions.functions.scalar.WeekCeil; @@ -703,6 +704,7 @@ public class BuiltinScalarFunctions implements FunctionHelper { scalar(User.class, "user"), scalar(UtcTimestamp.class, "utc_timestamp"), scalar(Uuid.class, "uuid"), + scalar(UuidNumeric.class, "uuid_numeric"), scalar(Version.class, "version"), scalar(Week.class, "week"), scalar(WeekCeil.class, "week_ceil"), diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Nondeterministic.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Nondeterministic.java index 8fd633574095bf..88955c0c4da07c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Nondeterministic.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Nondeterministic.java @@ -24,7 +24,6 @@ * * e.g. 'rand()', 'random()'. * - * note: no 'uuid' function currently. 
*/ public interface Nondeterministic extends ExpressionTrait { @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UuidNumeric.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UuidNumeric.java new file mode 100644 index 00000000000000..3e2267b4370c74 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UuidNumeric.java @@ -0,0 +1,58 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.expressions.functions.scalar; + +import org.apache.doris.catalog.FunctionSignature; +import org.apache.doris.nereids.trees.expressions.functions.AlwaysNotNullable; +import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; +import org.apache.doris.nereids.trees.expressions.functions.Nondeterministic; +import org.apache.doris.nereids.trees.expressions.shape.LeafExpression; +import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; +import org.apache.doris.nereids.types.LargeIntType; + +import com.google.common.collect.ImmutableList; + +import java.util.List; + +/** + * ScalarFunction 'uuid_numeric'. This class is generated by GenerateFunction. + */ +public class UuidNumeric extends ScalarFunction + implements LeafExpression, ExplicitlyCastableSignature, Nondeterministic, AlwaysNotNullable { + + public static final List SIGNATURES = ImmutableList.of( + FunctionSignature.ret(LargeIntType.INSTANCE).args() + ); + + /** + * constructor with 0 argument. 
+ */ + public UuidNumeric() { + super("uuid_numeric"); + } + + @Override + public R accept(ExpressionVisitor visitor, C context) { + return visitor.visitUuidNumeric(this, context); + } + + @Override + public List getSignatures() { + return SIGNATURES; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/visitor/ScalarFunctionVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/visitor/ScalarFunctionVisitor.java index 7f3d50322bc4af..75c6644a4d3ab6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/visitor/ScalarFunctionVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/visitor/ScalarFunctionVisitor.java @@ -341,6 +341,7 @@ import org.apache.doris.nereids.trees.expressions.functions.scalar.User; import org.apache.doris.nereids.trees.expressions.functions.scalar.UtcTimestamp; import org.apache.doris.nereids.trees.expressions.functions.scalar.Uuid; +import org.apache.doris.nereids.trees.expressions.functions.scalar.UuidNumeric; import org.apache.doris.nereids.trees.expressions.functions.scalar.Version; import org.apache.doris.nereids.trees.expressions.functions.scalar.Week; import org.apache.doris.nereids.trees.expressions.functions.scalar.WeekCeil; @@ -1626,6 +1627,10 @@ default R visitUuid(Uuid uuid, C context) { return visitScalarFunction(uuid, context); } + default R visitUuidNumeric(UuidNumeric uuidNumeric, C context) { + return visitScalarFunction(uuidNumeric, context); + } + default R visitVersion(Version version, C context) { return visitScalarFunction(version, context); } diff --git a/gensrc/script/doris_builtins_functions.py b/gensrc/script/doris_builtins_functions.py index 0760064087eb71..bde00145af8cff 100644 --- a/gensrc/script/doris_builtins_functions.py +++ b/gensrc/script/doris_builtins_functions.py @@ -1977,7 +1977,8 @@ ], "UUID": [ - [['uuid'], 'VARCHAR', [], 'ALWAYS_NOT_NULLABLE'] + [['uuid'], 'VARCHAR', [], 'ALWAYS_NOT_NULLABLE'], + [['uuid_numeric'], 'LARGEINT', [], 'ALWAYS_NOT_NULLABLE'] ], #ip functions From de50fb5a462e1cd98a4fedcfa5705ba3e00418c7 Mon Sep 17 00:00:00 2001 From: xy720 <22125576+xy720@users.noreply.github.com> Date: Sat, 16 Sep 2023 18:39:11 +0800 Subject: [PATCH 13/33] [enhancement](Tablet) rename pathHashToDishInfoRef to pathHashToDiskInfoRef (#24311) --- .../src/main/java/org/apache/doris/catalog/Tablet.java | 2 +- .../java/org/apache/doris/system/SystemInfoService.java | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Tablet.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Tablet.java index 8b3bbe9ae72d95..3be373f21e7bf5 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Tablet.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Tablet.java @@ -563,7 +563,7 @@ public Pair getHealthStatusWithPriority(S // 3. 
replica is under relocating if (stable < replicationNum) { - List replicaBeIds = replicas.stream().map(Replica::getBackendId).collect(Collectors.toList()); + Set replicaBeIds = replicas.stream().map(Replica::getBackendId).collect(Collectors.toSet()); List availableBeIds = aliveBeIds.stream().filter(systemInfoService::checkBackendScheduleAvailable) .collect(Collectors.toList()); if (replicaBeIds.containsAll(availableBeIds) diff --git a/fe/fe-core/src/main/java/org/apache/doris/system/SystemInfoService.java b/fe/fe-core/src/main/java/org/apache/doris/system/SystemInfoService.java index 5cc6b36f87d082..9cf9f5ad27988e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/system/SystemInfoService.java +++ b/fe/fe-core/src/main/java/org/apache/doris/system/SystemInfoService.java @@ -77,7 +77,7 @@ public class SystemInfoService { private volatile ImmutableMap idToBackendRef = ImmutableMap.of(); private volatile ImmutableMap idToReportVersionRef = ImmutableMap.of(); - private volatile ImmutableMap pathHashToDishInfoRef = ImmutableMap.of(); + private volatile ImmutableMap pathHashToDiskInfoRef = ImmutableMap.of(); public static class HostInfo implements Comparable { public String host; @@ -848,7 +848,7 @@ public long getBackendIdByHost(String host) { */ public Status checkExceedDiskCapacityLimit(Multimap bePathsMap, boolean floodStage) { LOG.debug("pathBeMap: {}", bePathsMap); - ImmutableMap pathHashToDiskInfo = pathHashToDishInfoRef; + ImmutableMap pathHashToDiskInfo = pathHashToDiskInfoRef; for (Long beId : bePathsMap.keySet()) { for (Long pathHash : bePathsMap.get(beId)) { DiskInfo diskInfo = pathHashToDiskInfo.get(pathHash); @@ -865,7 +865,7 @@ public Status checkExceedDiskCapacityLimit(Multimap bePathsMap, bool // update the path info when disk report // there is only one thread can update path info, so no need to worry about concurrency control public void updatePathInfo(List addedDisks, List removedDisks) { - Map copiedPathInfos = Maps.newHashMap(pathHashToDishInfoRef); + Map copiedPathInfos = Maps.newHashMap(pathHashToDiskInfoRef); for (DiskInfo diskInfo : addedDisks) { copiedPathInfos.put(diskInfo.getPathHash(), diskInfo); } @@ -873,7 +873,7 @@ public void updatePathInfo(List addedDisks, List removedDisk copiedPathInfos.remove(diskInfo.getPathHash()); } ImmutableMap newPathInfos = ImmutableMap.copyOf(copiedPathInfos); - pathHashToDishInfoRef = newPathInfos; + pathHashToDiskInfoRef = newPathInfos; LOG.debug("update path infos: {}", newPathInfos); } From b7a7a05eaaeae07a3e29d098ef77292ce62f2fc6 Mon Sep 17 00:00:00 2001 From: DeadlineFen <117912096+deadlinefen@users.noreply.github.com> Date: Sat, 16 Sep 2023 18:39:52 +0800 Subject: [PATCH 14/33] [UT](binlog) Add BinlogManager unit test #24486 add BinlogManager unit test add DBBinlog unit test add TableBinlog unit test --- .../doris/binlog/BinlogManagerTest.java | 382 ++++++++++++++++++ .../apache/doris/binlog/BinlogTestUtils.java | 76 ++++ .../org/apache/doris/binlog/DbBinlogTest.java | 307 ++++++++++++++ .../doris/binlog/MockBinlogConfigCache.java | 60 +++ .../apache/doris/binlog/TableBinlogTest.java | 142 +++++++ 5 files changed, 967 insertions(+) create mode 100644 fe/fe-core/src/test/java/org/apache/doris/binlog/BinlogManagerTest.java create mode 100644 fe/fe-core/src/test/java/org/apache/doris/binlog/BinlogTestUtils.java create mode 100644 fe/fe-core/src/test/java/org/apache/doris/binlog/DbBinlogTest.java create mode 100644 fe/fe-core/src/test/java/org/apache/doris/binlog/MockBinlogConfigCache.java create mode 100644 
fe/fe-core/src/test/java/org/apache/doris/binlog/TableBinlogTest.java diff --git a/fe/fe-core/src/test/java/org/apache/doris/binlog/BinlogManagerTest.java b/fe/fe-core/src/test/java/org/apache/doris/binlog/BinlogManagerTest.java new file mode 100644 index 00000000000000..64a539a2b86613 --- /dev/null +++ b/fe/fe-core/src/test/java/org/apache/doris/binlog/BinlogManagerTest.java @@ -0,0 +1,382 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.binlog; + +import org.apache.doris.catalog.BinlogConfig; +import org.apache.doris.catalog.Database; +import org.apache.doris.catalog.Env; +import org.apache.doris.common.Config; +import org.apache.doris.common.Pair; +import org.apache.doris.datasource.InternalCatalog; +import org.apache.doris.persist.BinlogGcInfo; +import org.apache.doris.thrift.TBinlog; +import org.apache.doris.thrift.TBinlogType; +import org.apache.doris.thrift.TStatus; +import org.apache.doris.thrift.TStatusCode; + +import com.google.common.collect.Maps; +import mockit.Mock; +import mockit.MockUp; +import org.apache.hadoop.util.Lists; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.List; +import java.util.Map; + +public class BinlogManagerTest { + private Map> frameWork; + + private int dbNum = 2; + private int tableNumPerDb = 3; + + private long dbBaseId = 10000; + private long tableBaseId = 100; + private long baseNum = 10000; + private long timeNow = baseNum; + private long ttl = 3; + + private boolean enableDbBinlog = false; + + @BeforeClass + public static void beforeClass() { + Config.enable_feature_binlog = true; + } + + @Before + public void setUp() { + Assert.assertTrue(tableNumPerDb < 100); + frameWork = Maps.newHashMap(); + for (int dbOff = 1; dbOff <= dbNum; ++dbOff) { + long dbId = dbOff * dbBaseId; + List tableIds = Lists.newArrayList(); + for (int tblOff = 1; tblOff <= tableNumPerDb; ++tblOff) { + tableIds.add(tableBaseId * tblOff + dbId); + } + frameWork.put(dbId, tableIds); + } + + new MockUp() { + @Mock + public BinlogConfig getDBBinlogConfig(long dbId) { + return new BinlogConfig(); + } + + @Mock + public BinlogConfig getTableBinlogConfig(long dbId, long tableId) { + return new BinlogConfig(); + } + + @Mock + public boolean isEnableTable(long dbId, long tableId) { + return true; + } + + @Mock + public boolean isEnableDB(long dbId) { + return enableDbBinlog; + } + }; + + new MockUp() { + @Mock + public long getTtlSeconds() { 
+ return ttl; + } + + @Mock + public boolean isEnable() { + return enableDbBinlog; + } + }; + + new MockUp() { + @Mock + public InternalCatalog getCurrentInternalCatalog() { + return new InternalCatalog(); + } + }; + + new MockUp() { + @Mock + public Database getDbNullable(long dbId) { + return new Database(); + } + }; + + new MockUp() { + @Mock + public BinlogConfig getBinlogConfig() { + return new BinlogConfig(); + } + }; + } + + @Test + public void testGetBinlog() + throws NoSuchMethodException, InvocationTargetException, IllegalAccessException { + // reflect BinlogManager + Method addBinlog = BinlogManager.class.getDeclaredMethod("addBinlog", TBinlog.class); + addBinlog.setAccessible(true); + + // init binlog manager & addBinlog + BinlogManager manager = new BinlogManager(); + + // insert table binlogs + int binlogNum = 10; + for (int i = 1; i <= binlogNum; ++i) { + TBinlog binlog = BinlogTestUtils.newBinlog(dbBaseId, tableBaseId, i, i); + if (i % 2 == 0) { + binlog.setType(TBinlogType.CREATE_TABLE); + } + addBinlog.invoke(manager, binlog); + + } + + // test get + Pair pair; + + // get too old + pair = manager.getBinlog(dbBaseId, tableBaseId, -99); + Assert.assertEquals(TStatusCode.BINLOG_TOO_OLD_COMMIT_SEQ, pair.first.getStatusCode()); + Assert.assertEquals(TBinlogType.DUMMY, pair.second.getType()); + + // get odd commit seq in table level ok + pair = manager.getBinlog(dbBaseId, tableBaseId, 5); + Assert.assertEquals(TStatusCode.OK, pair.first.getStatusCode()); + Assert.assertEquals(5 + 2, pair.second.getCommitSeq()); + + // get even commit seq in table level ok + pair = manager.getBinlog(dbBaseId, tableBaseId, 6); + Assert.assertEquals(TStatusCode.OK, pair.first.getStatusCode()); + Assert.assertEquals(6 + 1, pair.second.getCommitSeq()); + + // get odd commit seq in db level ok + pair = manager.getBinlog(dbBaseId, -1, 5); + Assert.assertEquals(TStatusCode.OK, pair.first.getStatusCode()); + Assert.assertEquals(5 + 1, pair.second.getCommitSeq()); + + // get even commit seq in db level ok + pair = manager.getBinlog(dbBaseId, -1, 6); + Assert.assertEquals(TStatusCode.OK, pair.first.getStatusCode()); + Assert.assertEquals(6 + 1, pair.second.getCommitSeq()); + + // get too new + pair = manager.getBinlog(dbBaseId, tableBaseId, 999); + Assert.assertEquals(TStatusCode.BINLOG_TOO_NEW_COMMIT_SEQ, pair.first.getStatusCode()); + Assert.assertNull(pair.second); + } + + @Test + public void testPersist() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException, + IOException, NoSuchFieldException { + // reflect BinlogManager + // addBinlog method + Method addBinlog = BinlogManager.class.getDeclaredMethod("addBinlog", TBinlog.class); + addBinlog.setAccessible(true); + // dbBinlogMap + Field dbBinlogMapField = BinlogManager.class.getDeclaredField("dbBinlogMap"); + dbBinlogMapField.setAccessible(true); + + // init binlog manager & addBinlog + BinlogManager originManager = new BinlogManager(); + + // insert binlogs + long commitSeq = baseNum; + for (Map.Entry> dbEntry : frameWork.entrySet()) { + long dbId = dbEntry.getKey(); + for (long tableId : dbEntry.getValue()) { + addBinlog.invoke(originManager, BinlogTestUtils.newBinlog(dbId, tableId, commitSeq, commitSeq)); + ++commitSeq; + } + } + + // init output stream + ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream(); + DataOutputStream outputStream = new DataOutputStream(arrayOutputStream); + + // serialize binlogs + originManager.write(outputStream, 0L); + + // init another binlog manager + BinlogManager 
newManager = new BinlogManager(); + + // deserialize binlogs + ByteArrayInputStream arrayInputStream = new ByteArrayInputStream(arrayOutputStream.toByteArray()); + DataInputStream inputStream = new DataInputStream(arrayInputStream); + newManager.read(inputStream, 0L); + + // get origin & new dbbinlog's allbinlogs + Map originDbBinlogMap = (Map) dbBinlogMapField.get(originManager); + Map newDbBinlogMap = (Map) dbBinlogMapField.get(newManager); + Assert.assertEquals(originDbBinlogMap.size(), newDbBinlogMap.size()); + for (long dbId : frameWork.keySet()) { + List originBinlogList = Lists.newArrayList(); + List newBinlogList = Lists.newArrayList(); + originDbBinlogMap.get(dbId).getAllBinlogs(originBinlogList); + newDbBinlogMap.get(dbId).getAllBinlogs(newBinlogList); + Assert.assertEquals(originBinlogList.size(), newBinlogList.size()); + for (int i = 0; i < originBinlogList.size(); ++i) { + Assert.assertEquals(originBinlogList.get(i).getCommitSeq(), + newBinlogList.get(i).getCommitSeq()); + } + } + } + + @Test + public void testReplayGcFromTableLevel() throws NoSuchMethodException, InvocationTargetException, + IllegalAccessException, NoSuchFieldException { + // MockUp + new MockUp() { + @Mock + public long getExpiredMs(long ttl) { + return timeNow - ttl; + } + }; + + // reflect BinlogManager + // addBinlog method + Method addBinlog = BinlogManager.class.getDeclaredMethod("addBinlog", TBinlog.class); + addBinlog.setAccessible(true); + // dbBinlogMap + Field dbBinlogMapField = BinlogManager.class.getDeclaredField("dbBinlogMap"); + dbBinlogMapField.setAccessible(true); + + // init binlog origin & new manager + BinlogManager originManager = new BinlogManager(); + BinlogManager newManager = new BinlogManager(); + + // insert binlogs + long commitSeq = 0; + for (Map.Entry> dbEntry : frameWork.entrySet()) { + long dbId = dbEntry.getKey(); + for (long tableId : dbEntry.getValue()) { + if ((tableId / tableBaseId) % 2 != 0) { + addBinlog.invoke(originManager, BinlogTestUtils.newBinlog(dbId, tableId, commitSeq, timeNow)); + addBinlog.invoke(newManager, BinlogTestUtils.newBinlog(dbId, tableId, commitSeq, timeNow)); + ++commitSeq; + } else { + addBinlog.invoke(originManager, BinlogTestUtils.newBinlog(dbId, tableId, 0, 0)); + addBinlog.invoke(newManager, BinlogTestUtils.newBinlog(dbId, tableId, 0, 0)); + } + } + } + + // origin manager gc & get BinlogGcInfo + BinlogGcInfo info = new BinlogGcInfo(originManager.gc()); + + // new manager replay gc + newManager.replayGc(info); + + // get origin & new dbbinlog's allbinlogs + Map originDbBinlogMap = (Map) dbBinlogMapField.get(originManager); + Map newDbBinlogMap = (Map) dbBinlogMapField.get(newManager); + Assert.assertEquals(originDbBinlogMap.size(), newDbBinlogMap.size()); + for (long dbId : frameWork.keySet()) { + List originBinlogList = Lists.newArrayList(); + List newBinlogList = Lists.newArrayList(); + originDbBinlogMap.get(dbId).getAllBinlogs(originBinlogList); + newDbBinlogMap.get(dbId).getAllBinlogs(newBinlogList); + Assert.assertEquals(originBinlogList.size(), newBinlogList.size()); + for (int i = 0; i < originBinlogList.size(); ++i) { + TBinlog originBinlog = originBinlogList.get(i); + TBinlog newBinlog = newBinlogList.get(i); + Assert.assertEquals(originBinlog.getCommitSeq(), newBinlog.getCommitSeq()); + if (newBinlog.getType() != TBinlogType.DUMMY) { + Assert.assertTrue(newBinlog.getTimestamp() > timeNow - ttl); + } + } + } + } + + @Test + public void testReplayGcFromDbLevel() throws NoSuchMethodException, InvocationTargetException, + 
IllegalAccessException, NoSuchFieldException { + // MockUp + new MockUp() { + @Mock + public long getExpiredMs(long ttl) { + return timeNow - ttl; + } + }; + + // set dbBinlogEnable + enableDbBinlog = true; + + // reflect BinlogManager + // addBinlog method + Method addBinlog = BinlogManager.class.getDeclaredMethod("addBinlog", TBinlog.class); + addBinlog.setAccessible(true); + // dbBinlogMap + Field dbBinlogMapField = BinlogManager.class.getDeclaredField("dbBinlogMap"); + dbBinlogMapField.setAccessible(true); + + // init binlog origin & new manager + BinlogManager originManager = new BinlogManager(); + BinlogManager newManager = new BinlogManager(); + + // insert binlogs + long commitSeq = baseNum; + for (Map.Entry> dbEntry : frameWork.entrySet()) { + long dbId = dbEntry.getKey(); + for (long tableId : dbEntry.getValue()) { + ++commitSeq; + addBinlog.invoke(originManager, BinlogTestUtils.newBinlog(dbId, tableId, commitSeq, commitSeq)); + addBinlog.invoke(newManager, BinlogTestUtils.newBinlog(dbId, tableId, commitSeq, commitSeq)); + } + } + timeNow = commitSeq; + + // origin manager gc & get BinlogGcInfo + BinlogGcInfo info = new BinlogGcInfo(originManager.gc()); + + // new manager replay gc + newManager.replayGc(info); + + // get origin & new dbbinlog's allbinlogs + Map originDbBinlogMap = (Map) dbBinlogMapField.get(originManager); + Map newDbBinlogMap = (Map) dbBinlogMapField.get(newManager); + Assert.assertEquals(originDbBinlogMap.size(), newDbBinlogMap.size()); + for (Map.Entry> dbEntry : frameWork.entrySet()) { + long dbId = dbEntry.getKey(); + List originBinlogList = Lists.newArrayList(); + List newBinlogList = Lists.newArrayList(); + originDbBinlogMap.get(dbId).getAllBinlogs(originBinlogList); + newDbBinlogMap.get(dbId).getAllBinlogs(newBinlogList); + Assert.assertEquals(originBinlogList.size(), newBinlogList.size()); + for (int i = 0; i < originBinlogList.size(); ++i) { + TBinlog originBinlog = originBinlogList.get(i); + TBinlog newBinlog = newBinlogList.get(i); + Assert.assertEquals(originBinlog.getCommitSeq(), newBinlog.getCommitSeq()); + if (newBinlog.getType() != TBinlogType.DUMMY) { + Assert.assertTrue(newBinlog.getCommitSeq() > timeNow - ttl); + } + } + } + } +} diff --git a/fe/fe-core/src/test/java/org/apache/doris/binlog/BinlogTestUtils.java b/fe/fe-core/src/test/java/org/apache/doris/binlog/BinlogTestUtils.java new file mode 100644 index 00000000000000..af5eabfa3d6192 --- /dev/null +++ b/fe/fe-core/src/test/java/org/apache/doris/binlog/BinlogTestUtils.java @@ -0,0 +1,76 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.binlog; + +import org.apache.doris.catalog.BinlogConfig; +import org.apache.doris.thrift.TBinlog; +import org.apache.doris.thrift.TBinlogType; + +import com.google.common.collect.Maps; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public class BinlogTestUtils { + + public static final long MAX_BYTES = 0x7fffffffffffffffL; + public static final long MAX_HISTORY_NUMS = 0x7fffffffffffffffL; + + public static BinlogConfig newTestBinlogConfig(boolean enableBinlog, long expiredTime) { + return new BinlogConfig(enableBinlog, expiredTime, MAX_BYTES, MAX_HISTORY_NUMS); + } + + public static BinlogConfigCache newMockBinlogConfigCache(long dbId, long tableId, long expiredTime) { + BinlogConfig binlogConfig = newTestBinlogConfig(true, expiredTime); + return new MockBinlogConfigCache( + Collections.singletonMap(String.format("%d_%d", dbId, tableId), binlogConfig)); + } + + public static MockBinlogConfigCache newMockBinlogConfigCache(Map ttlMap) { + Map configMap = Maps.newHashMap(); + for (Map.Entry entry : ttlMap.entrySet()) { + configMap.put(entry.getKey(), newTestBinlogConfig(true, entry.getValue())); + } + return new MockBinlogConfigCache(configMap); + } + + public static TBinlog newBinlog(long dbId, long tableId, long commitSeq, long timestamp) { + TBinlog binlog = new TBinlog(); + binlog.setDbId(dbId); + binlog.setTableIds(Collections.singletonList(tableId)); + binlog.setType(TBinlogType.ALTER_JOB); + binlog.setCommitSeq(commitSeq); + binlog.setTimestamp(timestamp); + binlog.setTableRef(0); + binlog.setBelong(-1); + return binlog; + } + + public static TBinlog newBinlog(long dbId, List tableIds, long commitSeq, long timestamp) { + TBinlog binlog = new TBinlog(); + binlog.setDbId(dbId); + binlog.setTableIds(tableIds); + binlog.setType(TBinlogType.ALTER_JOB); + binlog.setCommitSeq(commitSeq); + binlog.setTimestamp(timestamp); + binlog.setTableRef(0); + binlog.setBelong(-1); + return binlog; + } +} diff --git a/fe/fe-core/src/test/java/org/apache/doris/binlog/DbBinlogTest.java b/fe/fe-core/src/test/java/org/apache/doris/binlog/DbBinlogTest.java new file mode 100644 index 00000000000000..b57bde598e5ab6 --- /dev/null +++ b/fe/fe-core/src/test/java/org/apache/doris/binlog/DbBinlogTest.java @@ -0,0 +1,307 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.binlog; + +import org.apache.doris.thrift.TBinlog; +import org.apache.doris.thrift.TBinlogType; + +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import mockit.Mock; +import mockit.MockUp; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.lang.reflect.Field; +import java.util.List; +import java.util.Map; +import java.util.TreeSet; + +public class DbBinlogTest { + private long dbId = 10000L; + private long baseTableId = 20000L; + private int tableNum = 5; + private int gcTableNum = 2; + private List tableIds; + + private int totalBinlogNum = 10; + private int expiredBinlogNum = 3; + private long baseNum = 30000L; + + @Before + public void setUp() { + // check args valid + Assert.assertTrue(totalBinlogNum > 0); + Assert.assertTrue(gcTableNum <= tableNum); + Assert.assertTrue(expiredBinlogNum <= totalBinlogNum); + + // gen tableIds + tableIds = Lists.newArrayList(); + for (int i = 0; i < tableNum; ++i) { + tableIds.add(baseTableId + i); + } + + new MockUp() { + @Mock + public long getExpiredMs(long direct) { + return direct; + } + }; + } + + @Test + public void testTableTtlGcCommonCase() { + // init base data + long expiredTime = baseNum + expiredBinlogNum; + Map ttlMap = Maps.newHashMap(); + for (int i = 0; i < tableNum; ++i) { + String key = String.format("%d_%d", dbId, baseTableId + i); + if (i <= gcTableNum) { + ttlMap.put(key, expiredTime); + } else { + ttlMap.put(key, 0L); + } + } + MockBinlogConfigCache binlogConfigCache = BinlogTestUtils.newMockBinlogConfigCache(ttlMap); + binlogConfigCache.addDbBinlogConfig(dbId, false, 0L); + + // init & add binlogs + List testBinlogs = Lists.newArrayList(); + Long[] tableLastCommitInfo = new Long[tableNum]; + long maxGcTableId = baseTableId + gcTableNum; + long expiredCommitSeq = -1; + for (int i = 0; i < totalBinlogNum; ++i) { + long tableId = baseTableId + (i / tableNum); + long commitSeq = baseNum + i; + if (tableId <= maxGcTableId) { + expiredCommitSeq = commitSeq; + } + tableLastCommitInfo[i / tableNum] = commitSeq; + TBinlog binlog = BinlogTestUtils.newBinlog(dbId, tableId, commitSeq, baseNum); + testBinlogs.add(binlog); + } + + // init DbBinlog + DBBinlog dbBinlog = null; + + // insert binlogs + for (int i = 0; i < totalBinlogNum; ++i) { + if (dbBinlog == null) { + dbBinlog = new DBBinlog(binlogConfigCache, testBinlogs.get(i)); + } + dbBinlog.addBinlog(testBinlogs.get(i)); + } + + // trigger gc + BinlogTombstone tombstone = dbBinlog.gc(); + + // check binlog status + for (TBinlog binlog : testBinlogs) { + if (binlog.getTableIds().get(0) <= baseTableId + gcTableNum) { + Assert.assertEquals(0, binlog.getTableRef()); + } else { + Assert.assertEquals(1, binlog.getTableRef()); + } + } + + // check dummy binlog + List allBinlogs = Lists.newArrayList(); + dbBinlog.getAllBinlogs(allBinlogs); + for (TBinlog binlog : allBinlogs) { + if (binlog.getType() != TBinlogType.DUMMY) { + break; + } + long belong = binlog.getBelong(); + if (belong < 0) { + Assert.assertEquals(expiredCommitSeq, binlog.getCommitSeq()); + } else if (belong <= maxGcTableId) { + int offset = (int) (belong - baseTableId); + Assert.assertEquals((long) tableLastCommitInfo[offset], binlog.getCommitSeq()); + } else { + Assert.assertEquals(-1, binlog.getCommitSeq()); + } + } + + // check tombstone + Assert.assertFalse(tombstone.isDbBinlogTomstone()); + Assert.assertEquals(expiredCommitSeq, tombstone.getCommitSeq()); + } + + @Test + public void testTableTtlGcBinlogMultiRefCase() { + // 
init base data + long expiredTime = baseNum + expiredBinlogNum; + Map ttlMap = Maps.newHashMap(); + for (int i = 0; i < tableNum; ++i) { + String key = String.format("%d_%d", dbId, baseTableId + i); + if (i < tableNum - 1) { + ttlMap.put(key, expiredTime); + } else { + ttlMap.put(key, 0L); + } + } + MockBinlogConfigCache binlogConfigCache = BinlogTestUtils.newMockBinlogConfigCache(ttlMap); + binlogConfigCache.addDbBinlogConfig(dbId, false, 0L); + + // init & add binlogs + List testBinlogs = Lists.newArrayList(); + for (int i = 0; i < totalBinlogNum; ++i) { + // generate tableIds + long tableId = baseTableId + (i / (tableNum - 1)); + long additionalTableId = (long) (Math.random() * tableNum) + baseTableId; + while (tableId == additionalTableId) { + additionalTableId = (long) (Math.random() * tableNum) + baseTableId; + } + List tableIds = Lists.newArrayList(tableId, additionalTableId); + // init commitSeq + long commitSeq = baseNum + i; + + TBinlog binlog = BinlogTestUtils.newBinlog(dbId, tableIds, commitSeq, baseNum); + testBinlogs.add(binlog); + } + + // init dbBinlog + DBBinlog dbBinlog = null; + + // ad additional ref & add to dbBinlog + for (int i = 0; i < totalBinlogNum; ++i) { + TBinlog binlog = testBinlogs.get(i); + if (dbBinlog == null) { + dbBinlog = new DBBinlog(binlogConfigCache, binlog); + } + dbBinlog.addBinlog(binlog); + } + + // trigger gc + dbBinlog.gc(); + + // check binlog status + long unGcTableId = baseTableId + tableNum - 1; + for (TBinlog binlog : testBinlogs) { + if (binlog.getTableIds().contains(unGcTableId)) { + Assert.assertEquals(1, binlog.getTableRef()); + } else { + Assert.assertEquals(0, binlog.getTableRef()); + } + } + } + + @Test + public void testTableCommitSeqGc() { + // init base data + long expiredTime = baseNum + expiredBinlogNum; + Map ttlMap = Maps.newHashMap(); + MockBinlogConfigCache binlogConfigCache = BinlogTestUtils.newMockBinlogConfigCache(ttlMap); + binlogConfigCache.addDbBinlogConfig(dbId, true, expiredTime); + + // init & add binlogs + List testBinlogs = Lists.newArrayList(); + for (int i = 0; i < totalBinlogNum; ++i) { + // generate tableIds + long tableId = baseTableId + (i / (tableNum - 1)); + long additionalTableId = (long) (Math.random() * tableNum) + baseTableId; + while (tableId == additionalTableId) { + additionalTableId = (long) (Math.random() * tableNum) + baseTableId; + } + List tableIds = Lists.newArrayList(tableId, additionalTableId); + // init stamp + long stamp = baseNum + i; + + TBinlog binlog = BinlogTestUtils.newBinlog(dbId, tableIds, stamp, stamp); + testBinlogs.add(binlog); + } + + // init dbBinlog + DBBinlog dbBinlog = null; + + // ad additional ref & add to dbBinlog + for (int i = 0; i < totalBinlogNum; ++i) { + TBinlog binlog = testBinlogs.get(i); + if (dbBinlog == null) { + dbBinlog = new DBBinlog(binlogConfigCache, binlog); + } + dbBinlog.addBinlog(binlog); + } + + // trigger gc + dbBinlog.gc(); + + // check binlog status + for (TBinlog binlog : testBinlogs) { + if (binlog.getTimestamp() <= expiredTime) { + Assert.assertEquals(0, binlog.getTableRef()); + } else { + Assert.assertTrue(binlog.getTableRef() != 0); + } + } + } + + @Test + public void testAddBinlog() throws NoSuchFieldException, IllegalAccessException { + // set max value num + int maxValue = 12; + + // mock up + new MockUp() { + @Mock + boolean isEnableDB(long dbId) { + return true; + } + + @Mock + boolean isEnableTable(long dbId, long tableId) { + return true; + } + }; + + // reflect field + Field allBinlogsField = 
DBBinlog.class.getDeclaredField("allBinlogs"); + allBinlogsField.setAccessible(true); + Field tableBinlogMapField = DBBinlog.class.getDeclaredField("tableBinlogMap"); + tableBinlogMapField.setAccessible(true); + + + for (int i = 0; i <= maxValue; ++i) { + TBinlogType type = TBinlogType.findByValue(i); + if (type == TBinlogType.DUMMY) { + continue; + } + TBinlog binlog = BinlogTestUtils.newBinlog(dbId, baseTableId, 1, 1); + binlog.setType(type); + DBBinlog dbBinlog = new DBBinlog(new BinlogConfigCache(), binlog); + + dbBinlog.addBinlog(binlog); + + TreeSet allbinlogs = (TreeSet) allBinlogsField.get(dbBinlog); + Map tableBinlogMap = (Map) tableBinlogMapField.get(dbBinlog); + Assert.assertTrue(allbinlogs.contains(binlog)); + switch (type) { + case CREATE_TABLE: + case DROP_TABLE: { + Assert.assertTrue(tableBinlogMap.isEmpty()); + break; + } + default: { + Assert.assertTrue(tableBinlogMap.containsKey(baseTableId)); + break; + } + } + } + } +} diff --git a/fe/fe-core/src/test/java/org/apache/doris/binlog/MockBinlogConfigCache.java b/fe/fe-core/src/test/java/org/apache/doris/binlog/MockBinlogConfigCache.java new file mode 100644 index 00000000000000..4622171e9300b9 --- /dev/null +++ b/fe/fe-core/src/test/java/org/apache/doris/binlog/MockBinlogConfigCache.java @@ -0,0 +1,60 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.binlog; + +import org.apache.doris.catalog.BinlogConfig; + +import java.util.Map; + +final class MockBinlogConfigCache extends BinlogConfigCache { + private Map mockedConfigs; + + public MockBinlogConfigCache(Map mockedConfigs) { + super(); + this.mockedConfigs = mockedConfigs; + } + + public void addDbBinlogConfig(long dbId, boolean enableBinlog, long expiredTime) { + BinlogConfig config = BinlogTestUtils.newTestBinlogConfig(enableBinlog, expiredTime); + mockedConfigs.put(String.valueOf(dbId), config); + } + + @Override + public BinlogConfig getTableBinlogConfig(long dbId, long tableId) { + return mockedConfigs.get(String.format("%d_%d", dbId, tableId)); + } + + @Override + public BinlogConfig getDBBinlogConfig(long dbId) { + return mockedConfigs.get(String.valueOf(dbId)); + } + + @Override + public boolean isEnableTable(long dbId, long tableId) { + return mockedConfigs.containsKey(String.format("%d_%d", dbId, tableId)); + } + + @Override + public boolean isEnableDB(long dbId) { + BinlogConfig config = mockedConfigs.get(String.valueOf(dbId)); + if (config != null) { + return config.isEnable(); + } + return false; + } +} diff --git a/fe/fe-core/src/test/java/org/apache/doris/binlog/TableBinlogTest.java b/fe/fe-core/src/test/java/org/apache/doris/binlog/TableBinlogTest.java new file mode 100644 index 00000000000000..b4ecd8a90c5c73 --- /dev/null +++ b/fe/fe-core/src/test/java/org/apache/doris/binlog/TableBinlogTest.java @@ -0,0 +1,142 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.binlog; + +import org.apache.doris.thrift.TBinlog; + +import com.google.common.collect.Lists; +import mockit.Mock; +import mockit.MockUp; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.util.List; + +public class TableBinlogTest { + private long dbId = 10000; + private long tableId = 20000; + + private int totalBinlogNum = 10; + private int expiredBinlogNum = 3; + private long baseNum = 30000L; + + @Before + public void setUp() { + // check args valid + Assert.assertTrue(expiredBinlogNum <= totalBinlogNum); + } + + @Test + public void testTtlGc() { + // mock BinlogUtils + new MockUp() { + @Mock + public long getExpiredMs(long direct) { + return direct; + } + }; + + // init base data + long expiredTime = baseNum + expiredBinlogNum; + BinlogConfigCache binlogConfigCache = BinlogTestUtils.newMockBinlogConfigCache(dbId, tableId, expiredTime); + + // init & add binlogs + List testBinlogs = Lists.newArrayList(); + for (int i = 0; i < totalBinlogNum; ++i) { + TBinlog binlog = BinlogTestUtils.newBinlog(dbId, tableId, baseNum + i, baseNum + i); + testBinlogs.add(binlog); + } + + // init TableBinlog + TableBinlog tableBinlog = null; + + // insert binlogs + for (int i = 0; i < totalBinlogNum; ++i) { + if (tableBinlog == null) { + tableBinlog = new TableBinlog(binlogConfigCache, testBinlogs.get(i), dbId, tableId); + } + tableBinlog.addBinlog(testBinlogs.get(i)); + } + + // trigger ttlGc + BinlogTombstone tombstone = tableBinlog.ttlGc(); + + // check binlog status + for (TBinlog binlog : testBinlogs) { + if (binlog.getTimestamp() <= expiredTime) { + Assert.assertEquals(0, binlog.getTableRef()); + } else { + Assert.assertEquals(1, binlog.getTableRef()); + } + } + + // check tombstone + Assert.assertFalse(tombstone.isDbBinlogTomstone()); + Assert.assertEquals(expiredTime, tombstone.getCommitSeq()); + + // check dummy + TBinlog dummy = tableBinlog.getDummyBinlog(); + Assert.assertEquals(expiredTime, dummy.getCommitSeq()); + } + + @Test + public void testCommitSeqGc() { + // init base data + BinlogConfigCache binlogConfigCache = BinlogTestUtils.newMockBinlogConfigCache(dbId, tableId, 0); + + // init & add binlogs + List testBinlogs = Lists.newArrayList(); + for (int i = 0; i < totalBinlogNum; ++i) { + TBinlog binlog = BinlogTestUtils.newBinlog(dbId, tableId, baseNum + i, baseNum + i); + testBinlogs.add(binlog); + } + + // init TableBinlog + TableBinlog tableBinlog = null; + + // insert binlogs + for (int i = 0; i < totalBinlogNum; ++i) { + if (tableBinlog == null) { + tableBinlog = new TableBinlog(binlogConfigCache, testBinlogs.get(i), dbId, tableId); + } + tableBinlog.addBinlog(testBinlogs.get(i)); + } + + // trigger ttlGc + long expiredCommitSeq = baseNum + expiredBinlogNum; + BinlogTombstone tombstone = tableBinlog.commitSeqGc(expiredCommitSeq); + + // check binlog status + for (TBinlog binlog : testBinlogs) { + if (binlog.getTimestamp() <= expiredCommitSeq) { + Assert.assertEquals(0, binlog.getTableRef()); + } else { + Assert.assertEquals(1, binlog.getTableRef()); + } + } + + // check tombstone + Assert.assertFalse(tombstone.isDbBinlogTomstone()); + Assert.assertEquals(expiredCommitSeq, tombstone.getCommitSeq()); + + // check dummy + TBinlog dummy = tableBinlog.getDummyBinlog(); + Assert.assertEquals(expiredCommitSeq, dummy.getCommitSeq()); + } +} From dd2c4550578d687598d4727a815de3dc4cbddf2f Mon Sep 17 00:00:00 2001 From: Pxl Date: Sat, 16 Sep 2023 18:46:11 +0800 Subject: [PATCH 15/33] [Chore](checks) set sonar-project.properties 
(#24473) set sonar-project.properties --- sonar-project.properties | 22 ++++++++++++++++++++++ .github/workflows/sonarcloud.yml | 8 ++------ 2 files changed, 24 insertions(+), 6 deletions(-) create mode 100644 sonar-project.properties diff --git a/ sonar-project.properties b/ sonar-project.properties new file mode 100644 index 00000000000000..4d826c71ac965b --- /dev/null +++ b/ sonar-project.properties @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +sonar.host.url=https://sonarcloud.io +sonar.projectKey=apache_incubator-doris +sonar.organization=apache +sonar.sources=be diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml index 67b3073701d319..d9728dcb344a6f 100644 --- a/.github/workflows/sonarcloud.yml +++ b/.github/workflows/sonarcloud.yml @@ -103,7 +103,7 @@ jobs: MAVEN_OPTS: -Xmx4g run: | cd fe - mvn --batch-mode verify sonar:sonar -DskipTests -Dsonar.host.url=https://sonarcloud.io -Dsonar.organization=apache -Dsonar.projectKey=apache_incubator-doris -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 + mvn --batch-mode verify sonar:sonar -DskipTests -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.httpconnectionManager.ttlSeconds=120 sonar-cloud-cpp: name: "SonarCloud on cpp" runs-on: ubuntu-22.04 @@ -154,9 +154,5 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - run: sonar-scanner \ - -Dsonar.cfamily.compile-commands=be/build_Release/compile_commands.json \ - -Dsonar.organization=apache \ - -Dsonar.projectKey=apache_incubator-doris \ - -Dsonar.sources=be + run: sonar-scanner -Dsonar.cfamily.compile-commands=be/build_Release/compile_commands.json -Dsonar.sources=be From a2efa650ec99b0616bb7a26c36da3530c80a2ef8 Mon Sep 17 00:00:00 2001 From: shuke <37901441+shuke987@users.noreply.github.com> Date: Sat, 16 Sep 2023 20:29:49 +0800 Subject: [PATCH 16/33] [catalog lock](log) enable info log level on catalog lock (#24471) --- fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java index d648776980b280..2c80226e3d2962 100755 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java @@ -859,7 +859,7 @@ private boolean tryLock(boolean mustLock) { // to see which thread held this lock for long time. 
Thread owner = lock.getOwner(); if (owner != null) { - LOG.debug("catalog lock is held by: {}", Util.dumpThread(owner, 10)); + LOG.info("catalog lock is held by: {}", Util.dumpThread(owner, 10)); } } From 88adab311493c75801298c9603da1fb0f1224180 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B0=A2=E5=81=A5?= Date: Sat, 16 Sep 2023 20:39:15 +0800 Subject: [PATCH 17/33] [fix](Nereids): fix be core when array_map is not nullable (#24488) fix be core when array_map is not nullable --- .../lambda_function/varray_map_function.cpp | 65 +++-- .../java/org/apache/doris/analysis/Expr.java | 8 + .../analysis/LambdaFunctionCallExpr.java | 2 +- .../doris/analysis/LambdaFunctionExpr.java | 1 + .../glue/translator/ExpressionTranslator.java | 5 +- .../nereids/processor/post/Validator.java | 6 +- .../functions/scalar/ArrayMap.java | 10 +- .../expressions/functions/scalar/Lambda.java | 10 +- .../scalar_function/Array.out | 232 ++++++++++++++++++ .../scalar_function/Array.groovy | 18 ++ 10 files changed, 312 insertions(+), 45 deletions(-) diff --git a/be/src/vec/exprs/lambda_function/varray_map_function.cpp b/be/src/vec/exprs/lambda_function/varray_map_function.cpp index d32bb094f8240f..28971da75b1e9a 100644 --- a/be/src/vec/exprs/lambda_function/varray_map_function.cpp +++ b/be/src/vec/exprs/lambda_function/varray_map_function.cpp @@ -82,6 +82,7 @@ class ArrayMapFunction : public LambdaFunction { MutableColumnPtr array_column_offset; int nested_array_column_rows = 0; ColumnPtr first_array_offsets = nullptr; + //2. get the result column from executed expr, and the needed is nested column of array Block lambda_block; for (int i = 0; i < arguments.size(); ++i) { @@ -141,12 +142,6 @@ class ArrayMapFunction : public LambdaFunction { "R" + array_column_type_name.name}; lambda_block.insert(std::move(data_column)); } - //check nullable(array(nullable(nested))) - DCHECK(result_type->is_nullable() && - is_array(((DataTypeNullable*)result_type.get())->get_nested_type())) - << "array_map result type is error, now must be nullable(array): " - << result_type->get_name() - << " ,and block structure is: " << block->dump_structure(); //3. child[0]->execute(new_block) RETURN_IF_ERROR(children[0]->execute(context, &lambda_block, result_column_id)); @@ -158,40 +153,38 @@ class ArrayMapFunction : public LambdaFunction { //4. get the result column after execution, reassemble it into a new array column, and return. 
ColumnWithTypeAndName result_arr; - if (res_type->is_nullable()) { - result_arr = {ColumnNullable::create( - ColumnArray::create(res_col, std::move(array_column_offset)), - std::move(outside_null_map)), - result_type, res_name}; - + if (result_type->is_nullable()) { + if (res_type->is_nullable()) { + result_arr = {ColumnNullable::create( + ColumnArray::create(res_col, std::move(array_column_offset)), + std::move(outside_null_map)), + result_type, res_name}; + } else { + // deal with eg: select array_map(x -> x is null, [null, 1, 2]); + // need to create the nested column null map for column array + auto nested_null_map = ColumnUInt8::create(res_col->size(), 0); + result_arr = { + ColumnNullable::create( + ColumnArray::create( + ColumnNullable::create(res_col, std::move(nested_null_map)), + std::move(array_column_offset)), + std::move(outside_null_map)), + result_type, res_name}; + } } else { - // deal with eg: select array_map(x -> x is null, [null, 1, 2]); - // need to create the nested column null map for column array - auto nested_null_map = ColumnUInt8::create(res_col->size(), 0); - result_arr = {ColumnNullable::create( - ColumnArray::create(ColumnNullable::create( - res_col, std::move(nested_null_map)), - std::move(array_column_offset)), - std::move(outside_null_map)), - result_type, res_name}; + if (res_type->is_nullable()) { + result_arr = {ColumnArray::create(res_col, std::move(array_column_offset)), + result_type, res_name}; + } else { + auto nested_null_map = ColumnUInt8::create(res_col->size(), 0); + result_arr = {ColumnArray::create( + ColumnNullable::create(res_col, std::move(nested_null_map)), + std::move(array_column_offset)), + result_type, res_name}; + } } block->insert(std::move(result_arr)); *result_column_id = block->columns() - 1; - //check nullable(nested) - DCHECK((assert_cast( - (((DataTypeNullable*)result_type.get())->get_nested_type().get()))) - ->get_nested_type() - ->equals(*make_nullable(res_type))) - << " array_map function FE given result type is: " << result_type->get_name() - << " get nested is: " - << (assert_cast( - (((DataTypeNullable*)result_type.get())->get_nested_type().get()))) - ->get_nested_type() - ->get_name() - << " and now actual nested type after calculate " << res_type->get_name() - << " ,and block structure is: " << block->dump_structure() - << " ,and lambda_block structure is: " << lambda_block.dump_structure(); - return Status::OK(); } }; diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java index 1b583425e11bad..4f5c1e596c4e00 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java @@ -2355,6 +2355,14 @@ private static boolean customNullableAlgorithm(Function fn, List children) if (fn.functionName().equalsIgnoreCase("array_sortby")) { return children.get(0).isNullable(); } + if (fn.functionName().equalsIgnoreCase("array_map")) { + for (int i = 1; i < children.size(); ++i) { + if (children.get(i).isNullable()) { + return true; + } + } + return false; + } return true; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/LambdaFunctionCallExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/LambdaFunctionCallExpr.java index 2abee785811ccc..c8b0cbc9ab5b70 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/LambdaFunctionCallExpr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/LambdaFunctionCallExpr.java @@ -113,7 +113,7 @@ public 
void analyzeImpl(Analyzer analyzer) throws AnalysisException { + lambda.debugString()); } fn = new Function(fnName, Arrays.asList(argTypes), ArrayType.create(lambda.getChild(0).getType(), true), - true, true, NullableMode.DEPEND_ON_ARGUMENT); + true, true, NullableMode.CUSTOM); } else if (fnName.getFunction().equalsIgnoreCase("array_exists") || fnName.getFunction().equalsIgnoreCase("array_first_index") || fnName.getFunction().equalsIgnoreCase("array_last_index") diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/LambdaFunctionExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/LambdaFunctionExpr.java index 434893d55407b7..3604e54226b246 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/LambdaFunctionExpr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/LambdaFunctionExpr.java @@ -159,6 +159,7 @@ public ArrayList getSlotExprs() { return slotExpr; } + @Override public boolean isNullable() { for (int i = 1; i < slotExpr.size(); ++i) { if (slotExpr.get(i).isNullable()) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/ExpressionTranslator.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/ExpressionTranslator.java index 1f34e8c6d34fc2..d120a7ece4cf36 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/ExpressionTranslator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/ExpressionTranslator.java @@ -433,10 +433,13 @@ private Expr visitHighOrderFunction(ScalarFunction function, PlanTranslatorConte .map(Expression::getDataType) .map(DataType::toCatalogDataType) .forEach(argTypes::add); + NullableMode nullableMode = function.nullable() + ? NullableMode.ALWAYS_NULLABLE + : NullableMode.ALWAYS_NOT_NULLABLE; org.apache.doris.catalog.Function catalogFunction = new Function( new FunctionName(function.getName()), argTypes, ArrayType.create(lambda.getRetType().toCatalogDataType(), true), - true, true, NullableMode.DEPEND_ON_ARGUMENT); + true, true, nullableMode); // create catalog FunctionCallExpr without analyze again Expr lambdaBody = visitLambda(lambda, context); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/processor/post/Validator.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/processor/post/Validator.java index ea24e0183b1dff..a46b454f9a147d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/processor/post/Validator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/processor/post/Validator.java @@ -19,9 +19,8 @@ import org.apache.doris.nereids.CascadesContext; import org.apache.doris.nereids.exceptions.AnalysisException; -import org.apache.doris.nereids.trees.expressions.MarkJoinSlotReference; import org.apache.doris.nereids.trees.expressions.Slot; -import org.apache.doris.nereids.trees.expressions.VirtualSlotReference; +import org.apache.doris.nereids.trees.expressions.SlotNotFromChildren; import org.apache.doris.nereids.trees.expressions.literal.BooleanLiteral; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.algebra.Aggregate; @@ -98,8 +97,7 @@ public static Optional checkAllSlotFromChildren(Plan plan) { .collect(Collectors.toSet()); Set inputSlots = plan.getInputSlots(); for (Slot slot : inputSlots) { - if (slot instanceof MarkJoinSlotReference || slot instanceof VirtualSlotReference || slot.getName() - .startsWith("mv")) { + if (slot.getName().startsWith("mv") || slot instanceof SlotNotFromChildren) { continue; } if 
(!(childOutputSet.contains(slot))) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/ArrayMap.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/ArrayMap.java index f152b7d386d89d..e64aff517fff7c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/ArrayMap.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/ArrayMap.java @@ -18,6 +18,7 @@ package org.apache.doris.nereids.trees.expressions.functions.scalar; import org.apache.doris.catalog.FunctionSignature; +import org.apache.doris.nereids.trees.expressions.ArrayItemReference; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullable; @@ -45,7 +46,7 @@ public class ArrayMap extends ScalarFunction /** * constructor with arguments. */ - public ArrayMap(Expression... arg) { + public ArrayMap(Expression arg) { super("array_map", arg); } @@ -68,6 +69,13 @@ public DataType getDataType() { return ArrayType.of(((Lambda) children.get(0)).getRetType(), true); } + @Override + public boolean nullable() { + return ((Lambda) children.get(0)).getLambdaArguments().stream() + .map(ArrayItemReference::getArrayExpression) + .anyMatch(Expression::nullable); + } + @Override public R accept(ExpressionVisitor visitor, C context) { return visitor.visitArrayMap(this, context); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java index e4df28d22d90f5..1d6c725db09d90 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java @@ -128,7 +128,10 @@ public boolean equals(Object o) { @Override public String toSql() { StringBuilder builder = new StringBuilder(); - String argStr = argumentNames.stream().collect(Collectors.joining(", ", "(", ")")); + String argStr = argumentNames.get(0); + if (argumentNames.size() > 1) { + argStr = argumentNames.stream().collect(Collectors.joining(", ", "(", ")")); + } builder.append(String.format("%s -> %s", argStr, getLambdaFunction().toString())); for (int i = 1; i < getArguments().size(); i++) { builder.append(", ").append(getArgument(i).toSql()); @@ -139,7 +142,10 @@ public String toSql() { @Override public String toString() { StringBuilder builder = new StringBuilder(); - String argStr = argumentNames.stream().collect(Collectors.joining(", ", "(", ")")); + String argStr = argumentNames.get(0); + if (argumentNames.size() > 1) { + argStr = argumentNames.stream().collect(Collectors.joining(", ", "(", ")")); + } builder.append(String.format("%s -> %s", argStr, getLambdaFunction().toString())); for (int i = 1; i < getArguments().size(); i++) { builder.append(", ").append(getArgument(i).toString()); diff --git a/regression-test/data/nereids_function_p0/scalar_function/Array.out b/regression-test/data/nereids_function_p0/scalar_function/Array.out index a8ae88dec53779..63128f0acef76b 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/Array.out +++ b/regression-test/data/nereids_function_p0/scalar_function/Array.out @@ -9714,3 +9714,235 @@ true \N \N +-- !sql_array_map_Double 
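Two small behaviour changes sit in the hunks above: ArrayMap.nullable() now reports a nullable result only when at least one of the lambda's source arrays is nullable (matching the Expr.java and ExpressionTranslator changes earlier), and Lambda.toSql()/toString() print a single lambda argument bare instead of wrapping it in parentheses. A hypothetical C++ helper reproducing the formatting rule, purely for illustration:

#include <iostream>
#include <string>
#include <vector>

// One argument prints bare ("x -> ..."); several are wrapped in parentheses.
std::string format_lambda_args(const std::vector<std::string>& names) {
    if (names.size() == 1) {
        return names.front();
    }
    std::string joined = "(";
    for (size_t i = 0; i < names.size(); ++i) {
        if (i > 0) {
            joined += ", ";
        }
        joined += names[i];
    }
    joined += ")";
    return joined;
}

int main() {
    std::cout << format_lambda_args({"x"}) << " -> x + 1\n";      // x -> x + 1
    std::cout << format_lambda_args({"x", "y"}) << " -> x + y\n"; // (x, y) -> x + y
    return 0;
}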
-- +\N +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_Double_notnull -- +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_Float -- +\N +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_Float_notnull -- +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_LargeInt -- +\N +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_LargeInt_notnull -- +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_BigInt -- +\N +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_BigInt_notnull -- +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_SmallInt -- +\N +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_SmallInt_notnull -- +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_Integer -- +\N +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_Integer_notnull -- +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_TinyInt -- +\N +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_TinyInt_notnull -- +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] +[1] + +-- !sql_array_map_DecimalV3 -- +\N +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] + +-- !sql_array_map_DecimalV3_notnull -- +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] +[1, 1] + diff --git a/regression-test/suites/nereids_function_p0/scalar_function/Array.groovy b/regression-test/suites/nereids_function_p0/scalar_function/Array.groovy index 507540e4c9fc13..f772f30338e73f 100644 --- a/regression-test/suites/nereids_function_p0/scalar_function/Array.groovy +++ b/regression-test/suites/nereids_function_p0/scalar_function/Array.groovy @@ -825,6 +825,24 @@ suite("nereids_scalar_fn_Array") { order_qt_sql_tokenize_String "select tokenize(kstr, '') from fn_test" order_qt_sql_tokenize_String_notnull "select tokenize(kstr, null) from fn_test_not_nullable" + // test array_map + order_qt_sql_array_map_Double "select array_map(x -> x is not null, kadbl) from fn_test" + order_qt_sql_array_map_Double_notnull "select array_map(x -> x is not null, kadbl) from fn_test_not_nullable" + order_qt_sql_array_map_Float "select array_map(x -> x is not null, kafloat) from fn_test" + order_qt_sql_array_map_Float_notnull "select array_map(x -> x is not null, kafloat) from fn_test_not_nullable" + order_qt_sql_array_map_LargeInt "select array_map(x -> x is not null, kalint) from fn_test" + order_qt_sql_array_map_LargeInt_notnull "select array_map(x -> x is not null, kalint) from fn_test_not_nullable" + order_qt_sql_array_map_BigInt "select array_map(x -> x is not null, kabint) from fn_test" + order_qt_sql_array_map_BigInt_notnull "select array_map(x -> x is not null, kabint) from fn_test_not_nullable" + order_qt_sql_array_map_SmallInt "select array_map(x -> x is not null, kasint) from fn_test" + order_qt_sql_array_map_SmallInt_notnull "select array_map(x -> x is not null, kasint) from fn_test_not_nullable" + order_qt_sql_array_map_Integer "select array_map(x -> x is not null, kaint) from fn_test" + order_qt_sql_array_map_Integer_notnull "select array_map(x -> x is not null, kaint) from fn_test_not_nullable" + order_qt_sql_array_map_TinyInt "select array_map(x -> x is not null, katint) from fn_test" + 
order_qt_sql_array_map_TinyInt_notnull "select array_map(x -> x is not null, katint) from fn_test_not_nullable" + order_qt_sql_array_map_DecimalV3 "select array_map(x -> x is not null, kadcml) from fn_test" + order_qt_sql_array_map_DecimalV3_notnull "select array_map(x -> x is not null, kadcml) from fn_test_not_nullable" + test { sql "select tokenize('arg1','xxx = yyy,zzz');" check{result, exception, startTime, endTime -> From 4b5cea1ef898ea20fdd6fd835e7461de3ef1ad46 Mon Sep 17 00:00:00 2001 From: daidai <2017501503@qq.com> Date: Sat, 16 Sep 2023 21:46:42 +0800 Subject: [PATCH 18/33] [enhancement](fix)change ordinary type null value is \N,complex type null value is null (#24207) --- .../serde/data_type_array_serde.cpp | 11 +- .../data_types/serde/data_type_array_serde.h | 7 +- .../serde/data_type_bitmap_serde.cpp | 11 +- .../data_types/serde/data_type_bitmap_serde.h | 7 +- .../serde/data_type_date64_serde.cpp | 24 +- .../data_types/serde/data_type_date64_serde.h | 14 +- .../serde/data_type_datetimev2_serde.cpp | 13 +- .../serde/data_type_datetimev2_serde.h | 7 +- .../serde/data_type_datev2_serde.cpp | 13 +- .../data_types/serde/data_type_datev2_serde.h | 7 +- .../serde/data_type_decimal_serde.cpp | 13 +- .../serde/data_type_decimal_serde.h | 7 +- .../serde/data_type_fixedlengthobject_serde.h | 7 +- .../data_types/serde/data_type_hll_serde.cpp | 8 +- .../data_types/serde/data_type_hll_serde.h | 7 +- .../serde/data_type_jsonb_serde.cpp | 8 +- .../data_types/serde/data_type_jsonb_serde.h | 7 +- .../data_types/serde/data_type_map_serde.cpp | 23 +- .../data_types/serde/data_type_map_serde.h | 7 +- .../serde/data_type_nullable_serde.cpp | 57 +- .../serde/data_type_nullable_serde.h | 7 +- .../serde/data_type_number_serde.cpp | 13 +- .../data_types/serde/data_type_number_serde.h | 7 +- .../data_types/serde/data_type_object_serde.h | 7 +- .../serde/data_type_quantilestate_serde.h | 7 +- be/src/vec/data_types/serde/data_type_serde.h | 25 +- .../serde/data_type_string_serde.cpp | 26 +- .../data_types/serde/data_type_string_serde.h | 7 +- .../serde/data_type_struct_serde.cpp | 15 +- .../data_types/serde/data_type_struct_serde.h | 7 +- .../serde/data_type_serde_text_test.cpp | 168 ++--- .../json_p0/test_json_load_and_function.out | 430 ++++++------- ...test_json_load_unique_key_and_function.out | 366 +++++------ .../jsonb_p0/test_jsonb_load_and_function.out | 578 +++++++++--------- ...est_jsonb_load_unique_key_and_function.out | 504 +++++++-------- .../test_map_load_and_function.out | 200 +++--- .../nereids_function_p0/agg_function/agg.out | 356 +++++------ .../nereids_function_p0/scalar_function/A.out | 4 +- .../scalar_function/Array.out | 10 +- .../nereids_function_p0/scalar_function/B.out | 4 +- .../nereids_function_p0/scalar_function/C.out | 18 +- .../nereids_function_p0/scalar_function/D.out | 4 +- .../nereids_function_p0/scalar_function/E.out | 6 +- .../nereids_function_p0/scalar_function/F.out | 8 +- .../nereids_function_p0/scalar_function/G.out | 4 +- .../nereids_function_p0/scalar_function/H.out | 4 +- .../nereids_function_p0/scalar_function/I.out | 6 +- .../nereids_function_p0/scalar_function/L.out | 20 +- .../nereids_function_p0/scalar_function/M.out | 28 +- .../nereids_function_p0/scalar_function/N.out | 12 +- .../nereids_function_p0/scalar_function/P.out | 2 +- .../nereids_function_p0/scalar_function/R.out | 24 +- .../nereids_function_p0/scalar_function/S.out | 20 +- .../nereids_function_p0/scalar_function/T.out | 6 +- .../nereids_function_p0/scalar_function/U.out | 12 +- 
.../window_function/window_fn.out | 248 ++++---- .../insert_into_table/aggregate.out | 14 +- .../insert_into_table/duplicate.out | 26 +- .../insert_into_table/no_partition.out | 48 +- .../nereids_p0/insert_into_table/unique.out | 28 +- .../pipeline/p0/conf/regression-conf.groovy | 2 +- 61 files changed, 1835 insertions(+), 1704 deletions(-) diff --git a/be/src/vec/data_types/serde/data_type_array_serde.cpp b/be/src/vec/data_types/serde/data_type_array_serde.cpp index 4c65f33285312a..15501aeaac2922 100644 --- a/be/src/vec/data_types/serde/data_type_array_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_array_serde.cpp @@ -64,16 +64,19 @@ void DataTypeArraySerDe::serialize_one_cell_to_json(const IColumn& column, int r Status DataTypeArraySerDe::deserialize_column_from_json_vector(IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { DESERIALIZE_COLUMN_FROM_JSON_VECTOR(); return Status::OK(); } Status DataTypeArraySerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { if (slice.empty()) { return Status::InvalidArgument("slice is empty!"); } + auto& array_column = assert_cast(column); auto& offsets = array_column.get_offsets(); IColumn& nested_column = array_column.get_data(); @@ -130,8 +133,8 @@ Status DataTypeArraySerDe::deserialize_one_cell_from_json(IColumn& column, Slice } int elem_deserialized = 0; - Status st = nested_serde->deserialize_column_from_json_vector(nested_column, slices, - &elem_deserialized, options); + Status st = nested_serde->deserialize_column_from_json_vector( + nested_column, slices, &elem_deserialized, options, nesting_level + 1); offsets.emplace_back(offsets.back() + elem_deserialized); return st; } diff --git a/be/src/vec/data_types/serde/data_type_array_serde.h b/be/src/vec/data_types/serde/data_type_array_serde.h index 322b540d0c5bac..0f2aab7982132d 100644 --- a/be/src/vec/data_types/serde/data_type_array_serde.h +++ b/be/src/vec/data_types/serde/data_type_array_serde.h @@ -45,11 +45,12 @@ class DataTypeArraySerDe : public DataTypeSerDe { BufferWritable& bw, FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_one_cell_from_hive_text(IColumn& column, Slice& slice, const FormatOptions& options, int nesting_level = 1) const override; diff --git a/be/src/vec/data_types/serde/data_type_bitmap_serde.cpp b/be/src/vec/data_types/serde/data_type_bitmap_serde.cpp index 0e0c5d5068ccd0..e11bdfcea29172 100644 --- a/be/src/vec/data_types/serde/data_type_bitmap_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_bitmap_serde.cpp @@ -33,14 +33,17 @@ namespace doris { namespace vectorized { class IColumn; -Status DataTypeBitMapSerDe::deserialize_column_from_json_vector( - IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { +Status DataTypeBitMapSerDe::deserialize_column_from_json_vector(IColumn& column, + std::vector& slices, + int* num_deserialized, + const 
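The serde refactor starting here threads a nesting_level parameter through deserialize_one_cell_from_json and deserialize_column_from_json_vector: row-level callers keep using the default of 1, and each container serde (array, map, struct) hands nesting_level + 1 to the serde of its elements, so a leaf serde can tell whether it is reading a plain column cell or a value nested inside a complex type. A small sketch of the pattern under assumed names (not the Doris SerDe API):

#include <iostream>
#include <string>

void deserialize_scalar(const std::string& cell, int nesting_level) {
    std::cout << "scalar '" << cell << "' read at nesting_level " << nesting_level << '\n';
}

void deserialize_array(const std::string& element, int nesting_level = 1) {
    // a real implementation would split the array text into elements first
    deserialize_scalar(element, nesting_level + 1);
}

int main() {
    deserialize_scalar("\\N", 1); // ordinary column cell, top level
    deserialize_array("null");    // element inside [...] sees nesting_level == 2
    return 0;
}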
FormatOptions& options, + int nesting_level) const { DESERIALIZE_COLUMN_FROM_JSON_VECTOR() return Status::OK(); } Status DataTypeBitMapSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { auto& data_column = assert_cast(column); auto& data = data_column.get_data(); diff --git a/be/src/vec/data_types/serde/data_type_bitmap_serde.h b/be/src/vec/data_types/serde/data_type_bitmap_serde.h index 38feadb579f3a5..8e31be880a0190 100644 --- a/be/src/vec/data_types/serde/data_type_bitmap_serde.h +++ b/be/src/vec/data_types/serde/data_type_bitmap_serde.h @@ -46,11 +46,12 @@ class DataTypeBitMapSerDe : public DataTypeSerDe { } Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; Status write_column_to_pb(const IColumn& column, PValues& result, int start, int end) const override; diff --git a/be/src/vec/data_types/serde/data_type_date64_serde.cpp b/be/src/vec/data_types/serde/data_type_date64_serde.cpp index 8a260cff772566..4e19841054adb0 100644 --- a/be/src/vec/data_types/serde/data_type_date64_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_date64_serde.cpp @@ -61,15 +61,18 @@ void DataTypeDate64SerDe::serialize_one_cell_to_json(const IColumn& column, int } } -Status DataTypeDate64SerDe::deserialize_column_from_json_vector( - IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { - DESERIALIZE_COLUMN_FROM_JSON_VECTOR() +Status DataTypeDate64SerDe::deserialize_column_from_json_vector(IColumn& column, + std::vector& slices, + int* num_deserialized, + const FormatOptions& options, + int nesting_level) const { + DESERIALIZE_COLUMN_FROM_JSON_VECTOR(); return Status::OK(); } Status DataTypeDate64SerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { auto& column_data = assert_cast(column); Int64 val = 0; if (options.date_olap_format) { @@ -128,15 +131,18 @@ void DataTypeDateTimeSerDe::serialize_one_cell_to_json(const IColumn& column, in } } -Status DataTypeDateTimeSerDe::deserialize_column_from_json_vector( - IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { +Status DataTypeDateTimeSerDe::deserialize_column_from_json_vector(IColumn& column, + std::vector& slices, + int* num_deserialized, + const FormatOptions& options, + int nesting_level) const { DESERIALIZE_COLUMN_FROM_JSON_VECTOR() return Status::OK(); } Status DataTypeDateTimeSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { auto& column_data = assert_cast(column); Int64 val = 0; if (options.date_olap_format) { diff --git a/be/src/vec/data_types/serde/data_type_date64_serde.h b/be/src/vec/data_types/serde/data_type_date64_serde.h index e544dc3313b890..52afdcd65e6d5c 100644 --- a/be/src/vec/data_types/serde/data_type_date64_serde.h +++ b/be/src/vec/data_types/serde/data_type_date64_serde.h @@ -47,11 +47,12 @@ class 
DataTypeDate64SerDe : public DataTypeNumberSerDe { void serialize_column_to_json(const IColumn& column, int start_idx, int end_idx, BufferWritable& bw, FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; void write_column_to_arrow(const IColumn& column, const NullMap* null_map, arrow::ArrayBuilder* array_builder, int start, @@ -77,10 +78,11 @@ class DataTypeDateTimeSerDe : public DataTypeDate64SerDe { FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; }; } // namespace vectorized } // namespace doris \ No newline at end of file diff --git a/be/src/vec/data_types/serde/data_type_datetimev2_serde.cpp b/be/src/vec/data_types/serde/data_type_datetimev2_serde.cpp index 107c38e56065ad..a5ef23914e53d6 100644 --- a/be/src/vec/data_types/serde/data_type_datetimev2_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_datetimev2_serde.cpp @@ -57,14 +57,17 @@ void DataTypeDateTimeV2SerDe::serialize_one_cell_to_json(const IColumn& column, } } -Status DataTypeDateTimeV2SerDe::deserialize_column_from_json_vector( - IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { - DESERIALIZE_COLUMN_FROM_JSON_VECTOR() +Status DataTypeDateTimeV2SerDe::deserialize_column_from_json_vector(IColumn& column, + std::vector& slices, + int* num_deserialized, + const FormatOptions& options, + int nesting_level) const { + DESERIALIZE_COLUMN_FROM_JSON_VECTOR(); return Status::OK(); } Status DataTypeDateTimeV2SerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { auto& column_data = assert_cast(column); UInt64 val = 0; if (options.date_olap_format) { diff --git a/be/src/vec/data_types/serde/data_type_datetimev2_serde.h b/be/src/vec/data_types/serde/data_type_datetimev2_serde.h index c556cc69fcb078..ab2c3a30f60ce5 100644 --- a/be/src/vec/data_types/serde/data_type_datetimev2_serde.h +++ b/be/src/vec/data_types/serde/data_type_datetimev2_serde.h @@ -52,11 +52,12 @@ class DataTypeDateTimeV2SerDe : public DataTypeNumberSerDe { BufferWritable& bw, FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; void write_column_to_arrow(const IColumn& column, const NullMap* null_map, arrow::ArrayBuilder* array_builder, int 
start, diff --git a/be/src/vec/data_types/serde/data_type_datev2_serde.cpp b/be/src/vec/data_types/serde/data_type_datev2_serde.cpp index 31a411e417f9b5..0718b8e7985349 100644 --- a/be/src/vec/data_types/serde/data_type_datev2_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_datev2_serde.cpp @@ -49,15 +49,18 @@ void DataTypeDateV2SerDe::serialize_one_cell_to_json(const IColumn& column, int bw.write(buf, pos - buf - 1); } -Status DataTypeDateV2SerDe::deserialize_column_from_json_vector( - IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { - DESERIALIZE_COLUMN_FROM_JSON_VECTOR() +Status DataTypeDateV2SerDe::deserialize_column_from_json_vector(IColumn& column, + std::vector& slices, + int* num_deserialized, + const FormatOptions& options, + int nesting_level) const { + DESERIALIZE_COLUMN_FROM_JSON_VECTOR(); return Status::OK(); } Status DataTypeDateV2SerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { auto& column_data = assert_cast(column); UInt32 val = 0; if (options.date_olap_format) { diff --git a/be/src/vec/data_types/serde/data_type_datev2_serde.h b/be/src/vec/data_types/serde/data_type_datev2_serde.h index a7f687f37629c1..1fa3ee70b46fb0 100644 --- a/be/src/vec/data_types/serde/data_type_datev2_serde.h +++ b/be/src/vec/data_types/serde/data_type_datev2_serde.h @@ -48,11 +48,12 @@ class DataTypeDateV2SerDe : public DataTypeNumberSerDe { BufferWritable& bw, FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; void write_column_to_arrow(const IColumn& column, const NullMap* null_map, arrow::ArrayBuilder* array_builder, int start, diff --git a/be/src/vec/data_types/serde/data_type_decimal_serde.cpp b/be/src/vec/data_types/serde/data_type_decimal_serde.cpp index c7f6fd6472f796..e70e5d4d2caf16 100644 --- a/be/src/vec/data_types/serde/data_type_decimal_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_decimal_serde.cpp @@ -58,16 +58,19 @@ void DataTypeDecimalSerDe::serialize_one_cell_to_json(const IColumn& column, } template -Status DataTypeDecimalSerDe::deserialize_column_from_json_vector( - IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { - DESERIALIZE_COLUMN_FROM_JSON_VECTOR() +Status DataTypeDecimalSerDe::deserialize_column_from_json_vector(IColumn& column, + std::vector& slices, + int* num_deserialized, + const FormatOptions& options, + int nesting_level) const { + DESERIALIZE_COLUMN_FROM_JSON_VECTOR(); return Status::OK(); } template Status DataTypeDecimalSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { auto& column_data = assert_cast&>(column).get_data(); T val = {}; ReadBuffer rb(slice.data, slice.size); diff --git a/be/src/vec/data_types/serde/data_type_decimal_serde.h b/be/src/vec/data_types/serde/data_type_decimal_serde.h index 5aa8f365030353..d20ff4c4d56420 100644 --- 
a/be/src/vec/data_types/serde/data_type_decimal_serde.h +++ b/be/src/vec/data_types/serde/data_type_decimal_serde.h @@ -76,11 +76,12 @@ class DataTypeDecimalSerDe : public DataTypeSerDe { BufferWritable& bw, FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; Status write_column_to_pb(const IColumn& column, PValues& result, int start, int end) const override; diff --git a/be/src/vec/data_types/serde/data_type_fixedlengthobject_serde.h b/be/src/vec/data_types/serde/data_type_fixedlengthobject_serde.h index 376f451c475d50..80fee3dcbd98ce 100644 --- a/be/src/vec/data_types/serde/data_type_fixedlengthobject_serde.h +++ b/be/src/vec/data_types/serde/data_type_fixedlengthobject_serde.h @@ -48,14 +48,15 @@ class DataTypeFixedLengthObjectSerDe : public DataTypeSerDe { "serialize_column_to_text with type " + column.get_name()); } Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override { + const FormatOptions& options, + int nesting_level = 1) const override { return Status::NotSupported("deserialize_one_cell_from_text with type " + column.get_name()); } Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override { + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override { return Status::NotSupported("deserialize_column_from_text_vector with type " + column.get_name()); } diff --git a/be/src/vec/data_types/serde/data_type_hll_serde.cpp b/be/src/vec/data_types/serde/data_type_hll_serde.cpp index f0c9dfab02c6d3..e8cd09195fba51 100644 --- a/be/src/vec/data_types/serde/data_type_hll_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_hll_serde.cpp @@ -57,13 +57,15 @@ void DataTypeHLLSerDe::serialize_one_cell_to_json(const IColumn& column, int row Status DataTypeHLLSerDe::deserialize_column_from_json_vector(IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { - DESERIALIZE_COLUMN_FROM_JSON_VECTOR() + const FormatOptions& options, + int nesting_level) const { + DESERIALIZE_COLUMN_FROM_JSON_VECTOR(); return Status::OK(); } Status DataTypeHLLSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { auto& data_column = assert_cast(column); HyperLogLog hyper_log_log(slice); diff --git a/be/src/vec/data_types/serde/data_type_hll_serde.h b/be/src/vec/data_types/serde/data_type_hll_serde.h index 6b7354297c2fa8..3612e85b612637 100644 --- a/be/src/vec/data_types/serde/data_type_hll_serde.h +++ b/be/src/vec/data_types/serde/data_type_hll_serde.h @@ -38,10 +38,11 @@ class DataTypeHLLSerDe : public DataTypeSerDe { void serialize_column_to_json(const IColumn& column, int start_idx, int end_idx, BufferWritable& bw, FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status 
deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; Status write_column_to_pb(const IColumn& column, PValues& result, int start, int end) const override; Status read_column_from_pb(IColumn& column, const PValues& arg) const override; diff --git a/be/src/vec/data_types/serde/data_type_jsonb_serde.cpp b/be/src/vec/data_types/serde/data_type_jsonb_serde.cpp index b47903399eb3a2..1194d549af12de 100644 --- a/be/src/vec/data_types/serde/data_type_jsonb_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_jsonb_serde.cpp @@ -77,13 +77,15 @@ void DataTypeJsonbSerDe::serialize_one_cell_to_json(const IColumn& column, int r Status DataTypeJsonbSerDe::deserialize_column_from_json_vector(IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { - DESERIALIZE_COLUMN_FROM_JSON_VECTOR() + const FormatOptions& options, + int nesting_level) const { + DESERIALIZE_COLUMN_FROM_JSON_VECTOR(); return Status::OK(); } Status DataTypeJsonbSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { JsonBinaryValue value; RETURN_IF_ERROR(value.from_json_string(slice.data, slice.size)); diff --git a/be/src/vec/data_types/serde/data_type_jsonb_serde.h b/be/src/vec/data_types/serde/data_type_jsonb_serde.h index 9b4470dde87f8e..a3c716e6d89e6a 100644 --- a/be/src/vec/data_types/serde/data_type_jsonb_serde.h +++ b/be/src/vec/data_types/serde/data_type_jsonb_serde.h @@ -48,11 +48,12 @@ class DataTypeJsonbSerDe : public DataTypeStringSerDe { BufferWritable& bw, FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; private: template diff --git a/be/src/vec/data_types/serde/data_type_map_serde.cpp b/be/src/vec/data_types/serde/data_type_map_serde.cpp index a879e5d2d52136..41db3236dc6459 100644 --- a/be/src/vec/data_types/serde/data_type_map_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_map_serde.cpp @@ -165,13 +165,15 @@ void DataTypeMapSerDe::serialize_one_cell_to_hive_text(const IColumn& column, in Status DataTypeMapSerDe::deserialize_column_from_json_vector(IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { DESERIALIZE_COLUMN_FROM_JSON_VECTOR() return Status::OK(); } Status DataTypeMapSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { if (slice.empty()) { return Status::InvalidArgument("slice is empty!"); } @@ -231,15 +233,8 @@ Status DataTypeMapSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& } Slice next(slice.data + start_pos, idx - start_pos); next.trim_prefix(); - if (options.converted_from_string && - (next.starts_with("\"") || next.starts_with("'"))) { - next.remove_prefix(1); - } - if 
(options.converted_from_string && (next.ends_with("\"") || next.ends_with("'"))) { - next.remove_suffix(1); - } - if (Status st = - key_serde->deserialize_one_cell_from_json(nested_key_column, next, options); + if (Status st = key_serde->deserialize_one_cell_from_json(nested_key_column, next, + options, nesting_level + 1); !st.ok()) { nested_key_column.pop_back(elem_deserialized); nested_val_column.pop_back(elem_deserialized); @@ -257,7 +252,7 @@ Status DataTypeMapSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& next.trim_prefix(); if (Status st = value_serde->deserialize_one_cell_from_json(nested_val_column, next, - options); + options, nesting_level + 1); !st.ok()) { nested_key_column.pop_back(elem_deserialized + 1); nested_val_column.pop_back(elem_deserialized); @@ -275,8 +270,8 @@ Status DataTypeMapSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& Slice next(slice.data + start_pos, idx - start_pos); next.trim_prefix(); - if (Status st = - value_serde->deserialize_one_cell_from_json(nested_val_column, next, options); + if (Status st = value_serde->deserialize_one_cell_from_json(nested_val_column, next, + options, nesting_level + 1); !st.ok()) { nested_key_column.pop_back(elem_deserialized + 1); nested_val_column.pop_back(elem_deserialized); diff --git a/be/src/vec/data_types/serde/data_type_map_serde.h b/be/src/vec/data_types/serde/data_type_map_serde.h index 7bf0ca9a766ce9..54c269a0e5624e 100644 --- a/be/src/vec/data_types/serde/data_type_map_serde.h +++ b/be/src/vec/data_types/serde/data_type_map_serde.h @@ -44,11 +44,12 @@ class DataTypeMapSerDe : public DataTypeSerDe { void serialize_column_to_json(const IColumn& column, int start_idx, int end_idx, BufferWritable& bw, FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_one_cell_from_hive_text(IColumn& column, Slice& slice, const FormatOptions& options, diff --git a/be/src/vec/data_types/serde/data_type_nullable_serde.cpp b/be/src/vec/data_types/serde/data_type_nullable_serde.cpp index 722fb105d5babe..1a259644d7d92f 100644 --- a/be/src/vec/data_types/serde/data_type_nullable_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_nullable_serde.cpp @@ -60,10 +60,12 @@ void DataTypeNullableSerDe::serialize_one_cell_to_json(const IColumn& column, in } } -Status DataTypeNullableSerDe::deserialize_column_from_json_vector( - IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { - DESERIALIZE_COLUMN_FROM_JSON_VECTOR() +Status DataTypeNullableSerDe::deserialize_column_from_json_vector(IColumn& column, + std::vector& slices, + int* num_deserialized, + const FormatOptions& options, + int nesting_level) const { + DESERIALIZE_COLUMN_FROM_JSON_VECTOR(); return Status::OK(); } @@ -116,7 +118,8 @@ Status DataTypeNullableSerDe::deserialize_column_from_hive_text_vector(IColumn& } Status DataTypeNullableSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { auto& null_column = assert_cast(column); // TODO(Amory) make null literal 
configurable @@ -124,17 +127,55 @@ Status DataTypeNullableSerDe::deserialize_one_cell_from_json(IColumn& column, Sl // sure slice is from string parse , we can parse this "null" literal as string "null" to // nested column , otherwise we insert null to null column if (!(options.converted_from_string && slice.trim_quote())) { - if (slice.size == 4 && Slice::mem_equal("null", slice.data, 4)) { + /* + * For null values in ordinary types, we use \N to represent them; + * for null values in nested types, we use null to represent them, just like the json format. + * + * example: + * If you have three nullable columns + * a : int, b : string, c : map + * data: + * \N,hello world,\N + * 1,\N,{"cmake":2,"null":11} + * 9,"\N",{"\N":null,null:0} + * \N,"null",{null:null} + * null,null,null + * + * if you set trim_double_quotes = true + * you will get : + * NULL,hello world,NULL + * 1,NULL,{"cmake":2,"null":11} + * 9,\N,{"\N":NULL,NULL:0} + * NULL,null,{NULL:NULL} + * NULL,null,NULL + * + * if you set trim_double_quotes = false + * you will get : + * NULL,hello world,NULL + * 1,\N,{"cmake":2,"null":11} + * 9,"\N",{"\N":NULL,NULL:0} + * NULL,"null",{NULL:NULL} + * NULL,null,NULL + * + * in csv(text) for normal type: we only recognize \N for null , so + * for not char family type, like int, if we put null literal , + * it will parse fail, and make result null,not just because it equals \N. + * for char family type, like string, if we put null literal, it will parse success, + * and "null" literal will be stored in doris. + * + */ + if (nesting_level >= 2 && slice.size == 4 && slice[0] == 'n' && slice[1] == 'u' && + slice[2] == 'l' && slice[3] == 'l') { null_column.insert_data(nullptr, 0); return Status::OK(); - } else if (slice.size == 2 && slice[0] == '\\' && slice[1] == 'N') { + } else if (nesting_level == 1 && slice.size == 2 && slice[0] == '\\' && slice[1] == 'N') { null_column.insert_data(nullptr, 0); return Status::OK(); } } auto st = nested_serde->deserialize_one_cell_from_json(null_column.get_nested_column(), slice, - options); + options, nesting_level); if (!st.ok()) { // fill null if fail null_column.insert_data(nullptr, 0); // 0 is meaningless here diff --git a/be/src/vec/data_types/serde/data_type_nullable_serde.h b/be/src/vec/data_types/serde/data_type_nullable_serde.h index 5978a9f4a99262..9c5454cd834229 100644 --- a/be/src/vec/data_types/serde/data_type_nullable_serde.h +++ b/be/src/vec/data_types/serde/data_type_nullable_serde.h @@ -40,11 +40,12 @@ class DataTypeNullableSerDe : public DataTypeSerDe { void serialize_column_to_json(const IColumn& column, int start_idx, int end_idx, BufferWritable& bw, FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_one_cell_from_hive_text(IColumn& column, Slice& slice, const FormatOptions& options, diff --git a/be/src/vec/data_types/serde/data_type_number_serde.cpp b/be/src/vec/data_types/serde/data_type_number_serde.cpp index 3ba0890e425e4e..cd650c72492bd8 100644 --- a/be/src/vec/data_types/serde/data_type_number_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_number_serde.cpp @@ -101,7 +101,8 @@ void 
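The comment above is the heart of this patch: at the top level of a CSV/text row only \N denotes NULL, while inside a complex type the JSON-style null literal does, and the two are not interchangeable. A tiny sketch of that decision as a standalone predicate (hypothetical helper; the real check lives inline in DataTypeNullableSerDe::deserialize_one_cell_from_json):

#include <iostream>
#include <string>

// At nesting_level == 1 only "\N" is NULL; at nesting_level >= 2 only "null" is.
bool is_null_literal(const std::string& cell, int nesting_level) {
    if (nesting_level == 1) {
        return cell == "\\N";
    }
    return cell == "null";
}

int main() {
    std::cout << std::boolalpha;
    std::cout << is_null_literal("\\N", 1) << '\n';  // true:  top-level \N is NULL
    std::cout << is_null_literal("null", 1) << '\n'; // false: a top-level "null" stays literal text
    std::cout << is_null_literal("null", 2) << '\n'; // true:  nested null is NULL
    std::cout << is_null_literal("\\N", 2) << '\n';  // false: nested \N is kept as-is
    return 0;
}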
DataTypeNumberSerDe::write_column_to_arrow(const IColumn& column, const template Status DataTypeNumberSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { auto& column_data = reinterpret_cast(column); ReadBuffer rb(slice.data, slice.size); if constexpr (std::is_same::value) { @@ -164,10 +165,12 @@ void DataTypeNumberSerDe::serialize_one_cell_to_json(const IColumn& column, i } template -Status DataTypeNumberSerDe::deserialize_column_from_json_vector( - IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { - DESERIALIZE_COLUMN_FROM_JSON_VECTOR() +Status DataTypeNumberSerDe::deserialize_column_from_json_vector(IColumn& column, + std::vector& slices, + int* num_deserialized, + const FormatOptions& options, + int nesting_level) const { + DESERIALIZE_COLUMN_FROM_JSON_VECTOR(); return Status::OK(); } diff --git a/be/src/vec/data_types/serde/data_type_number_serde.h b/be/src/vec/data_types/serde/data_type_number_serde.h index fa831c2cd44535..6c9bc5ab4ae3b8 100644 --- a/be/src/vec/data_types/serde/data_type_number_serde.h +++ b/be/src/vec/data_types/serde/data_type_number_serde.h @@ -60,11 +60,12 @@ class DataTypeNumberSerDe : public DataTypeSerDe { void serialize_column_to_json(const IColumn& column, int start_idx, int end_idx, BufferWritable& bw, FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; Status write_column_to_pb(const IColumn& column, PValues& result, int start, int end) const override; diff --git a/be/src/vec/data_types/serde/data_type_object_serde.h b/be/src/vec/data_types/serde/data_type_object_serde.h index c93a7d9e038e76..2274e27c0358c4 100644 --- a/be/src/vec/data_types/serde/data_type_object_serde.h +++ b/be/src/vec/data_types/serde/data_type_object_serde.h @@ -48,13 +48,14 @@ class DataTypeObjectSerDe : public DataTypeSerDe { "serialize_column_to_text with type " + column.get_name()); } Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override { + const FormatOptions& options, + int nesting_level = 1) const override { return Status::NotSupported("deserialize_one_cell_from_text with type " + column.get_name()); } Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override { + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override { return Status::NotSupported("deserialize_column_from_text_vector with type " + column.get_name()); } diff --git a/be/src/vec/data_types/serde/data_type_quantilestate_serde.h b/be/src/vec/data_types/serde/data_type_quantilestate_serde.h index f51488cf32b1f5..081dff9315b382 100644 --- a/be/src/vec/data_types/serde/data_type_quantilestate_serde.h +++ b/be/src/vec/data_types/serde/data_type_quantilestate_serde.h @@ -51,14 +51,15 @@ class DataTypeQuantileStateSerDe : public DataTypeSerDe { "serialize_column_to_text with type " + column.get_name()); } Status 
deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override { + const FormatOptions& options, + int nesting_level = 1) const override { return Status::NotSupported("deserialize_one_cell_from_text with type " + column.get_name()); } Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override { + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override { return Status::NotSupported("deserialize_column_from_text_vector with type " + column.get_name()); } diff --git a/be/src/vec/data_types/serde/data_type_serde.h b/be/src/vec/data_types/serde/data_type_serde.h index 58d000925684a6..057eac975673f6 100644 --- a/be/src/vec/data_types/serde/data_type_serde.h +++ b/be/src/vec/data_types/serde/data_type_serde.h @@ -49,13 +49,13 @@ class time_zone; serialize_one_cell_to_json(column, i, bw, options); \ } -#define DESERIALIZE_COLUMN_FROM_JSON_VECTOR() \ - for (int i = 0; i < slices.size(); ++i) { \ - if (Status st = deserialize_one_cell_from_json(column, slices[i], options); \ - st != Status::OK()) { \ - return st; \ - } \ - ++*num_deserialized; \ +#define DESERIALIZE_COLUMN_FROM_JSON_VECTOR() \ + for (int i = 0; i < slices.size(); ++i) { \ + if (Status st = deserialize_one_cell_from_json(column, slices[i], options, nesting_level); \ + st != Status::OK()) { \ + return st; \ + } \ + ++*num_deserialized; \ } #define DESERIALIZE_COLUMN_FROM_HIVE_TEXT_VECTOR() \ @@ -168,23 +168,26 @@ class DataTypeSerDe { BufferWritable& bw, FormatOptions& options) const = 0; virtual Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const = 0; + const FormatOptions& options, + int nesting_level = 1) const = 0; // deserialize text vector is to avoid virtual function call in complex type nested loop virtual Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const = 0; + const FormatOptions& options, + int nesting_level = 1) const = 0; virtual Status deserialize_one_cell_from_hive_text(IColumn& column, Slice& slice, const FormatOptions& options, int nesting_level = 1) const { - return deserialize_one_cell_from_json(column, slice, options); + return deserialize_one_cell_from_json(column, slice, options, nesting_level); }; virtual Status deserialize_column_from_hive_text_vector(IColumn& column, std::vector& slices, int* num_deserialized, const FormatOptions& options, int nesting_level = 1) const { - return deserialize_column_from_json_vector(column, slices, num_deserialized, options); + return deserialize_column_from_json_vector(column, slices, num_deserialized, options, + nesting_level); }; virtual void serialize_one_cell_to_hive_text(const IColumn& column, int row_num, BufferWritable& bw, FormatOptions& options, diff --git a/be/src/vec/data_types/serde/data_type_string_serde.cpp b/be/src/vec/data_types/serde/data_type_string_serde.cpp index d3e6941fcdb02d..22bf5cabe5fe06 100644 --- a/be/src/vec/data_types/serde/data_type_string_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_string_serde.cpp @@ -50,9 +50,11 @@ void DataTypeStringSerDe::serialize_one_cell_to_json(const IColumn& column, int bw.write(value.data, value.size); } -Status DataTypeStringSerDe::deserialize_column_from_json_vector( - IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { +Status 
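Two details in the base-class change above are easy to miss: the new parameter is defaulted to 1, so existing top-level callers of the deserialize entry points compile and behave as before, and DESERIALIZE_COLUMN_FROM_JSON_VECTOR now forwards whatever nesting_level its enclosing method received instead of hard-coding the top level. A simplified sketch of that combination (macro and function names are illustrative):

#include <iostream>
#include <string>
#include <vector>

void deserialize_one(const std::string& cell, int nesting_level) {
    std::cout << cell << " @ level " << nesting_level << '\n';
}

// The macro expands inside a function that has a nesting_level parameter and
// simply forwards it, mirroring DESERIALIZE_COLUMN_FROM_JSON_VECTOR().
#define DESERIALIZE_VECTOR(cells)              \
    for (const std::string& c : (cells)) {     \
        deserialize_one(c, nesting_level);     \
    }

void deserialize_vector(const std::vector<std::string>& cells, int nesting_level = 1) {
    DESERIALIZE_VECTOR(cells)
}

int main() {
    deserialize_vector({"\\N", "1"});     // unchanged call site, still level 1
    deserialize_vector({"null", "2"}, 2); // a container serde passes its own level
    return 0;
}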
DataTypeStringSerDe::deserialize_column_from_json_vector(IColumn& column, + std::vector& slices, + int* num_deserialized, + const FormatOptions& options, + int nesting_level) const { DESERIALIZE_COLUMN_FROM_JSON_VECTOR() return Status::OK(); } @@ -80,8 +82,24 @@ static void escape_string(const char* src, size_t& len, char escape_char) { } Status DataTypeStringSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { + const FormatOptions& options, + int nesting_level) const { auto& column_data = assert_cast(column); + + /* + * For strings in the json complex type, we remove double quotes by default. + * + * Because when querying complex types, such as selecting complexColumn from table, + * we will add double quotes to the strings in the complex type. + * + * For the map column, insert { "abc" : 1, "hello",2 }. + * If you do not remove the double quotes, it will display {""abc"":1,""hello"": 2 }, + * remove the double quotes to display { "abc" : 1, "hello",2 }. + * + */ + if (nesting_level >= 2) { + slice.trim_quote(); + } if (options.escape_char != 0) { escape_string(slice.data, slice.size, options.escape_char); } diff --git a/be/src/vec/data_types/serde/data_type_string_serde.h b/be/src/vec/data_types/serde/data_type_string_serde.h index 91301aa637ee2b..6d0a8a03412ce0 100644 --- a/be/src/vec/data_types/serde/data_type_string_serde.h +++ b/be/src/vec/data_types/serde/data_type_string_serde.h @@ -40,11 +40,12 @@ class DataTypeStringSerDe : public DataTypeSerDe { BufferWritable& bw, FormatOptions& options) const override; Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level) const override; Status write_column_to_pb(const IColumn& column, PValues& result, int start, int end) const override; diff --git a/be/src/vec/data_types/serde/data_type_struct_serde.cpp b/be/src/vec/data_types/serde/data_type_struct_serde.cpp index bc55fad060e20d..904cdc824b42b2 100644 --- a/be/src/vec/data_types/serde/data_type_struct_serde.cpp +++ b/be/src/vec/data_types/serde/data_type_struct_serde.cpp @@ -40,8 +40,11 @@ std::optional DataTypeStructSerDe::try_get_position_by_name(const String } Status DataTypeStructSerDe::deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const { - DCHECK(!slice.empty()); + const FormatOptions& options, + int nesting_level) const { + if (slice.empty()) { + return Status::InvalidArgument("slice is empty!"); + } auto& struct_column = assert_cast(column); if (slice[0] != '{') { @@ -166,9 +169,11 @@ Status DataTypeStructSerDe::deserialize_one_cell_from_json(IColumn& column, Slic return Status::OK(); } -Status DataTypeStructSerDe::deserialize_column_from_json_vector( - IColumn& column, std::vector& slices, int* num_deserialized, - const FormatOptions& options) const { +Status DataTypeStructSerDe::deserialize_column_from_json_vector(IColumn& column, + std::vector& slices, + int* num_deserialized, + const FormatOptions& options, + int nesting_level) const { DESERIALIZE_COLUMN_FROM_JSON_VECTOR() return Status::OK(); } diff --git a/be/src/vec/data_types/serde/data_type_struct_serde.h b/be/src/vec/data_types/serde/data_type_struct_serde.h index 
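The string serde hunk above trims one pair of double quotes from string cells that sit inside a complex type, so a value inserted as {"abc":1,"hello":2} is not rendered back as {""abc"":1,""hello"":2}, while a plain string column keeps its quotes verbatim. A sketch of that behaviour with std::string standing in for Slice::trim_quote():

#include <iostream>
#include <string>

// Nested string cells (nesting_level >= 2) drop one surrounding pair of double
// quotes; top-level string cells are left untouched.
std::string trim_quote_if_nested(std::string cell, int nesting_level) {
    if (nesting_level >= 2 && cell.size() >= 2 && cell.front() == '"' && cell.back() == '"') {
        cell = cell.substr(1, cell.size() - 2);
    }
    return cell;
}

int main() {
    std::cout << trim_quote_if_nested("\"abc\"", 2) << '\n'; // abc    (map key inside {...})
    std::cout << trim_quote_if_nested("\"abc\"", 1) << '\n'; // "abc"  (plain string column keeps the quotes)
    return 0;
}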
5733f8b5ee33de..1f39375a1a3fd0 100644 --- a/be/src/vec/data_types/serde/data_type_struct_serde.h +++ b/be/src/vec/data_types/serde/data_type_struct_serde.h @@ -121,11 +121,12 @@ class DataTypeStructSerDe : public DataTypeSerDe { } Status deserialize_one_cell_from_json(IColumn& column, Slice& slice, - const FormatOptions& options) const override; + const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_column_from_json_vector(IColumn& column, std::vector& slices, - int* num_deserialized, - const FormatOptions& options) const override; + int* num_deserialized, const FormatOptions& options, + int nesting_level = 1) const override; Status deserialize_one_cell_from_hive_text(IColumn& column, Slice& slice, const FormatOptions& options, diff --git a/be/test/vec/data_types/serde/data_type_serde_text_test.cpp b/be/test/vec/data_types/serde/data_type_serde_text_test.cpp index cbe3336c53f089..56721649518142 100644 --- a/be/test/vec/data_types/serde/data_type_serde_text_test.cpp +++ b/be/test/vec/data_types/serde/data_type_serde_text_test.cpp @@ -304,16 +304,19 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { {"[\"hello\", \"world\"]", "['a', 'b', 'c']", "[\"42\",1412341,true,42.43,3.40282e+38+1,alpha:beta:gamma,Earth#42:" "Control#86:Bob#31,17:true:Abe " - "Linkedin,BLUE,\"\\N\",\"\u0001\u0002\u0003,\\u0001bc\"]"}, + "Linkedin,BLUE,\"\\N\",\"\u0001\u0002\u0003,\\u0001bc\"]", + "[\"heeeee\",null,\"NULL\",\"\\N\",null,\"sssssssss\"]"}, // last : ["42",1412341,true,42.43,3.40282e+38+1,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,"\N",",\u0001bc"] - {"[\"hello\", \"world\"]", "['a', 'b', 'c']", - "[\"42\", 1412341, true, 42.43, 3.40282e+38+1, alpha:beta:gamma, " - "Earth#42:Control#86:Bob#31, 17:true:Abe Linkedin, BLUE, \"\\N\", " - "\"\x1\x2\x3,\\u0001bc\"]"}, {"[hello, world]", "[a, b, c]", "[42, 1412341, true, 42.43, 3.40282e+38+1, alpha:beta:gamma, " "Earth#42:Control#86:Bob#31, 17:true:Abe Linkedin, BLUE, \\N, " - "\x1\x2\x3,\\u0001bc]"}), + "\x1\x2\x3,\\u0001bc]", + "[heeeee, NULL, NULL, \\N, NULL, sssssssss]"}, + {"[hello, world]", "[a, b, c]", + "[42, 1412341, true, 42.43, 3.40282e+38+1, alpha:beta:gamma, " + "Earth#42:Control#86:Bob#31, 17:true:Abe Linkedin, BLUE, \\N, " + "\x1\x2\x3,\\u0001bc]", + "[heeeee, NULL, NULL, \\N, NULL, sssssssss]"}), FieldType_RandStr( FieldType::OLAP_FIELD_TYPE_DATE, {"[\\\"2022-07-13\\\",\"2022-07-13 12:30:00\"]", @@ -324,11 +327,17 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { "[2022-07-13, 2022-07-13]"}), FieldType_RandStr( FieldType::OLAP_FIELD_TYPE_DATETIME, - {"[\"2022-07-13\",\"2022-07-13 12:30:00\"]", - "[2022-07-13 12:30:00, \"2022-07-13\", 2022-07-13 12:30:00.0000]"}, - {"[NULL, NULL]", "[2022-07-13 12:30:00, NULL, 2022-07-13 12:30:00]"}, + { + "[\"2022-07-13\",\"2022-07-13 12:30:00\"]", + "[2022-07-13 12:30:00, \"2022-07-13\", 2022-07-13 12:30:00.0000]", + "\\N", + "[null,null,null]", + }, + {"[NULL, NULL]", "[2022-07-13 12:30:00, NULL, 2022-07-13 12:30:00]", "NULL", + "[NULL, NULL, NULL]"}, {"[2022-07-13 00:00:00, 2022-07-13 12:30:00]", - "[2022-07-13 12:30:00, 2022-07-13 00:00:00, 2022-07-13 12:30:00]"}), + "[2022-07-13 12:30:00, 2022-07-13 00:00:00, 2022-07-13 12:30:00]", "NULL", + "[NULL, NULL, NULL]"}), FieldType_RandStr( FieldType::OLAP_FIELD_TYPE_DECIMAL, {"[4, 5.5, 6.67]", @@ -355,8 +364,8 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { auto type = std::get<0>(type_pair); DataTypePtr nested_data_type_ptr = DataTypeFactory::instance().create_data_type(type, 0, 0); - DataTypePtr 
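The struct serde change a little above also swaps the debug-only DCHECK(!slice.empty()) for a Status::InvalidArgument return, so an empty cell becomes a recoverable per-row error in release builds instead of a check that only fires under DCHECK. A minimal sketch of that error-return pattern, with a plain status struct standing in for doris::Status:

#include <iostream>
#include <string>

struct SimpleStatus {
    bool ok;
    std::string msg;
};

// Returns an error status for an empty cell instead of asserting on it.
SimpleStatus deserialize_struct_cell(const std::string& cell) {
    if (cell.empty()) {
        return {false, "slice is empty!"};
    }
    // ... parse "{...}" here ...
    return {true, ""};
}

int main() {
    SimpleStatus st = deserialize_struct_cell("");
    std::cout << (st.ok ? "ok" : st.msg) << '\n'; // prints: slice is empty!
    return 0;
}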
array_data_type_ptr = - std::make_shared(make_nullable(nested_data_type_ptr)); + DataTypePtr array_data_type_ptr = make_nullable( + std::make_shared(make_nullable(nested_data_type_ptr))); std::cout << "========= This type is " << array_data_type_ptr->get_name() << ": " << fmt::format("{}", type) << std::endl; @@ -383,7 +392,9 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { if (expect_str == "[]") { if (st.ok()) { auto& item_column = assert_cast( - assert_cast(*col).get_data()); + assert_cast( + assert_cast(*col).get_nested_column()) + .get_data()); for (auto ix = 0; ix < item_column.size(); ++ix) { if (item_column.is_null_at(ix)) { std::cout << "idx null:" << ix << std::endl; @@ -430,7 +441,7 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { Status st = serde_1->deserialize_one_cell_from_json(*col3, slice, formatOptions); if (expect_str == "[]") { - EXPECT_EQ(st.ok(), false); + EXPECT_EQ(st.ok(), true); std::cout << st.to_json() << std::endl; } else { EXPECT_EQ(st.ok(), true); @@ -457,21 +468,29 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { FieldType::OLAP_FIELD_TYPE_STRING, {"{1: \"amory is 7\", 0: \" doris be better \", -1: \"wrong,\"}", "{\"1\": \"amory is 7\", \"0\": 1}"}, - {"{1:\"amory is 7\", 0:\" doris be better \", NULL:\"wrong,\"}", - "{NULL:\"amory is 7\", NULL:1}"}), + {"{1:amory is 7, 0: doris be better , NULL:wrong,}", + "{NULL:amory is 7, NULL:1}"}), FieldType_RandStr( FieldType::OLAP_FIELD_TYPE_STRING, FieldType::OLAP_FIELD_TYPE_DOUBLE, {"{\" ,.amory\": 111.2343, \"\": 112., 'dggs': 13.14 , NULL: 12.2222222, " ": NULL\\}", - "{\"\": NULL, null: 12.44}", "{{}}", "{{}", "}}", "{}, {}"}, - {"{\" ,.amory\":111.2343, \"\":112, 'dggs':13.14, NULL:12.2222222, :NULL}", - "{\"\":NULL, NULL:12.44}", "{}", "{}", "", "{}"}), + "{\"\": NULL, null: 12.44}", "{{}}", "{{}", "}}", "{}, {}", "\\N", + "{null:null,\"null\":null}", + "{\"hello " + "world\":0.2222222,\"hello2\":null,null:1111.1,\"NULL\":null,\"null\":" + "null,\"null\":0.1}"}, + {"{ ,.amory:111.2343, \"\":112, dggs:13.14, NULL:12.2222222, :NULL}", + "{\"\":NULL, NULL:12.44}", "{}", "{}", "NULL", "{}", "NULL", + "{NULL:NULL, null:NULL}", + "{hello world:0.2222222, hello2:NULL, NULL:1111.1, NULL:NULL, null:NULL, " + "null:0.1}"}), FieldType_RandStr(FieldType::OLAP_FIELD_TYPE_FLOAT, FieldType::OLAP_FIELD_TYPE_DOUBLE, {"{0.33: 3.1415926,3.1415926: 22}", "{3.14, 15926: 22}", "{3.14}", - "{222:3444},", "{4.12, 677: 455: 356, 67.6:67.7}"}, - {"{0.33:3.1415926, 3.1415925:22}", "{NULL:22}", "{}", "", - "{NULL:NULL, 67.6:67.7}"}), + "{222:3444},", "{4.12, 677: 455: 356, 67.6:67.7}", + "{null:null,null:1.0,1.0:null}"}, + {"{0.33:3.1415926, 3.1415925:22}", "{NULL:22}", "{}", "NULL", + "{NULL:NULL, 67.6:67.7}", "{NULL:NULL, NULL:1, 1:NULL}"}), FieldType_RandStr( FieldType::OLAP_FIELD_TYPE_DATE, FieldType::OLAP_FIELD_TYPE_DATETIME, {"{2022-07-13: 2022-07-13 12:30:00, 2022-07-13 12:30:00: 2022-07-13 " @@ -479,11 +498,13 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { "2022-07-13:'2022-07-13 12:30:00'}", // escaped char ':' "{2022-07-13 12\\:30\\:00: 2022-07-13, 2022-07-13 12\\:30\\:00.000: " - "2022-07-13 12:30:00.000, 2022-07-13:\'2022-07-13 12:30:00\'}"}, + "2022-07-13 12:30:00.000, 2022-07-13:\'2022-07-13 12:30:00\'}", + "\\N"}, {"{2022-07-13:2022-07-13 12:30:00, 2022-07-13:NULL, 2022-07-13:NULL, " "NULL:NULL, 2022-07-13:NULL}", "{2022-07-13:2022-07-13 00:00:00, 2022-07-13:2022-07-13 12:30:00, " - "2022-07-13:NULL}"}), + "2022-07-13:NULL}", + "NULL"}), FieldType_RandStr( FieldType::OLAP_FIELD_TYPE_DATETIME, FieldType::OLAP_FIELD_TYPE_DECIMAL, 
{"{2022-07-13 12:30:00: 12.45675432, 2022-07-13: 12.45675432, NULL: NULL}", @@ -504,8 +525,8 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { DataTypeFactory::instance().create_data_type(key_type, 0, 0); DataTypePtr nested_value_type_ptr = DataTypeFactory::instance().create_data_type(value_type, 0, 0); - DataTypePtr map_data_type_ptr = std::make_shared( - make_nullable(nested_key_type_ptr), make_nullable(nested_value_type_ptr)); + DataTypePtr map_data_type_ptr = make_nullable(std::make_shared( + make_nullable(nested_key_type_ptr), make_nullable(nested_value_type_ptr))); std::cout << "========= This type is " << map_data_type_ptr->get_name() << std::endl; @@ -633,17 +654,20 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { std::vector> FieldType_RandStr; std::vector nested_field_types = { - FieldType_RandStr(FieldType::OLAP_FIELD_TYPE_STRING, - {"[[Hello, World],[This, is, a, nested, array]]"}, - {"[[Hello, World], [This, is, a, nested, array]]"}, - {"[NULL, NULL, NULL, NULL, NULL, NULL, NULL]"}, - {"[[Hello, World], [This, is, a, nested, array]]"}), + FieldType_RandStr( + FieldType::OLAP_FIELD_TYPE_STRING, + {"[[Hello, World],[This, is, a, nested, array],null,[null,null,aaaa]]"}, + {"[[Hello, World], [This, is, a, nested, array], NULL, [NULL, NULL, " + "aaaa]]"}, + {"[NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL]"}, + {"[[Hello, World], [This, is, a, nested, array], NULL, [NULL, NULL, " + "aaaa]]"}), FieldType_RandStr( FieldType::OLAP_FIELD_TYPE_STRING, {"[[With, special, \"characters\"], [like, @, #, $, % \"^\", &, *, (, ), " "-, _], [=, +, [, ], {, }, |, \\, ;, :, ', '\', <, >, ,, ., /, ?, ~]]"}, - {"[[With, special, \"characters\"], [like, @, #, $, % \"^\", &, *, (, ), " - "-, _], [=, +, [, ], {, }, |, \\, ;, :, ', '\', <, >, ,, ., /, ?, ~]]"}, + {"[[With, special, characters], [like, @, #, $, % \"^\", &, *, (, ), -, " + "_], [=, +, [, ], {, }, |, \\, ;, :, ', '', <, >, ,, ., /, ?, ~]]"}, {""}, {"[[With, special, characters], [like, @, #, $, % \"^\", &, *, (, ), -, " "_], [=, +, [, ], {, }, |, \\, ;, :, ', '\', <, >, ,, ., /, ?, ~]]"})}; @@ -758,23 +782,23 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { "3050124830713523,\"mKH57V-YmwCNFq-vs8-vUIX\":0.36446683035480754},{\"HfhEMX-" "oAMBJCC-YIC-hCqN\":0.8131454631693608,\"xrnTFd-ikONWik-T7J-sL8J\":0." 
"37509722558990855,\"SVyEes-77mlzIr-N6c-DkYw\":0.4703053945053086," - "\"NULL\":0.1,\"\\N\":0.1}]"}, - {"[{\"2cKtIM-L1mOcEm-udR-HcB2\":0.23929040957798242, " - "\"eof2UN-Is0EEuA-H5D-hE58\":0.42373055809540094, " - "\"FwUSOB-R8rtK9W-BVG-8wYZ\":0.7680704548628841}, " - "{\"qDXU9D-7orr51d-g80-6t5k\":0.6446245786874659, " - "\"bkLjmx-uZ2Ez7F-536-PGqy\":0.8880791950937957, " - "\"9Etq4o-FPm37O4-5fk-QWh7\":0.08630489716260481}, " - "{\"tu3OMw-mzS0jAx-Dnj-Xm3G\":0.1184199213706042, " - "\"XkhTn0-QFLo8Ks-JXR-k4zk\":0.5181239375482816, " - "\"EYC8Dj-GTTp9iB-b4O-QBkO\":0.4491897722178303}, " - "{\"sHFGPg-cfA8gya-kfw-IugT\":0.20842299487398452, " - "\"BBQ6e5-OJYRJhC-zki-7rQj\":0.3050124830713523, " - "\"mKH57V-YmwCNFq-vs8-vUIX\":0.36446683035480754}, " - "{\"HfhEMX-oAMBJCC-YIC-hCqN\":0.8131454631693608, " - "\"xrnTFd-ikONWik-T7J-sL8J\":0.37509722558990855, " - "\"SVyEes-77mlzIr-N6c-DkYw\":0.4703053945053086, " - "\"NULL\":0.1, \"\\N\":0.1}]"}, + "\"NULL\":0.1,\"\\N\":0.1,null:null}, {NULL:0.1, NULL:NULL, \"NULL\":0}]"}, + {"[{2cKtIM-L1mOcEm-udR-HcB2:0.23929040957798242, " + "eof2UN-Is0EEuA-H5D-hE58:0.42373055809540094, " + "FwUSOB-R8rtK9W-BVG-8wYZ:0.7680704548628841}, " + "{qDXU9D-7orr51d-g80-6t5k:0.6446245786874659, " + "bkLjmx-uZ2Ez7F-536-PGqy:0.8880791950937957, " + "9Etq4o-FPm37O4-5fk-QWh7:0.08630489716260481}, " + "{tu3OMw-mzS0jAx-Dnj-Xm3G:0.1184199213706042, " + "XkhTn0-QFLo8Ks-JXR-k4zk:0.5181239375482816, " + "EYC8Dj-GTTp9iB-b4O-QBkO:0.4491897722178303}, " + "{sHFGPg-cfA8gya-kfw-IugT:0.20842299487398452, " + "BBQ6e5-OJYRJhC-zki-7rQj:0.3050124830713523, " + "mKH57V-YmwCNFq-vs8-vUIX:0.36446683035480754}, " + "{HfhEMX-oAMBJCC-YIC-hCqN:0.8131454631693608, " + "xrnTFd-ikONWik-T7J-sL8J:0.37509722558990855, " + "SVyEes-77mlzIr-N6c-DkYw:0.4703053945053086, NULL:0.1, \\N:0.1, NULL:NULL}, " + "{NULL:0.1, NULL:NULL, NULL:0}]"}, {""}, {"[{2cKtIM-L1mOcEm-udR-HcB2:0.23929040957798242, " "eof2UN-Is0EEuA-H5D-hE58:0.42373055809540094, " @@ -791,7 +815,7 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { "{HfhEMX-oAMBJCC-YIC-hCqN:0.8131454631693608, " "xrnTFd-ikONWik-T7J-sL8J:0.37509722558990855, " "SVyEes-77mlzIr-N6c-DkYw:0.4703053945053086, " - "NULL:0.1, NULL:0.1}]"})}; + "NULL:0.1, \\N:0.1, NULL:NULL}, {NULL:0.1, NULL:NULL, NULL:0}]"})}; for (auto type_pair : nested_field_types) { auto key_type = std::get<0>(type_pair); DataTypePtr nested_key_data_type_ptr = @@ -930,40 +954,40 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { "\"rlcnbo-tFg1FfP-ra6-D9Z8\":[0.7450713997349928,0.792502852203968,0." "9034039182796755,0.49131654565079996,0.25223293077647946,0.9827253462450637,0." 
"1684868582627418,0.0417161505112974,0.8498128570850716,0.8948779001812955]}"}, - {"{\"5Srn6n-SP9fOS3-khz-Ljwt\":[0.8537551959339321, 0.13473869413865858, " + {"{5Srn6n-SP9fOS3-khz-Ljwt:[0.8537551959339321, 0.13473869413865858, " "0.9806016478238296, 0.23014415892941564, 0.26853530959759686, " "0.05484935641143551, 0.11181328816302816, 0.26510985318905933, " "0.6350885463275475, 0.18209889263574142], " - "\"vrQmBC-2WlpWML-V5S-OLgM\":[0.6982221340596457, 0.9260447299229463, " + "vrQmBC-2WlpWML-V5S-OLgM:[0.6982221340596457, 0.9260447299229463, " "0.12488042737255534, 0.8859407191137862, 0.03201490973378984, " "0.8371916387557367, 0.7894434066323907, 0.29667576138232743, 0.9837777568426148, " - "0.7773721913552772], \"3ZbiXK-VvmhFcg-09V-w3g3\":[0.20509046053951785, " + "0.7773721913552772], 3ZbiXK-VvmhFcg-09V-w3g3:[0.20509046053951785, " "0.9175575704931109, 0.305788438361256, 0.9923240410251069, 0.6612939841907548, " "0.5922056063112593, 0.15750800821536715, 0.6374743124669565, 0.4158097731627699, " - "0.00302193321816846], \"gMswpS-Ele9wHM-Uxp-VxzC\":[0.14378032144751685, " + "0.00302193321816846], gMswpS-Ele9wHM-Uxp-VxzC:[0.14378032144751685, " "0.627919779177473, 0.6188731271454715, 0.8088384184584442, 0.8169160298605824, " "0.9051151670055427, 0.558001941204895, 0.029409463113641787, 0.9532987674717762, " - "0.20833228278241533], \"TT9P9f-PXjQnvN-RBx-xRiS\":[0.8276005878909756, " + "0.20833228278241533], TT9P9f-PXjQnvN-RBx-xRiS:[0.8276005878909756, " "0.470950932860423, 0.2442851528127543, 0.710599416715854, 0.3353731152359334, " "0.622947602340124, 0.30675353671676797, 0.8190741661938367, 0.633630372770242, " - "0.9436322366112492], \"gLAnZc-oF7PC9o-ryd-MOXr\":[0.9742716809818137, " + "0.9436322366112492], gLAnZc-oF7PC9o-ryd-MOXr:[0.9742716809818137, " "0.9114038616933997, 0.47459239268645104, 0.6054569900795078, 0.5515590901916287, " "0.8833310208917589, 0.96476090778518, 0.8873874315592357, 0.3577701257062156, " - "0.6993447306713452], \"zrq6BY-7FJg3hc-Dd1-bAJn\":[0.1038405592062176, " + "0.6993447306713452], zrq6BY-7FJg3hc-Dd1-bAJn:[0.1038405592062176, " "0.6757819253774818, 0.6386535502499314, 0.23598674876945303, " "0.11046582465777044, 0.6426056925348297, 0.17289073092250662, " "0.37116009951425233, 0.594677969672274, 0.49351456402872274], " - "\"gCKqtW-bLaoxgZ-CuW-M2re\":[0.934169137905867, 0.12015121444469123, " + "gCKqtW-bLaoxgZ-CuW-M2re:[0.934169137905867, 0.12015121444469123, " "0.5009923777544698, 0.4689139716802634, 0.7226298925299507, 0.33486164698864984, " "0.32944768657449996, 0.5051366150918063, 0.03228636228382431, " - "0.48211773870118435], \"SWqhI2-XnF9jVR-dT1-Yrtt\":[0.8005897112110444, " + "0.48211773870118435], SWqhI2-XnF9jVR-dT1-Yrtt:[0.8005897112110444, " "0.899180582368993, 0.9232176819588501, 0.8615673086606942, 0.9248122266449379, " "0.5586489299212893, 0.40494513773898455, 0.4752644689010731, 0.6668395567417462, " - "0.9068738374244337], \"Z85F6M-cy5K4GP-7I5-5KS9\":[0.34761241187833714, " + "0.9068738374244337], Z85F6M-cy5K4GP-7I5-5KS9:[0.34761241187833714, " "0.46467162849990507, 0.009781307454025168, 0.3174295126364216, " "0.6405423361175397, 0.33838144910731327, 0.328860321648657, " "0.032638966917555856, 0.32782524002924884, 0.7675689545937956], " - "\"rlcnbo-tFg1FfP-ra6-D9Z8\":[0.7450713997349928, 0.792502852203968, " + "rlcnbo-tFg1FfP-ra6-D9Z8:[0.7450713997349928, 0.792502852203968, " "0.9034039182796755, 0.49131654565079996, 0.25223293077647946, " "0.9827253462450637, 0.1684868582627418, 0.0417161505112974, 0.8498128570850716, " 
"0.8948779001812955]}"}, @@ -1116,16 +1140,16 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { "7166939407858642,\"fbxIwJ-HLvW94X-tPn-JgKT\":0.05904881148976504,\"ylE7y1-" "wI3UhjR-ecQ-bNfo\":0.9293354174058581,\"zA0pEV-Lm8g4wq-NJc-TDou\":0." "4000067127237942}}"}, - {"{\"5H6iPe-CRvVE5Q-QnG-8WQb\":{}, " - "\"stDa6g-GML89aZ-w5u-LBe0\":{\"Vlekcq-LDCMo6f-J7U-6rwB\":0.15375824233866453, " - "\"4ljyNE-JMK1bSp-c05-EajL\":0.36153399717116075}, " - "\"URvXyY-SMttaG4-Zol-mPak\":{\"xVaeqR-cj8I6EM-3Nt-queD\":0.003968938824538082, " - "\"Vt2mSs-wacYDvl-qUi-B7kI\":0.6900852274982441, " - "\"i3cJJh-oskdqti-KGU-U6gC\":0.40773692843073994}, " - "\"N3R9TI-jtBPGOQ-uRc-aWAD\":{\"xmGI09-FaCFrrR-O5J-29eu\":0.7166939407858642, " - "\"fbxIwJ-HLvW94X-tPn-JgKT\":0.05904881148976504, " - "\"ylE7y1-wI3UhjR-ecQ-bNfo\":0.9293354174058581, " - "\"zA0pEV-Lm8g4wq-NJc-TDou\":0.4000067127237942}}"}, + {"{5H6iPe-CRvVE5Q-QnG-8WQb:{}, " + "stDa6g-GML89aZ-w5u-LBe0:{Vlekcq-LDCMo6f-J7U-6rwB:0.15375824233866453, " + "4ljyNE-JMK1bSp-c05-EajL:0.36153399717116075}, " + "URvXyY-SMttaG4-Zol-mPak:{xVaeqR-cj8I6EM-3Nt-queD:0.003968938824538082, " + "Vt2mSs-wacYDvl-qUi-B7kI:0.6900852274982441, " + "i3cJJh-oskdqti-KGU-U6gC:0.40773692843073994}, " + "N3R9TI-jtBPGOQ-uRc-aWAD:{xmGI09-FaCFrrR-O5J-29eu:0.7166939407858642, " + "fbxIwJ-HLvW94X-tPn-JgKT:0.05904881148976504, " + "ylE7y1-wI3UhjR-ecQ-bNfo:0.9293354174058581, " + "zA0pEV-Lm8g4wq-NJc-TDou:0.4000067127237942}}"}, {""}, {"{5H6iPe-CRvVE5Q-QnG-8WQb:{}, " "stDa6g-GML89aZ-w5u-LBe0:{Vlekcq-LDCMo6f-J7U-6rwB:0.15375824233866453, " diff --git a/regression-test/data/json_p0/test_json_load_and_function.out b/regression-test/data/json_p0/test_json_load_and_function.out index faf2e919a42af2..5fdb5342cbee3b 100644 --- a/regression-test/data/json_p0/test_json_load_and_function.out +++ b/regression-test/data/json_p0/test_json_load_and_function.out @@ -1,7 +1,7 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !select -- 1 \N -2 \N +2 null 3 true 4 false 5 100 @@ -21,7 +21,7 @@ -- !select -- 1 \N -2 \N +2 null 3 true 4 false 5 100 @@ -47,7 +47,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -73,7 +73,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -99,7 +99,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -125,7 +125,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -151,7 +151,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -177,7 +177,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -203,7 +203,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -229,7 +229,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -255,7 +255,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -281,7 +281,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -307,7 +307,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -333,7 +333,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -359,7 +359,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -385,7 +385,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -411,7 +411,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -437,7 +437,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -463,7 +463,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -489,7 +489,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -515,7 +515,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -541,7 +541,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -567,7 +567,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -593,7 +593,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -619,7 +619,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -645,7 +645,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -671,7 +671,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -697,7 +697,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -723,7 +723,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -749,7 +749,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -775,7 +775,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -801,7 +801,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -827,7 +827,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -853,7 +853,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -879,7 +879,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -905,7 +905,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -931,7 +931,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -957,7 +957,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -983,7 +983,7 @@ -- !select -- 1 \N \N -2 \N \N 
+2 null \N 3 true \N 4 false \N 5 100 \N @@ -1009,7 +1009,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1035,7 +1035,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1061,7 +1061,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1087,7 +1087,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1113,7 +1113,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1139,7 +1139,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1165,7 +1165,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -1191,7 +1191,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1217,7 +1217,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1243,7 +1243,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1269,7 +1269,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1295,7 +1295,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1321,7 +1321,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1347,7 +1347,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1373,7 +1373,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1399,7 +1399,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1425,7 +1425,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1451,7 +1451,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1477,7 +1477,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1503,7 +1503,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1529,7 +1529,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1555,7 +1555,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1581,7 +1581,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1607,7 +1607,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1633,7 +1633,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -1659,7 +1659,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1685,7 +1685,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1711,7 +1711,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1737,7 +1737,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1763,7 +1763,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1789,7 +1789,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1815,7 +1815,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1841,7 +1841,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1867,7 +1867,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1893,7 +1893,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1919,7 +1919,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1945,7 +1945,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1971,7 +1971,7 @@ -- !select 
-- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1997,7 +1997,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2023,7 +2023,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2049,7 +2049,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2075,7 +2075,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2101,7 +2101,7 @@ -- !json_extract_largeint_select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -2127,7 +2127,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2153,7 +2153,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2179,7 +2179,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2205,7 +2205,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2231,7 +2231,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2257,7 +2257,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2283,7 +2283,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2309,7 +2309,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2335,7 +2335,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2361,7 +2361,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2387,7 +2387,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2413,7 +2413,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2439,7 +2439,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2465,7 +2465,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2491,7 +2491,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2517,7 +2517,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2543,7 +2543,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2569,7 +2569,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2595,7 +2595,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2621,7 +2621,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2647,7 +2647,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2673,7 +2673,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2699,7 +2699,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2725,7 +2725,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2751,7 +2751,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2777,7 +2777,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100.0 @@ -2803,7 +2803,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2829,7 +2829,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2855,7 +2855,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2881,7 +2881,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2907,7 +2907,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2933,7 +2933,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false 
\N 5 100 \N @@ -2959,7 +2959,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2985,7 +2985,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3011,7 +3011,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3037,7 +3037,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3063,7 +3063,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3089,7 +3089,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3115,7 +3115,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3141,7 +3141,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3167,7 +3167,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3193,7 +3193,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3219,7 +3219,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3245,7 +3245,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -3271,7 +3271,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3297,7 +3297,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3323,7 +3323,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3349,7 +3349,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3375,7 +3375,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3401,7 +3401,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3427,7 +3427,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3453,7 +3453,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3479,7 +3479,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3505,7 +3505,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3531,7 +3531,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3557,7 +3557,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3583,7 +3583,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3609,7 +3609,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3635,7 +3635,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3661,7 +3661,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3687,7 +3687,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3713,7 +3713,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null true 3 true false 4 false false 5 100 false @@ -3739,7 +3739,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3765,7 +3765,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3791,7 +3791,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3817,7 +3817,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3843,7 +3843,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3869,7 +3869,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3895,7 +3895,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3921,7 +3921,7 @@ -- !select -- 1 \N \N -2 
\N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3947,7 +3947,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3973,7 +3973,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3999,7 +3999,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4025,7 +4025,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4051,7 +4051,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4077,7 +4077,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4103,7 +4103,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4129,7 +4129,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4155,7 +4155,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4181,7 +4181,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null true 3 true true 4 false true 5 100 true @@ -4207,7 +4207,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4233,7 +4233,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4259,7 +4259,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4285,7 +4285,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4311,7 +4311,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4337,7 +4337,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4363,7 +4363,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4389,7 +4389,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4415,7 +4415,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4441,7 +4441,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4467,7 +4467,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4493,7 +4493,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4519,7 +4519,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4545,7 +4545,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4571,7 +4571,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4597,7 +4597,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4623,7 +4623,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4649,7 +4649,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true bool 4 false bool 5 100 int @@ -4675,7 +4675,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4701,7 +4701,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4727,7 +4727,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4753,7 +4753,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4779,7 +4779,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4805,7 +4805,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4831,7 +4831,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 
\N @@ -4857,7 +4857,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4883,7 +4883,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4909,7 +4909,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4935,7 +4935,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4961,7 +4961,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4987,7 +4987,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5013,7 +5013,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5039,7 +5039,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5065,7 +5065,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5091,7 +5091,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5117,7 +5117,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -5143,7 +5143,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5169,7 +5169,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5195,7 +5195,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5221,7 +5221,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100.0 @@ -5247,7 +5247,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -5273,7 +5273,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -5299,7 +5299,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5325,7 +5325,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5351,7 +5351,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5377,7 +5377,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5403,7 +5403,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -5504,7 +5504,7 @@ false -- !select -- 1 \N -2 \N +2 1 3 1 4 1 5 1 @@ -5539,7 +5539,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null null 3 true null 4 false null 5 100 null @@ -5565,7 +5565,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null [null,null] 3 true [null,null] 4 false [null,null] 5 100 [null,null] @@ -5591,7 +5591,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null [null,null] 3 true [null,null] 4 false [null,null] 5 100 [null,null] @@ -5617,7 +5617,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5643,7 +5643,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null [null,null,null] 3 true [null,null,null] 4 false [null,null,null] 5 100 [null,null,null] diff --git a/regression-test/data/json_p0/test_json_load_unique_key_and_function.out b/regression-test/data/json_p0/test_json_load_unique_key_and_function.out index b8486e57a4496a..f63959c2e02aa3 100644 --- a/regression-test/data/json_p0/test_json_load_unique_key_and_function.out +++ b/regression-test/data/json_p0/test_json_load_unique_key_and_function.out @@ -1,7 +1,7 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !select -- 1 \N -2 \N +2 null 3 true 4 false 5 100 @@ -21,7 +21,7 @@ -- !select -- 1 \N -2 \N +2 null 3 true 4 false 5 100 @@ -44,7 +44,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -67,7 +67,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -90,7 +90,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -113,7 +113,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -136,7 +136,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -159,7 +159,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -182,7 +182,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -205,7 +205,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -228,7 +228,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -251,7 +251,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -274,7 +274,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -297,7 +297,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -320,7 +320,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -343,7 +343,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -366,7 +366,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -389,7 +389,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -412,7 +412,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -435,7 +435,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -458,7 +458,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -481,7 +481,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -504,7 +504,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -527,7 +527,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -550,7 +550,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -573,7 +573,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -596,7 +596,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -619,7 +619,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -642,7 +642,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -665,7 +665,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -688,7 +688,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -711,7 +711,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -734,7 +734,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -757,7 +757,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -780,7 +780,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -803,7 +803,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -826,7 +826,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -849,7 +849,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -872,7 +872,7 @@ -- !select -- 1 \N \N -2 \N \N 
+2 null \N 3 true \N 4 false \N 5 100 \N @@ -895,7 +895,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -918,7 +918,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -941,7 +941,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -964,7 +964,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -987,7 +987,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1010,7 +1010,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1033,7 +1033,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -1056,7 +1056,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1079,7 +1079,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1102,7 +1102,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1125,7 +1125,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1148,7 +1148,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1171,7 +1171,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1194,7 +1194,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1217,7 +1217,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1240,7 +1240,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1263,7 +1263,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1286,7 +1286,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1309,7 +1309,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1332,7 +1332,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1355,7 +1355,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1378,7 +1378,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1401,7 +1401,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1424,7 +1424,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1447,7 +1447,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -1470,7 +1470,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1493,7 +1493,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1516,7 +1516,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1539,7 +1539,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1562,7 +1562,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1585,7 +1585,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1608,7 +1608,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1631,7 +1631,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1654,7 +1654,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1677,7 +1677,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1700,7 +1700,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1723,7 +1723,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1746,7 +1746,7 @@ -- !select -- 1 \N \N 
-2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1769,7 +1769,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1792,7 +1792,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1815,7 +1815,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1838,7 +1838,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1861,7 +1861,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100.0 @@ -1884,7 +1884,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1907,7 +1907,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1930,7 +1930,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1953,7 +1953,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1976,7 +1976,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1999,7 +1999,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2022,7 +2022,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2045,7 +2045,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2068,7 +2068,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2091,7 +2091,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2114,7 +2114,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2137,7 +2137,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2160,7 +2160,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2183,7 +2183,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2206,7 +2206,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2229,7 +2229,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2252,7 +2252,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2275,7 +2275,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -2298,7 +2298,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2321,7 +2321,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2344,7 +2344,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2367,7 +2367,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2390,7 +2390,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2413,7 +2413,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2436,7 +2436,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2459,7 +2459,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2482,7 +2482,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2505,7 +2505,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2528,7 +2528,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2551,7 +2551,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2574,7 +2574,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2597,7 +2597,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2620,7 +2620,7 
@@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2643,7 +2643,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2666,7 +2666,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2689,7 +2689,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null true 3 true false 4 false false 5 100 false @@ -2712,7 +2712,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2735,7 +2735,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2758,7 +2758,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2781,7 +2781,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2804,7 +2804,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2827,7 +2827,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2850,7 +2850,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2873,7 +2873,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2896,7 +2896,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2919,7 +2919,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2942,7 +2942,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2965,7 +2965,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2988,7 +2988,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3011,7 +3011,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3034,7 +3034,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3057,7 +3057,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3080,7 +3080,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3103,7 +3103,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null true 3 true true 4 false true 5 100 true @@ -3126,7 +3126,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3149,7 +3149,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3172,7 +3172,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3195,7 +3195,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3218,7 +3218,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3241,7 +3241,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3264,7 +3264,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3287,7 +3287,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3310,7 +3310,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3333,7 +3333,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3356,7 +3356,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3379,7 +3379,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3402,7 +3402,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3425,7 +3425,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ 
-3448,7 +3448,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3471,7 +3471,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3494,7 +3494,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -3517,7 +3517,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true bool 4 false bool 5 100 int @@ -3540,7 +3540,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3563,7 +3563,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3586,7 +3586,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3609,7 +3609,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3632,7 +3632,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3655,7 +3655,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3678,7 +3678,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3701,7 +3701,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3724,7 +3724,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3747,7 +3747,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3770,7 +3770,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3793,7 +3793,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3816,7 +3816,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3839,7 +3839,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3862,7 +3862,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3885,7 +3885,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3908,7 +3908,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3931,7 +3931,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -3954,7 +3954,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -3977,7 +3977,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -4000,7 +4000,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -4023,7 +4023,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100.0 @@ -4046,7 +4046,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -4069,7 +4069,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -4092,7 +4092,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -4115,7 +4115,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -4138,7 +4138,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -4161,7 +4161,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -4184,7 +4184,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 diff --git a/regression-test/data/jsonb_p0/test_jsonb_load_and_function.out b/regression-test/data/jsonb_p0/test_jsonb_load_and_function.out index 87bc6c4a2a6e6e..b1731cab218005 100644 --- a/regression-test/data/jsonb_p0/test_jsonb_load_and_function.out +++ b/regression-test/data/jsonb_p0/test_jsonb_load_and_function.out @@ -1,7 +1,7 @@ -- This file is automatically 
generated. You should know what you did if you want to edit this -- !select -- 1 \N -2 \N +2 null 3 true 4 false 5 100 @@ -21,7 +21,7 @@ -- !select -- 1 \N -2 \N +2 null 3 true 4 false 5 100 @@ -47,7 +47,7 @@ -- !jsonb_extract_select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -73,7 +73,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -99,7 +99,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -125,7 +125,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -151,7 +151,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -177,7 +177,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -203,7 +203,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -229,7 +229,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -255,7 +255,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -281,7 +281,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -307,7 +307,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -333,7 +333,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -359,7 +359,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -385,7 +385,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -411,7 +411,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -437,7 +437,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -463,7 +463,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -489,7 +489,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -515,7 +515,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -541,7 +541,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -567,7 +567,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -593,7 +593,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -619,7 +619,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -645,7 +645,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -671,7 +671,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -697,7 +697,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -723,7 +723,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -749,7 +749,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -775,7 +775,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -801,7 +801,7 @@ -- !jsonb_extract_multipath -- 1 \N \N -2 \N \N +2 null [null,null,null,null] 3 true [true,null,null,null] 4 false [false,null,null,null] 5 100 [100,null,null,null] @@ -827,7 +827,7 @@ -- !jsonb_extract_string_select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -853,7 +853,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -879,7 +879,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -905,7 +905,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -931,7 +931,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ 
-957,7 +957,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -983,7 +983,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1009,7 +1009,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1035,7 +1035,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1061,7 +1061,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1087,7 +1087,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1113,7 +1113,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1139,7 +1139,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1165,7 +1165,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1191,7 +1191,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1217,7 +1217,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1243,7 +1243,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1269,7 +1269,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1295,7 +1295,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1321,7 +1321,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1347,7 +1347,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1373,7 +1373,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1399,7 +1399,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1425,7 +1425,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1451,7 +1451,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1477,7 +1477,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1503,7 +1503,7 @@ -- !jsonb_extract_int_select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -1529,7 +1529,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1555,7 +1555,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1581,7 +1581,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1607,7 +1607,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1633,7 +1633,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1659,7 +1659,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1685,7 +1685,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1711,7 +1711,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1737,7 +1737,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1763,7 +1763,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1789,7 +1789,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1815,7 +1815,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1841,7 +1841,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1867,7 +1867,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1893,7 +1893,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1919,7 +1919,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 
3 true \N 4 false \N 5 100 \N @@ -1945,7 +1945,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1971,7 +1971,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1997,7 +1997,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2023,7 +2023,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2049,7 +2049,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2075,7 +2075,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2101,7 +2101,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2127,7 +2127,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2153,7 +2153,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2179,7 +2179,7 @@ -- !jsonb_extract_bigint_select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -2205,7 +2205,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2231,7 +2231,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2257,7 +2257,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2283,7 +2283,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2309,7 +2309,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2335,7 +2335,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2361,7 +2361,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2387,7 +2387,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2413,7 +2413,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2439,7 +2439,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2465,7 +2465,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2491,7 +2491,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2517,7 +2517,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2543,7 +2543,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2569,7 +2569,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2595,7 +2595,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2621,7 +2621,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2647,7 +2647,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2673,7 +2673,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2699,7 +2699,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2725,7 +2725,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2751,7 +2751,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2777,7 +2777,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2803,7 +2803,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2829,7 +2829,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2855,7 +2855,7 @@ -- !jsonb_extract_largeint_select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -2881,7 +2881,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N 
@@ -2907,7 +2907,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2933,7 +2933,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2959,7 +2959,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2985,7 +2985,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3011,7 +3011,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3037,7 +3037,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3063,7 +3063,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3089,7 +3089,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3115,7 +3115,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3141,7 +3141,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3167,7 +3167,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3193,7 +3193,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3219,7 +3219,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3245,7 +3245,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3271,7 +3271,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3297,7 +3297,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3323,7 +3323,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3349,7 +3349,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3375,7 +3375,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3401,7 +3401,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3427,7 +3427,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3453,7 +3453,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3479,7 +3479,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3505,7 +3505,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3531,7 +3531,7 @@ -- !jsonb_extract_double_select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100.0 @@ -3557,7 +3557,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3583,7 +3583,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3609,7 +3609,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3635,7 +3635,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3661,7 +3661,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3687,7 +3687,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3713,7 +3713,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3739,7 +3739,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3765,7 +3765,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3791,7 +3791,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3817,7 +3817,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3843,7 +3843,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3869,7 +3869,7 @@ -- !select -- 1 \N \N -2 \N \N 
+2 null \N 3 true \N 4 false \N 5 100 \N @@ -3895,7 +3895,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3921,7 +3921,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3947,7 +3947,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3973,7 +3973,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3999,7 +3999,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4025,7 +4025,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4051,7 +4051,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4077,7 +4077,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4103,7 +4103,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4129,7 +4129,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4155,7 +4155,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4181,7 +4181,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4207,7 +4207,7 @@ -- !jsonb_extract_bool_select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -4233,7 +4233,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4259,7 +4259,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4285,7 +4285,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4311,7 +4311,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4337,7 +4337,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4363,7 +4363,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4389,7 +4389,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4415,7 +4415,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4441,7 +4441,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4467,7 +4467,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4493,7 +4493,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4519,7 +4519,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4545,7 +4545,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4571,7 +4571,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4597,7 +4597,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4623,7 +4623,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4649,7 +4649,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4675,7 +4675,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4701,7 +4701,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4727,7 +4727,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4753,7 +4753,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4779,7 +4779,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4805,7 +4805,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4831,7 +4831,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4857,7 
+4857,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4883,7 +4883,7 @@ -- !jsonb_extract_isnull_select -- 1 \N \N -2 \N \N +2 null true 3 true false 4 false false 5 100 false @@ -4909,7 +4909,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4935,7 +4935,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4961,7 +4961,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4987,7 +4987,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5013,7 +5013,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5039,7 +5039,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5065,7 +5065,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5091,7 +5091,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5117,7 +5117,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5143,7 +5143,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5169,7 +5169,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5195,7 +5195,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5221,7 +5221,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5247,7 +5247,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5273,7 +5273,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5299,7 +5299,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5325,7 +5325,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5351,7 +5351,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5377,7 +5377,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5403,7 +5403,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5429,7 +5429,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5455,7 +5455,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5481,7 +5481,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5507,7 +5507,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5533,7 +5533,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5559,7 +5559,7 @@ -- !jsonb_exists_path_select -- 1 \N \N -2 \N \N +2 null true 3 true true 4 false true 5 100 true @@ -5585,7 +5585,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5611,7 +5611,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5637,7 +5637,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5663,7 +5663,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5689,7 +5689,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5715,7 +5715,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5741,7 +5741,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5767,7 +5767,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5793,7 +5793,7 @@ -- 
!select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5819,7 +5819,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5845,7 +5845,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5871,7 +5871,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5897,7 +5897,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5923,7 +5923,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5949,7 +5949,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -5975,7 +5975,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -6001,7 +6001,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -6027,7 +6027,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -6053,7 +6053,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -6079,7 +6079,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -6105,7 +6105,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -6131,7 +6131,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -6157,7 +6157,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -6183,7 +6183,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -6209,7 +6209,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -6235,7 +6235,7 @@ -- !jsonb_type_select -- 1 \N \N -2 \N \N +2 null null 3 true bool 4 false bool 5 100 int @@ -6261,7 +6261,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6287,7 +6287,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6313,7 +6313,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6339,7 +6339,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6365,7 +6365,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6391,7 +6391,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6417,7 +6417,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6443,7 +6443,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6469,7 +6469,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6495,7 +6495,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6521,7 +6521,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6547,7 +6547,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6573,7 +6573,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6599,7 +6599,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6625,7 +6625,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6651,7 +6651,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6677,7 +6677,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6703,7 +6703,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 
true \N 4 false \N 5 100 \N @@ -6729,7 +6729,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6755,7 +6755,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6781,7 +6781,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6807,7 +6807,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6833,7 +6833,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6859,7 +6859,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6885,7 +6885,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -6911,7 +6911,7 @@ -- !cast_from_select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -6937,7 +6937,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -6963,7 +6963,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -6989,7 +6989,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -7015,7 +7015,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100.0 @@ -7041,7 +7041,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -7067,7 +7067,7 @@ -- !cast_to_select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -7093,7 +7093,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -7119,7 +7119,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -7145,7 +7145,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -7171,7 +7171,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -7197,7 +7197,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -7298,7 +7298,7 @@ false -- !select -- 1 \N -2 \N +2 1 3 1 4 1 5 1 @@ -7333,7 +7333,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null null 3 true null 4 false null 5 100 null @@ -7359,7 +7359,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null [null,null] 3 true [null,null] 4 false [null,null] 5 100 [null,null] @@ -7385,7 +7385,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null [null,null] 3 true [null,null] 4 false [null,null] 5 100 [null,null] @@ -7411,7 +7411,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -7437,7 +7437,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null [null,null,null] 3 true [null,null,null] 4 false [null,null,null] 5 100 [null,null,null] @@ -7463,7 +7463,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null null 3 true null 4 false null 5 100 null @@ -7489,7 +7489,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null null 3 true null 4 false null 5 100 null @@ -7515,7 +7515,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -7541,7 +7541,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null null 3 true null 4 false null 5 100 null @@ -7567,7 +7567,7 @@ false -- !select -- 1 \N \N -2 \N \N +2 null null 3 true null 4 false null 5 100 null diff --git a/regression-test/data/jsonb_p0/test_jsonb_load_unique_key_and_function.out b/regression-test/data/jsonb_p0/test_jsonb_load_unique_key_and_function.out index 6eae6b35e5c123..632badf32e1de8 100644 --- a/regression-test/data/jsonb_p0/test_jsonb_load_unique_key_and_function.out +++ b/regression-test/data/jsonb_p0/test_jsonb_load_unique_key_and_function.out @@ -1,7 +1,7 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !select -- 1 \N -2 \N +2 null 3 true 4 false 5 100 @@ -21,7 +21,7 @@ -- !select -- 1 \N -2 \N +2 null 3 true 4 false 5 100 @@ -44,7 +44,7 @@ -- !jsonb_extract_select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -67,7 +67,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -90,7 +90,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -113,7 +113,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -136,7 +136,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -159,7 +159,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -182,7 +182,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -205,7 +205,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -228,7 +228,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -251,7 +251,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -274,7 +274,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -297,7 +297,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -320,7 +320,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -343,7 +343,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -366,7 +366,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -389,7 +389,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -412,7 +412,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -435,7 +435,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -458,7 +458,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -481,7 +481,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -504,7 +504,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -527,7 +527,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -550,7 +550,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -573,7 +573,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -596,7 +596,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -619,7 +619,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -642,7 +642,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -665,7 +665,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -688,7 +688,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -711,7 +711,7 @@ -- !jsonb_extract_multipath -- 1 \N \N -2 \N \N +2 null [null,null,null,null] 3 true [true,null,null,null] 4 false [false,null,null,null] 5 100 [100,null,null,null] @@ -734,7 +734,7 @@ -- !jsonb_extract_string_select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -757,7 +757,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -780,7 +780,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -803,7 +803,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -826,7 +826,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -849,7 +849,7 
@@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -872,7 +872,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -895,7 +895,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -918,7 +918,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -941,7 +941,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -964,7 +964,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -987,7 +987,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1010,7 +1010,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1033,7 +1033,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1056,7 +1056,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1079,7 +1079,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1102,7 +1102,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1125,7 +1125,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1148,7 +1148,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1171,7 +1171,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1194,7 +1194,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1217,7 +1217,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1240,7 +1240,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1263,7 +1263,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1286,7 +1286,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1309,7 +1309,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1332,7 +1332,7 @@ -- !jsonb_extract_int_select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -1355,7 +1355,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1378,7 +1378,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1401,7 +1401,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1424,7 +1424,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1447,7 +1447,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1470,7 +1470,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1493,7 +1493,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1516,7 +1516,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1539,7 +1539,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1562,7 +1562,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1585,7 +1585,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1608,7 +1608,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1631,7 +1631,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1654,7 +1654,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1677,7 +1677,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1700,7 +1700,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 
100 \N @@ -1723,7 +1723,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1746,7 +1746,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1769,7 +1769,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1792,7 +1792,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1815,7 +1815,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1838,7 +1838,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1861,7 +1861,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1884,7 +1884,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1907,7 +1907,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1930,7 +1930,7 @@ -- !jsonb_extract_bigint_select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -1953,7 +1953,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1976,7 +1976,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -1999,7 +1999,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2022,7 +2022,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2045,7 +2045,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2068,7 +2068,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2091,7 +2091,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2114,7 +2114,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2137,7 +2137,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2160,7 +2160,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2183,7 +2183,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2206,7 +2206,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2229,7 +2229,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2252,7 +2252,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2275,7 +2275,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2298,7 +2298,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2321,7 +2321,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2344,7 +2344,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2367,7 +2367,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2390,7 +2390,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2413,7 +2413,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2436,7 +2436,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2459,7 +2459,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2482,7 +2482,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2505,7 +2505,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2528,7 +2528,7 @@ -- !jsonb_extract_double_select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100.0 @@ -2551,7 +2551,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2574,7 +2574,7 @@ 
-- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2597,7 +2597,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2620,7 +2620,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2643,7 +2643,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2666,7 +2666,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2689,7 +2689,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2712,7 +2712,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2735,7 +2735,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2758,7 +2758,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2781,7 +2781,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2804,7 +2804,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2827,7 +2827,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2850,7 +2850,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2873,7 +2873,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2896,7 +2896,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2919,7 +2919,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2942,7 +2942,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2965,7 +2965,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -2988,7 +2988,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3011,7 +3011,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3034,7 +3034,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3057,7 +3057,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3080,7 +3080,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3103,7 +3103,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3126,7 +3126,7 @@ -- !jsonb_extract_bool_select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -3149,7 +3149,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3172,7 +3172,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3195,7 +3195,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3218,7 +3218,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3241,7 +3241,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3264,7 +3264,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3287,7 +3287,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3310,7 +3310,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3333,7 +3333,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3356,7 +3356,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3379,7 +3379,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3402,7 +3402,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3425,7 +3425,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 
4 false \N 5 100 \N @@ -3448,7 +3448,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3471,7 +3471,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3494,7 +3494,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3517,7 +3517,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3540,7 +3540,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3563,7 +3563,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3586,7 +3586,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3609,7 +3609,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3632,7 +3632,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3655,7 +3655,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3678,7 +3678,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3701,7 +3701,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3724,7 +3724,7 @@ -- !jsonb_extract_isnull_select -- 1 \N \N -2 \N \N +2 null true 3 true false 4 false false 5 100 false @@ -3747,7 +3747,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3770,7 +3770,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3793,7 +3793,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3816,7 +3816,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3839,7 +3839,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3862,7 +3862,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3885,7 +3885,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3908,7 +3908,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3931,7 +3931,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3954,7 +3954,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -3977,7 +3977,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4000,7 +4000,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4023,7 +4023,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4046,7 +4046,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4069,7 +4069,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4092,7 +4092,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4115,7 +4115,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4138,7 +4138,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4161,7 +4161,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4184,7 +4184,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4207,7 +4207,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4230,7 +4230,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4253,7 +4253,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4276,7 +4276,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4299,7 +4299,7 @@ -- 
!select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4322,7 +4322,7 @@ -- !jsonb_exists_path_select -- 1 \N \N -2 \N \N +2 null true 3 true true 4 false true 5 100 true @@ -4345,7 +4345,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4368,7 +4368,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4391,7 +4391,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4414,7 +4414,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4437,7 +4437,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4460,7 +4460,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4483,7 +4483,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4506,7 +4506,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4529,7 +4529,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4552,7 +4552,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4575,7 +4575,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4598,7 +4598,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4621,7 +4621,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4644,7 +4644,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4667,7 +4667,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4690,7 +4690,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4713,7 +4713,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4736,7 +4736,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4759,7 +4759,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4782,7 +4782,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4805,7 +4805,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4828,7 +4828,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4851,7 +4851,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4874,7 +4874,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4897,7 +4897,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null false 3 true false 4 false false 5 100 false @@ -4920,7 +4920,7 @@ -- !jsonb_type_select -- 1 \N \N -2 \N \N +2 null null 3 true bool 4 false bool 5 100 int @@ -4943,7 +4943,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4966,7 +4966,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -4989,7 +4989,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5012,7 +5012,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5035,7 +5035,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5058,7 +5058,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5081,7 +5081,7 @@ -- !select -- 1 
\N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5104,7 +5104,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5127,7 +5127,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5150,7 +5150,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5173,7 +5173,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5196,7 +5196,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5219,7 +5219,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5242,7 +5242,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5265,7 +5265,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5288,7 +5288,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5311,7 +5311,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5334,7 +5334,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5357,7 +5357,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5380,7 +5380,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5403,7 +5403,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5426,7 +5426,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5449,7 +5449,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5472,7 +5472,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5495,7 +5495,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 \N @@ -5518,7 +5518,7 @@ -- !cast_from_select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -5541,7 +5541,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5564,7 +5564,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5587,7 +5587,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5610,7 +5610,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100.0 @@ -5633,7 +5633,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 @@ -5656,7 +5656,7 @@ -- !cast_to_select -- 1 \N \N -2 \N \N +2 null \N 3 true true 4 false false 5 100 \N @@ -5679,7 +5679,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5702,7 +5702,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5725,7 +5725,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5748,7 +5748,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null \N 3 true \N 4 false \N 5 100 100 @@ -5771,7 +5771,7 @@ -- !select -- 1 \N \N -2 \N \N +2 null null 3 true true 4 false false 5 100 100 diff --git a/regression-test/data/load_p0/stream_load/test_map_load_and_function.out b/regression-test/data/load_p0/stream_load/test_map_load_and_function.out index b87c209f710fe2..4b742ac853206b 100644 --- a/regression-test/data/load_p0/stream_load/test_map_load_and_function.out +++ b/regression-test/data/load_p0/stream_load/test_map_load_and_function.out @@ -10,9 +10,9 @@ 8 {} 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} 10 {} -11 {"k1":4, "k2":400} +11 {""k1'":4, "k2":400} 12 {"k3":23, NULL:20, "k4":NULL} -13 {NULL:1} +13 {"null":1} 15 \N 16 {NULL:NULL} @@ -46,9 +46,9 @@ 8 {} 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} 10 {} 
-11 {"k1":4, "k2":400} +11 {""k1'":4, "k2":400} 12 {"k3":23, NULL:20, "k4":NULL} -13 {NULL:1} +13 {"null":1} 15 \N 16 {NULL:NULL} 17 \N @@ -125,9 +125,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} 4 +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -144,9 +144,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} 400 +11 {""k1'":4, "k2":400} 400 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -163,9 +163,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -182,9 +182,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -201,9 +201,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -220,9 +220,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -239,9 +239,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -258,9 +258,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -277,9 +277,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} 2 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -296,9 +296,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} 90 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -315,9 +315,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} 1 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -334,9 +334,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -353,9 +353,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} 20 -13 {NULL:1} 1 +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -372,9 +372,9 @@ k22 8 {} \N 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} \N 10 {} \N -11 {"k1":4, "k2":400} \N +11 {""k1'":4, "k2":400} \N 12 {"k3":23, NULL:20, "k4":NULL} \N -13 {NULL:1} \N +13 {"null":1} \N 15 \N \N 16 {NULL:NULL} \N 17 \N \N @@ -394,9 +394,9 @@ k22 8 {} 0 9 
{" 1,amy ":2, " k2 ":90, " k7 ":33} 3 10 {} 0 -11 {"k1":4, "k2":400} 2 +11 {""k1'":4, "k2":400} 2 12 {"k3":23, NULL:20, "k4":NULL} 3 -13 {NULL:1} 1 +13 {"null":1} 1 15 \N \N 16 {NULL:NULL} 1 17 \N \N @@ -428,9 +428,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} true +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -447,9 +447,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} true +11 {""k1'":4, "k2":400} true 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -466,9 +466,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -485,9 +485,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -504,9 +504,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -523,9 +523,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -542,9 +542,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -561,9 +561,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -580,9 +580,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} true 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -599,9 +599,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} true 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -618,9 +618,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} true 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -637,9 +637,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -656,9 +656,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, 
"k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} true -13 {NULL:1} true +13 {"null":1} false 15 \N \N 16 {NULL:NULL} true 17 \N \N @@ -675,9 +675,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -706,9 +706,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} true -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -725,9 +725,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} true -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -744,9 +744,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -763,9 +763,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -782,9 +782,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -801,9 +801,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -820,9 +820,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} true +11 {""k1'":4, "k2":400} true 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -839,9 +839,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} true 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -858,9 +858,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} true 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -877,9 +877,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -896,9 +896,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} true 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -915,9 +915,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} true 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} 
false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -934,9 +934,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} true +13 {"null":1} true 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -953,9 +953,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} true 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -972,9 +972,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -991,9 +991,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} false -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} false 17 \N \N @@ -1010,9 +1010,9 @@ false 8 {} false 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} false 10 {} false -11 {"k1":4, "k2":400} false +11 {""k1'":4, "k2":400} false 12 {"k3":23, NULL:20, "k4":NULL} true -13 {NULL:1} false +13 {"null":1} false 15 \N \N 16 {NULL:NULL} true 17 \N \N @@ -1032,9 +1032,9 @@ false 8 {} [] 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} [" 1,amy ", " k2 ", " k7 "] 10 {} [] -11 {"k1":4, "k2":400} ["k1", "k2"] +11 {""k1'":4, "k2":400} [""k1'", "k2"] 12 {"k3":23, NULL:20, "k4":NULL} ["k3", NULL, "k4"] -13 {NULL:1} [NULL] +13 {"null":1} ["null"] 15 \N \N 16 {NULL:NULL} [NULL] 17 \N \N @@ -1054,9 +1054,9 @@ false 8 {} [] 9 {" 1,amy ":2, " k2 ":90, " k7 ":33} [2, 90, 33] 10 {} [] -11 {"k1":4, "k2":400} [4, 400] +11 {""k1'":4, "k2":400} [4, 400] 12 {"k3":23, NULL:20, "k4":NULL} [23, 20, NULL] -13 {NULL:1} [1] +13 {"null":1} [1] 15 \N \N 16 {NULL:NULL} [NULL] 17 \N \N diff --git a/regression-test/data/nereids_function_p0/agg_function/agg.out b/regression-test/data/nereids_function_p0/agg_function/agg.out index 1bae584dbd4cb7..83a9d43e75e298 100644 --- a/regression-test/data/nereids_function_p0/agg_function/agg.out +++ b/regression-test/data/nereids_function_p0/agg_function/agg.out @@ -323,7 +323,7 @@ -- !sql_avg_Double_agg_phase_3_notnull -- 0 \N -7 0.39999999999999997 +7 0.4000000000000001 5 1.0 -- !sql_avg_Double_agg_phase_4_notnull -- @@ -419,11 +419,11 @@ 1 12.0 -- !sql_avg_weighted_TinyInt_Double_agg_phase_2 -- -12 8.333333333333334 +12 8.333333333333332 -- !sql_avg_weighted_TinyInt_Double_agg_phase_3 -- 0 \N -7 5.0 +7 4.999999999999999 5 10.2 -- !sql_avg_weighted_TinyInt_Double_agg_phase_4 -- @@ -451,23 +451,23 @@ 1 12.0 -- !sql_avg_weighted_TinyInt_Double_agg_phase_2_notnull -- -12 8.333333333333332 +12 8.333333333333334 -- !sql_avg_weighted_TinyInt_Double_agg_phase_3_notnull -- 0 \N -7 5.0 -5 10.199999999999998 +7 4.999999999999999 +5 10.2 -- !sql_avg_weighted_TinyInt_Double_agg_phase_4_notnull -- 12 8.333333333333334 -- !sql_avg_weighted_SmallInt_Double_gb -- \N -5.0 +4.999999999999999 10.2 -- !sql_avg_weighted_SmallInt_Double -- -8.333333333333332 +8.333333333333334 -- !sql_avg_weighted_SmallInt_Double_agg_phase_1 -- 0 \N @@ -489,7 +489,7 @@ -- !sql_avg_weighted_SmallInt_Double_agg_phase_3 -- 0 \N -7 5.0 +7 4.999999999999999 5 10.2 -- !sql_avg_weighted_SmallInt_Double_agg_phase_4 -- @@ -500,7 +500,7 
@@ 10.2 -- !sql_avg_weighted_SmallInt_Double_notnull -- -8.333333333333332 +8.333333333333334 -- !sql_avg_weighted_SmallInt_Double_agg_phase_1_notnull -- 1 1.0 @@ -517,11 +517,11 @@ 1 12.0 -- !sql_avg_weighted_SmallInt_Double_agg_phase_2_notnull -- -12 8.333333333333332 +12 8.333333333333334 -- !sql_avg_weighted_SmallInt_Double_agg_phase_3_notnull -- 0 \N -7 4.999999999999999 +7 5.0 5 10.2 -- !sql_avg_weighted_SmallInt_Double_agg_phase_4_notnull -- @@ -551,7 +551,7 @@ 1 12.0 -- !sql_avg_weighted_Integer_Double_agg_phase_2 -- -12 8.333333333333334 +12 8.333333333333332 -- !sql_avg_weighted_Integer_Double_agg_phase_3 -- 0 \N @@ -566,7 +566,7 @@ 10.2 -- !sql_avg_weighted_Integer_Double_notnull -- -8.333333333333334 +8.333333333333332 -- !sql_avg_weighted_Integer_Double_agg_phase_1_notnull -- 1 1.0 @@ -583,7 +583,7 @@ 1 12.0 -- !sql_avg_weighted_Integer_Double_agg_phase_2_notnull -- -12 8.333333333333334 +12 8.333333333333332 -- !sql_avg_weighted_Integer_Double_agg_phase_3_notnull -- 0 \N @@ -591,11 +591,11 @@ 5 10.2 -- !sql_avg_weighted_Integer_Double_agg_phase_4_notnull -- -12 8.333333333333332 +12 8.333333333333334 -- !sql_avg_weighted_BigInt_Double_gb -- \N -4.999999999999999 +5.0 10.2 -- !sql_avg_weighted_BigInt_Double -- @@ -621,18 +621,18 @@ -- !sql_avg_weighted_BigInt_Double_agg_phase_3 -- 0 \N -7 5.0 -5 10.199999999999998 +7 4.999999999999999 +5 10.2 -- !sql_avg_weighted_BigInt_Double_agg_phase_4 -- -12 8.333333333333332 +12 8.333333333333334 -- !sql_avg_weighted_BigInt_Double_gb_notnull -- 5.0 10.2 -- !sql_avg_weighted_BigInt_Double_notnull -- -8.333333333333332 +8.333333333333334 -- !sql_avg_weighted_BigInt_Double_agg_phase_1_notnull -- 1 1.0 @@ -661,7 +661,7 @@ -- !sql_avg_weighted_Float_Double_gb -- \N -0.5000000045235667 +0.5000000045235666 1.0200000143051147 -- !sql_avg_weighted_Float_Double -- @@ -683,7 +683,7 @@ 1 1.2000000476837158 -- !sql_avg_weighted_Float_Double_agg_phase_2 -- -12 0.8333333441271231 +12 0.833333344127123 -- !sql_avg_weighted_Float_Double_agg_phase_3 -- 0 \N @@ -691,14 +691,14 @@ 5 1.0200000143051144 -- !sql_avg_weighted_Float_Double_agg_phase_4 -- -12 0.833333344127123 +12 0.8333333441271231 -- !sql_avg_weighted_Float_Double_gb_notnull -- 0.5000000045235667 -1.0200000143051144 +1.0200000143051147 -- !sql_avg_weighted_Float_Double_notnull -- -0.833333344127123 +0.8333333441271231 -- !sql_avg_weighted_Float_Double_agg_phase_1_notnull -- 1 0.10000000149011612 @@ -754,10 +754,10 @@ -- !sql_avg_weighted_Double_Double_agg_phase_3 -- 0 \N 7 0.5000000000000001 -5 1.0199999999999998 +5 1.02 -- !sql_avg_weighted_Double_Double_agg_phase_4 -- -12 0.8333333333333335 +12 0.8333333333333333 -- !sql_avg_weighted_Double_Double_gb_notnull -- 0.5 @@ -781,11 +781,11 @@ 1 1.2 -- !sql_avg_weighted_Double_Double_agg_phase_2_notnull -- -12 0.8333333333333333 +12 0.8333333333333335 -- !sql_avg_weighted_Double_Double_agg_phase_3_notnull -- 0 \N -7 0.49999999999999994 +7 0.5000000000000001 5 1.02 -- !sql_avg_weighted_Double_Double_agg_phase_4_notnull -- @@ -797,7 +797,7 @@ 1.02 -- !sql_avg_weighted_DecimalV2_Double -- -0.8333333333333333 +0.8333333333333334 -- !sql_avg_weighted_DecimalV2_Double_agg_phase_1 -- 0 \N @@ -819,14 +819,14 @@ -- !sql_avg_weighted_DecimalV2_Double_agg_phase_3 -- 0 \N -7 0.5000000000000001 +7 0.5 5 1.02 -- !sql_avg_weighted_DecimalV2_Double_agg_phase_4 -- -12 0.8333333333333334 +12 0.8333333333333333 -- !sql_avg_weighted_DecimalV2_Double_gb_notnull -- -0.49999999999999994 +0.5 1.02 -- !sql_avg_weighted_DecimalV2_Double_notnull -- @@ -851,7 +851,7 @@ -- 
!sql_avg_weighted_DecimalV2_Double_agg_phase_3_notnull -- 0 \N -7 0.5000000000000001 +7 0.49999999999999994 5 1.02 -- !sql_avg_weighted_DecimalV2_Double_agg_phase_4_notnull -- @@ -3280,7 +3280,7 @@ true 1.4142135623730951 -- !sql_stddev_TinyInt -- -3.452052529534663 +3.452052529534664 -- !sql_stddev_TinyInt_agg_phase_1 -- 0 \N @@ -3302,14 +3302,14 @@ true -- !sql_stddev_TinyInt_agg_phase_3 -- 0 \N -7 1.9999999999999998 +7 2.0 5 1.4142135623730951 -- !sql_stddev_TinyInt_agg_phase_4 -- 12 3.452052529534663 -- !sql_stddev_TinyInt_gb_notnull -- -2.0 +1.9999999999999998 1.4142135623730951 -- !sql_stddev_TinyInt_notnull -- @@ -3335,7 +3335,7 @@ true -- !sql_stddev_TinyInt_agg_phase_3_notnull -- 0 \N 7 2.0 -5 1.4142135623730951 +5 1.4142135623730954 -- !sql_stddev_TinyInt_agg_phase_4_notnull -- 12 3.452052529534663 @@ -3368,8 +3368,8 @@ true -- !sql_stddev_SmallInt_agg_phase_3 -- 0 \N -7 2.0 -5 1.4142135623730951 +7 1.9999999999999998 +5 1.414213562373095 -- !sql_stddev_SmallInt_agg_phase_4 -- 12 3.452052529534663 @@ -3412,7 +3412,7 @@ true 1.4142135623730951 -- !sql_stddev_Integer -- -3.452052529534663 +3.452052529534664 -- !sql_stddev_Integer_agg_phase_1 -- 0 \N @@ -3434,18 +3434,18 @@ true -- !sql_stddev_Integer_agg_phase_3 -- 0 \N -7 2.0 -5 1.4142135623730954 +7 1.9999999999999998 +5 1.4142135623730951 -- !sql_stddev_Integer_agg_phase_4 -- 12 3.452052529534663 -- !sql_stddev_Integer_gb_notnull -- -1.9999999999999998 +2.0 1.4142135623730951 -- !sql_stddev_Integer_notnull -- -3.452052529534663 +3.452052529534664 -- !sql_stddev_Integer_agg_phase_1_notnull -- 1 0.0 @@ -3466,7 +3466,7 @@ true -- !sql_stddev_Integer_agg_phase_3_notnull -- 0 \N -7 1.9999999999999998 +7 2.0 5 1.4142135623730951 -- !sql_stddev_Integer_agg_phase_4_notnull -- @@ -3606,7 +3606,7 @@ true -- !sql_stddev_Double_gb -- \N -0.19999999999999996 +0.19999999999999998 0.1414213562373095 -- !sql_stddev_Double -- @@ -3633,10 +3633,10 @@ true -- !sql_stddev_Double_agg_phase_3 -- 0 \N 7 0.19999999999999998 -5 0.1414213562373095 +5 0.14142135623730948 -- !sql_stddev_Double_agg_phase_4 -- -12 0.3452052529534663 +12 0.3452052529534664 -- !sql_stddev_Double_gb_notnull -- 0.19999999999999998 @@ -3665,14 +3665,14 @@ true -- !sql_stddev_Double_agg_phase_3_notnull -- 0 \N 7 0.19999999999999998 -5 0.1414213562373095 +5 0.14142135623730945 -- !sql_stddev_Double_agg_phase_4_notnull -- 12 0.3452052529534663 -- !sql_stddev_DecimalV2_gb -- \N -0.19999999999999998 +0.2 0.14142135623730948 -- !sql_stddev_DecimalV2_agg_phase_1 -- @@ -3691,18 +3691,18 @@ true 1 0.0 -- !sql_stddev_DecimalV2_agg_phase_2 -- -12 0.3452052529534663 +12 0.3452052529534664 -- !sql_stddev_DecimalV2_agg_phase_3 -- 0 \N -7 0.19999999999999996 -5 0.1414213562373095 +7 0.19999999999999998 +5 0.14142135623730948 -- !sql_stddev_DecimalV2_agg_phase_4 -- 12 0.3452052529534663 -- !sql_stddev_DecimalV2_gb_notnull -- -0.19999999999999998 +0.2 0.1414213562373095 -- !sql_stddev_DecimalV2_agg_phase_1_notnull -- @@ -3724,7 +3724,7 @@ true -- !sql_stddev_DecimalV2_agg_phase_3_notnull -- 0 \N -7 0.19999999999999998 +7 0.2 5 0.1414213562373095 -- !sql_stddev_DecimalV2_agg_phase_4_notnull -- @@ -3758,7 +3758,7 @@ true -- !sql_stddev_samp_TinyInt_agg_phase_3 -- 0 \N -7 2.1602468994692865 +7 2.160246899469287 5 1.5811388300841898 -- !sql_stddev_samp_TinyInt_agg_phase_4 -- @@ -3824,7 +3824,7 @@ true -- !sql_stddev_samp_SmallInt_agg_phase_3 -- 0 \N -7 2.1602468994692865 +7 2.160246899469287 5 1.5811388300841898 -- !sql_stddev_samp_SmallInt_agg_phase_4 -- @@ -3835,7 +3835,7 @@ true 1.5811388300841898 
-- !sql_stddev_samp_SmallInt_notnull -- -3.605551275463989 +3.6055512754639896 -- !sql_stddev_samp_SmallInt_agg_phase_1_notnull -- 1 \N @@ -3857,18 +3857,18 @@ true -- !sql_stddev_samp_SmallInt_agg_phase_3_notnull -- 0 \N 7 2.1602468994692865 -5 1.5811388300841893 +5 1.5811388300841898 -- !sql_stddev_samp_SmallInt_agg_phase_4_notnull -- 12 3.605551275463989 -- !sql_stddev_samp_Integer_gb -- \N -2.1602468994692865 +2.160246899469287 1.5811388300841898 -- !sql_stddev_samp_Integer -- -3.6055512754639896 +3.605551275463989 -- !sql_stddev_samp_Integer_agg_phase_1 -- 0 \N @@ -3897,11 +3897,11 @@ true 12 3.605551275463989 -- !sql_stddev_samp_Integer_gb_notnull -- -2.160246899469287 +2.1602468994692865 1.5811388300841898 -- !sql_stddev_samp_Integer_notnull -- -3.605551275463989 +3.6055512754639896 -- !sql_stddev_samp_Integer_agg_phase_1_notnull -- 1 \N @@ -3930,11 +3930,11 @@ true -- !sql_stddev_samp_BigInt_gb -- \N -2.1602468994692865 +2.160246899469287 1.5811388300841898 -- !sql_stddev_samp_BigInt -- -3.6055512754639896 +3.605551275463989 -- !sql_stddev_samp_BigInt_agg_phase_1 -- 0 \N @@ -3956,7 +3956,7 @@ true -- !sql_stddev_samp_BigInt_agg_phase_3 -- 0 \N -7 2.1602468994692865 +7 2.160246899469287 5 1.5811388300841898 -- !sql_stddev_samp_BigInt_agg_phase_4 -- @@ -3988,7 +3988,7 @@ true -- !sql_stddev_samp_BigInt_agg_phase_3_notnull -- 0 \N -7 2.1602468994692865 +7 2.160246899469287 5 1.5811388300841898 -- !sql_stddev_samp_BigInt_agg_phase_4_notnull -- @@ -4022,14 +4022,14 @@ true -- !sql_stddev_samp_Float_agg_phase_3 -- 0 \N -7 0.2160246891421743 +7 0.21602468914217424 5 0.15811390185706375 -- !sql_stddev_samp_Float_agg_phase_4 -- 12 0.3605551333887302 -- !sql_stddev_samp_Float_gb_notnull -- -0.2160246891421743 +0.21602468914217424 0.15811390185706375 -- !sql_stddev_samp_Float_notnull -- @@ -4055,7 +4055,7 @@ true -- !sql_stddev_samp_Float_agg_phase_3_notnull -- 0 \N 7 0.2160246891421743 -5 0.15811390185706375 +5 0.15811390185706373 -- !sql_stddev_samp_Float_agg_phase_4_notnull -- 12 0.3605551333887302 @@ -4063,7 +4063,7 @@ true -- !sql_stddev_samp_Double_gb -- \N 0.21602468994692867 -0.15811388300841894 +0.15811388300841897 -- !sql_stddev_samp_Double -- 0.36055512754639896 @@ -4088,7 +4088,7 @@ true -- !sql_stddev_samp_Double_agg_phase_3 -- 0 \N -7 0.21602468994692864 +7 0.21602468994692867 5 0.15811388300841897 -- !sql_stddev_samp_Double_agg_phase_4 -- @@ -4096,7 +4096,7 @@ true -- !sql_stddev_samp_Double_gb_notnull -- 0.21602468994692867 -0.15811388300841892 +0.15811388300841894 -- !sql_stddev_samp_Double_notnull -- 0.36055512754639896 @@ -4120,7 +4120,7 @@ true -- !sql_stddev_samp_Double_agg_phase_3_notnull -- 0 \N -7 0.21602468994692867 +7 0.21602468994692864 5 0.15811388300841894 -- !sql_stddev_samp_Double_agg_phase_4_notnull -- @@ -4146,7 +4146,7 @@ true -- !sql_stddev_samp_DecimalV2_agg_phase_3 -- 0 \N -7 0.21602468994692867 +7 0.21602468994692864 5 0.15811388300841897 -- !sql_stddev_samp_DecimalV2_agg_phase_4 -- @@ -4172,7 +4172,7 @@ true -- !sql_stddev_samp_DecimalV2_agg_phase_3_notnull -- 0 \N 7 0.21602468994692867 -5 0.15811388300841892 +5 0.15811388300841897 -- !sql_stddev_samp_DecimalV2_agg_phase_4_notnull -- 12 0.36055512754639896 @@ -4487,11 +4487,11 @@ true -- !sql_sum_Double_agg_phase_3 -- 0 \N -7 2.8 +7 2.8000000000000003 5 5.0 -- !sql_sum_Double_agg_phase_4 -- -12 7.8 +12 7.800000000000001 -- !sql_sum_Double_gb_notnull -- 2.8 @@ -4515,11 +4515,11 @@ true 1 1.2 -- !sql_sum_Double_agg_phase_2_notnull -- -12 7.800000000000001 +12 7.8 -- !sql_sum_Double_agg_phase_3_notnull -- 0 
\N -7 2.8000000000000007 +7 2.8 5 5.0 -- !sql_sum_Double_agg_phase_4_notnull -- @@ -4658,7 +4658,7 @@ true 12 78 -- !sql_topn_Varchar_Integer_gb -- -\N +{"null":1} {"varchar11":3,"varchar13":2,"varchar12":2} {"varchar13":2,"varchar12":2,"varchar11":1} @@ -4666,7 +4666,7 @@ true {"varchar13":4,"varchar12":4,"varchar11":4} -- !sql_topn_Varchar_Integer_agg_phase_1 -- -0 \N +0 {"null":1} 1 {"varchar11":1} 1 {"varchar12":1} 1 {"varchar13":1} @@ -4684,7 +4684,7 @@ true 12 {"varchar13":4,"varchar12":4,"varchar11":4} -- !sql_topn_Varchar_Integer_agg_phase_3 -- -0 \N +0 {"null":1} 7 {"varchar11":3,"varchar13":2,"varchar12":2} 5 {"varchar13":2,"varchar12":2,"varchar11":1} @@ -4716,7 +4716,7 @@ true 12 {"varchar13":4,"varchar12":4,"varchar11":4} -- !sql_topn_Varchar_Integer_agg_phase_3_notnull -- -0 \N +0 {"null":1} 7 {"varchar11":3,"varchar13":2,"varchar12":2} 5 {"varchar13":2,"varchar12":2,"varchar11":1} @@ -4724,7 +4724,7 @@ true 12 {"varchar13":4,"varchar12":4,"varchar11":4} -- !sql_topn_String_Integer_gb -- -\N +{"null":1} {"string1":3,"string3":2,"string2":2} {"string3":2,"string2":2,"string1":1} @@ -4732,7 +4732,7 @@ true {"string3":4,"string2":4,"string1":4} -- !sql_topn_String_Integer_agg_phase_1 -- -0 \N +0 {"null":1} 1 {"string1":1} 1 {"string2":1} 1 {"string3":1} @@ -4750,7 +4750,7 @@ true 12 {"string3":4,"string2":4,"string1":4} -- !sql_topn_String_Integer_agg_phase_3 -- -0 \N +0 {"null":1} 7 {"string1":3,"string3":2,"string2":2} 5 {"string3":2,"string2":2,"string1":1} @@ -4782,7 +4782,7 @@ true 12 {"string3":4,"string2":4,"string1":4} -- !sql_topn_String_Integer_agg_phase_3_notnull -- -0 \N +0 {"null":1} 7 {"string1":3,"string3":2,"string2":2} 5 {"string3":2,"string2":2,"string1":1} @@ -4790,7 +4790,7 @@ true 12 {"string3":4,"string2":4,"string1":4} -- !sql_topn_Varchar_Integer_Integer_gb -- -\N +{"null":1} {"varchar11":3,"varchar13":2,"varchar12":2} {"varchar13":2,"varchar12":2,"varchar11":1} @@ -4798,7 +4798,7 @@ true {"varchar13":4,"varchar12":4,"varchar11":4} -- !sql_topn_Varchar_Integer_Integer_agg_phase_1 -- -0 \N +0 {"null":1} 1 {"varchar11":1} 1 {"varchar12":1} 1 {"varchar13":1} @@ -4816,7 +4816,7 @@ true 12 {"varchar13":4,"varchar12":4,"varchar11":4} -- !sql_topn_Varchar_Integer_Integer_agg_phase_3 -- -0 \N +0 {"null":1} 7 {"varchar11":3,"varchar13":2,"varchar12":2} 5 {"varchar13":2,"varchar12":2,"varchar11":1} @@ -4848,7 +4848,7 @@ true 12 {"varchar13":4,"varchar12":4,"varchar11":4} -- !sql_topn_Varchar_Integer_Integer_agg_phase_3_notnull -- -0 \N +0 {"null":1} 7 {"varchar11":3,"varchar13":2,"varchar12":2} 5 {"varchar13":2,"varchar12":2,"varchar11":1} @@ -4856,7 +4856,7 @@ true 12 {"varchar13":4,"varchar12":4,"varchar11":4} -- !sql_topn_String_Integer_Integer_gb -- -\N +{"null":1} {"string1":3,"string3":2,"string2":2} {"string3":2,"string2":2,"string1":1} @@ -4864,7 +4864,7 @@ true {"string3":4,"string2":4,"string1":4} -- !sql_topn_String_Integer_Integer_agg_phase_1 -- -0 \N +0 {"null":1} 1 {"string1":1} 1 {"string2":1} 1 {"string3":1} @@ -4882,7 +4882,7 @@ true 12 {"string3":4,"string2":4,"string1":4} -- !sql_topn_String_Integer_Integer_agg_phase_3 -- -0 \N +0 {"null":1} 7 {"string1":3,"string3":2,"string2":2} 5 {"string3":2,"string2":2,"string1":1} @@ -4914,7 +4914,7 @@ true 12 {"string3":4,"string2":4,"string1":4} -- !sql_topn_String_Integer_Integer_agg_phase_3_notnull -- -0 \N +0 {"null":1} 7 {"string1":3,"string3":2,"string2":2} 5 {"string3":2,"string2":2,"string1":1} @@ -4927,7 +4927,7 @@ true 2.0 -- !sql_variance_TinyInt -- -11.916666666666666 +11.91666666666667 -- 
!sql_variance_TinyInt_agg_phase_1 -- 0 \N @@ -4950,7 +4950,7 @@ true -- !sql_variance_TinyInt_agg_phase_3 -- 0 \N 7 4.0 -5 2.000000000000001 +5 2.0 -- !sql_variance_TinyInt_agg_phase_4 -- 12 11.916666666666666 @@ -4960,7 +4960,7 @@ true 2.0 -- !sql_variance_TinyInt_notnull -- -11.91666666666667 +11.916666666666666 -- !sql_variance_TinyInt_agg_phase_1_notnull -- 1 0.0 @@ -4981,7 +4981,7 @@ true -- !sql_variance_TinyInt_agg_phase_3_notnull -- 0 \N -7 4.0 +7 4.000000000000001 5 2.0 -- !sql_variance_TinyInt_agg_phase_4_notnull -- @@ -4993,7 +4993,7 @@ true 2.0 -- !sql_variance_SmallInt -- -11.91666666666667 +11.916666666666666 -- !sql_variance_SmallInt_agg_phase_1 -- 0 \N @@ -5026,7 +5026,7 @@ true 2.0 -- !sql_variance_SmallInt_notnull -- -11.91666666666667 +11.916666666666666 -- !sql_variance_SmallInt_agg_phase_1_notnull -- 1 0.0 @@ -5047,8 +5047,8 @@ true -- !sql_variance_SmallInt_agg_phase_3_notnull -- 0 \N -7 3.9999999999999996 -5 1.9999999999999993 +7 4.0 +5 2.0 -- !sql_variance_SmallInt_agg_phase_4_notnull -- 12 11.916666666666666 @@ -5059,7 +5059,7 @@ true 2.0 -- !sql_variance_Integer -- -11.91666666666667 +11.916666666666666 -- !sql_variance_Integer_agg_phase_1 -- 0 \N @@ -5081,7 +5081,7 @@ true -- !sql_variance_Integer_agg_phase_3 -- 0 \N -7 4.0 +7 4.000000000000001 5 2.0 -- !sql_variance_Integer_agg_phase_4 -- @@ -5092,7 +5092,7 @@ true 2.0 -- !sql_variance_Integer_notnull -- -11.91666666666667 +11.916666666666666 -- !sql_variance_Integer_agg_phase_1_notnull -- 1 0.0 @@ -5109,7 +5109,7 @@ true 1 0.0 -- !sql_variance_Integer_agg_phase_2_notnull -- -12 11.916666666666666 +12 11.916666666666664 -- !sql_variance_Integer_agg_phase_3_notnull -- 0 \N @@ -5125,7 +5125,7 @@ true 2.0 -- !sql_variance_BigInt -- -11.916666666666666 +11.91666666666667 -- !sql_variance_BigInt_agg_phase_1 -- 0 \N @@ -5147,18 +5147,18 @@ true -- !sql_variance_BigInt_agg_phase_3 -- 0 \N -7 4.000000000000001 -5 2.0 +7 3.9999999999999996 +5 1.9999999999999993 -- !sql_variance_BigInt_agg_phase_4 -- 12 11.916666666666666 -- !sql_variance_BigInt_gb_notnull -- -4.0 +3.9999999999999996 2.0 -- !sql_variance_BigInt_notnull -- -11.916666666666666 +11.91666666666667 -- !sql_variance_BigInt_agg_phase_1_notnull -- 1 0.0 @@ -5179,19 +5179,19 @@ true -- !sql_variance_BigInt_agg_phase_3_notnull -- 0 \N -7 4.0 -5 2.0 +7 3.9999999999999996 +5 1.9999999999999993 -- !sql_variance_BigInt_agg_phase_4_notnull -- 12 11.916666666666666 -- !sql_variance_Float_gb -- \N -0.039999999701976874 +0.03999999970197688 0.020000004768372152 -- !sql_variance_Float -- -0.11916667052855125 +0.11916667052855127 -- !sql_variance_Float_agg_phase_1 -- 0 \N @@ -5220,7 +5220,7 @@ true 12 0.11916667052855125 -- !sql_variance_Float_gb_notnull -- -0.03999999970197688 +0.039999999701976874 0.020000004768372152 -- !sql_variance_Float_notnull -- @@ -5253,11 +5253,11 @@ true -- !sql_variance_Double_gb -- \N -0.04 +0.03999999999999999 0.019999999999999997 -- !sql_variance_Double -- -0.11916666666666666 +0.11916666666666668 -- !sql_variance_Double_agg_phase_1 -- 0 \N @@ -5275,18 +5275,18 @@ true 1 0.0 -- !sql_variance_Double_agg_phase_2 -- -12 0.11916666666666668 +12 0.11916666666666666 -- !sql_variance_Double_agg_phase_3 -- 0 \N -7 0.039999999999999994 +7 0.03999999999999999 5 0.019999999999999997 -- !sql_variance_Double_agg_phase_4 -- -12 0.11916666666666666 +12 0.1191666666666667 -- !sql_variance_Double_gb_notnull -- -0.039999999999999994 +0.04 0.019999999999999997 -- !sql_variance_Double_notnull -- @@ -5307,19 +5307,19 @@ true 1 0.0 -- 
!sql_variance_Double_agg_phase_2_notnull -- -12 0.11916666666666666 +12 0.11916666666666668 -- !sql_variance_Double_agg_phase_3_notnull -- 0 \N -7 0.039999999999999994 +7 0.03999999999999999 5 0.02 -- !sql_variance_Double_agg_phase_4_notnull -- -12 0.11916666666666668 +12 0.11916666666666666 -- !sql_variance_DecimalV2_gb -- \N -0.04 +0.039999999999999994 0.019999999999999997 -- !sql_variance_DecimalV2 -- @@ -5341,22 +5341,22 @@ true 1 0.0 -- !sql_variance_DecimalV2_agg_phase_2 -- -12 0.11916666666666666 +12 0.11916666666666668 -- !sql_variance_DecimalV2_agg_phase_3 -- 0 \N -7 0.03999999999999999 -5 0.019999999999999997 +7 0.039999999999999994 +5 0.02 -- !sql_variance_DecimalV2_agg_phase_4 -- -12 0.11916666666666666 +12 0.1191666666666667 -- !sql_variance_DecimalV2_gb_notnull -- 0.03999999999999999 -0.02 +0.019999999999999997 -- !sql_variance_DecimalV2_notnull -- -0.11916666666666664 +0.11916666666666666 -- !sql_variance_DecimalV2_agg_phase_1_notnull -- 1 0.0 @@ -5378,10 +5378,10 @@ true -- !sql_variance_DecimalV2_agg_phase_3_notnull -- 0 \N 7 0.03999999999999999 -5 0.019999999999999997 +5 0.019999999999999993 -- !sql_variance_DecimalV2_agg_phase_4_notnull -- -12 0.11916666666666668 +12 0.11916666666666666 -- !sql_variance_samp_TinyInt_gb -- \N @@ -5389,7 +5389,7 @@ true 2.5 -- !sql_variance_samp_TinyInt -- -13.000000000000002 +13.0 -- !sql_variance_samp_TinyInt_agg_phase_1 -- 0 \N @@ -5407,11 +5407,11 @@ true 1 \N -- !sql_variance_samp_TinyInt_agg_phase_2 -- -12 13.0 +12 12.999999999999998 -- !sql_variance_samp_TinyInt_agg_phase_3 -- 0 \N -7 4.666666666666666 +7 4.666666666666667 5 2.5 -- !sql_variance_samp_TinyInt_agg_phase_4 -- @@ -5422,7 +5422,7 @@ true 2.5 -- !sql_variance_samp_TinyInt_notnull -- -13.0 +13.000000000000002 -- !sql_variance_samp_TinyInt_agg_phase_1_notnull -- 1 \N @@ -5443,7 +5443,7 @@ true -- !sql_variance_samp_TinyInt_agg_phase_3_notnull -- 0 \N -7 4.666666666666666 +7 4.666666666666667 5 2.5 -- !sql_variance_samp_TinyInt_agg_phase_4_notnull -- @@ -5477,7 +5477,7 @@ true -- !sql_variance_samp_SmallInt_agg_phase_3 -- 0 \N -7 4.666666666666668 +7 4.666666666666667 5 2.5 -- !sql_variance_samp_SmallInt_agg_phase_4 -- @@ -5488,7 +5488,7 @@ true 2.5 -- !sql_variance_samp_SmallInt_notnull -- -13.000000000000002 +13.0 -- !sql_variance_samp_SmallInt_agg_phase_1_notnull -- 1 \N @@ -5505,7 +5505,7 @@ true 1 \N -- !sql_variance_samp_SmallInt_agg_phase_2_notnull -- -12 13.0 +12 12.999999999999998 -- !sql_variance_samp_SmallInt_agg_phase_3_notnull -- 0 \N @@ -5513,7 +5513,7 @@ true 5 2.5 -- !sql_variance_samp_SmallInt_agg_phase_4_notnull -- -12 13.0 +12 12.999999999999998 -- !sql_variance_samp_Integer_gb -- \N @@ -5521,7 +5521,7 @@ true 2.5 -- !sql_variance_samp_Integer -- -13.000000000000002 +13.0 -- !sql_variance_samp_Integer_agg_phase_1 -- 0 \N @@ -5543,7 +5543,7 @@ true -- !sql_variance_samp_Integer_agg_phase_3 -- 0 \N -7 4.666666666666668 +7 4.666666666666667 5 2.5 -- !sql_variance_samp_Integer_agg_phase_4 -- @@ -5554,7 +5554,7 @@ true 2.5 -- !sql_variance_samp_Integer_notnull -- -13.0 +13.000000000000002 -- !sql_variance_samp_Integer_agg_phase_1_notnull -- 1 \N @@ -5571,23 +5571,23 @@ true 1 \N -- !sql_variance_samp_Integer_agg_phase_2_notnull -- -12 13.0 +12 12.999999999999998 -- !sql_variance_samp_Integer_agg_phase_3_notnull -- 0 \N -7 4.666666666666666 -5 2.5000000000000004 +7 4.666666666666667 +5 2.5 -- !sql_variance_samp_Integer_agg_phase_4_notnull -- 12 13.0 -- !sql_variance_samp_BigInt_gb -- \N -4.666666666666667 +4.666666666666666 2.5 -- !sql_variance_samp_BigInt -- 
-13.0 +13.000000000000002 -- !sql_variance_samp_BigInt_agg_phase_1 -- 0 \N @@ -5609,8 +5609,8 @@ true -- !sql_variance_samp_BigInt_agg_phase_3 -- 0 \N -7 4.666666666666667 -5 2.5000000000000004 +7 4.666666666666666 +5 2.499999999999999 -- !sql_variance_samp_BigInt_agg_phase_4 -- 12 13.0 @@ -5620,7 +5620,7 @@ true 2.5 -- !sql_variance_samp_BigInt_notnull -- -13.000000000000002 +13.0 -- !sql_variance_samp_BigInt_agg_phase_1_notnull -- 1 \N @@ -5641,8 +5641,8 @@ true -- !sql_variance_samp_BigInt_agg_phase_3_notnull -- 0 \N -7 4.666666666666667 -5 2.5000000000000004 +7 4.666666666666668 +5 2.5 -- !sql_variance_samp_BigInt_agg_phase_4_notnull -- 12 13.0 @@ -5676,14 +5676,14 @@ true -- !sql_variance_samp_Float_agg_phase_3 -- 0 \N 7 0.04666666631897303 -5 0.02500000596046519 +5 0.025000005960465182 -- !sql_variance_samp_Float_agg_phase_4 -- 12 0.130000004212965 -- !sql_variance_samp_Float_gb_notnull -- -0.04666666631897303 -0.025000005960465185 +0.04666666631897302 +0.02500000596046519 -- !sql_variance_samp_Float_notnull -- 0.130000004212965 @@ -5707,8 +5707,8 @@ true -- !sql_variance_samp_Float_agg_phase_3_notnull -- 0 \N -7 0.04666666631897303 -5 0.025000005960465182 +7 0.04666666631897302 +5 0.025000005960465192 -- !sql_variance_samp_Float_agg_phase_4_notnull -- 12 0.130000004212965 @@ -5719,7 +5719,7 @@ true 0.024999999999999994 -- !sql_variance_samp_Double -- -0.12999999999999998 +0.13 -- !sql_variance_samp_Double_agg_phase_1 -- 0 \N @@ -5737,19 +5737,19 @@ true 1 \N -- !sql_variance_samp_Double_agg_phase_2 -- -12 0.13 +12 0.13000000000000003 -- !sql_variance_samp_Double_agg_phase_3 -- 0 \N 7 0.046666666666666655 -5 0.025 +5 0.02499999999999999 -- !sql_variance_samp_Double_agg_phase_4 -- -12 0.13 +12 0.13000000000000003 -- !sql_variance_samp_Double_gb_notnull -- -0.04666666666666667 -0.024999999999999994 +0.04666666666666666 +0.024999999999999988 -- !sql_variance_samp_Double_notnull -- 0.13 @@ -5773,11 +5773,11 @@ true -- !sql_variance_samp_Double_agg_phase_3_notnull -- 0 \N -7 0.04666666666666666 -5 0.02499999999999999 +7 0.04666666666666667 +5 0.025 -- !sql_variance_samp_Double_agg_phase_4_notnull -- -12 0.13 +12 0.13000000000000003 -- !sql_variance_samp_DecimalV2_agg_phase_1 -- 0 \N @@ -5799,8 +5799,8 @@ true -- !sql_variance_samp_DecimalV2_agg_phase_3 -- 0 \N -7 0.04666666666666666 -5 0.025 +7 0.046666666666666655 +5 0.02499999999999999 -- !sql_variance_samp_DecimalV2_agg_phase_4 -- 12 0.13 @@ -5824,8 +5824,8 @@ true -- !sql_variance_samp_DecimalV2_agg_phase_3_notnull -- 0 \N -7 0.04666666666666666 -5 0.025 +7 0.046666666666666655 +5 0.024999999999999994 -- !sql_variance_samp_DecimalV2_agg_phase_4_notnull -- 12 0.13 diff --git a/regression-test/data/nereids_function_p0/scalar_function/A.out b/regression-test/data/nereids_function_p0/scalar_function/A.out index 1cfd0498c37c5e..13d9bcccb6e1bd 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/A.out +++ b/regression-test/data/nereids_function_p0/scalar_function/A.out @@ -319,7 +319,7 @@ nan \N -- !sql_ascii_Varchar -- -\N +110 118 118 118 @@ -348,7 +348,7 @@ nan 118 -- !sql_ascii_String -- -\N +110 115 115 115 diff --git a/regression-test/data/nereids_function_p0/scalar_function/Array.out b/regression-test/data/nereids_function_p0/scalar_function/Array.out index 63128f0acef76b..4c5c5b93d9c9a8 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/Array.out +++ b/regression-test/data/nereids_function_p0/scalar_function/Array.out @@ -9541,7 +9541,6 @@ true 2 -- !sql_split_by_string_Char -- -\N ["char11"] 
["char11"] ["char11"] @@ -9554,6 +9553,7 @@ true ["char13"] ["char13"] ["char13"] +["null"] -- !sql_split_by_string_Char_notnull -- ["char11"] @@ -9570,7 +9570,7 @@ true ["char13"] -- !sql_split_by_string_VarChar -- -\N +["null"] ["varchar11"] ["varchar11"] ["varchar11"] @@ -9599,7 +9599,7 @@ true ["varchar13"] -- !sql_split_by_string_String -- -\N +["null"] ["string1"] ["string1"] ["string1"] @@ -9628,7 +9628,6 @@ true ["string3"] -- !sql_tokenize_Char -- -\N ["char11"] ["char11"] ["char11"] @@ -9641,6 +9640,7 @@ true ["char13"] ["char13"] ["char13"] +["null"] -- !sql_tokenize_Char_notnull -- ["char11"] @@ -9686,7 +9686,7 @@ true ["varchar13"] -- !sql_tokenize_String -- -\N +["null"] ["string1"] ["string1"] ["string1"] diff --git a/regression-test/data/nereids_function_p0/scalar_function/B.out b/regression-test/data/nereids_function_p0/scalar_function/B.out index 9866b06b929a62..04a79b36a7a378 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/B.out +++ b/regression-test/data/nereids_function_p0/scalar_function/B.out @@ -29,7 +29,7 @@ 1100 -- !sql_bit_length_Varchar -- -\N +32 72 72 72 @@ -58,7 +58,7 @@ 72 -- !sql_bit_length_String -- -\N +32 56 56 56 diff --git a/regression-test/data/nereids_function_p0/scalar_function/C.out b/regression-test/data/nereids_function_p0/scalar_function/C.out index 02d99dc77a7ecb..42a41c21b0bc86 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/C.out +++ b/regression-test/data/nereids_function_p0/scalar_function/C.out @@ -232,7 +232,7 @@ 1.2 -- !sql_character_length_Varchar -- -\N +4 9 9 9 @@ -261,7 +261,7 @@ 9 -- !sql_character_length_String -- -\N +4 7 7 7 @@ -696,7 +696,7 @@ true \N -- !sql_coalesce_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -725,7 +725,7 @@ varchar13 varchar13 -- !sql_coalesce_String -- -\N +null string1 string1 string1 @@ -754,7 +754,7 @@ string3 string3 -- !sql_concat_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -783,7 +783,7 @@ varchar13 varchar13 -- !sql_concat_String -- -\N +null string1 string1 string1 @@ -812,7 +812,7 @@ string3 string3 -- !sql_concat_ws_Varchar -- - +null varchar11 varchar11 varchar11 @@ -841,7 +841,7 @@ varchar13 varchar13 -- !sql_concat_ws_String -- - +null string1 string1 string1 @@ -986,7 +986,7 @@ char13, char23, char33, varchar13, varchar23, varchar33 0 -- !sql_convert_to_Varchar_Varchar -- -\N +null varchar11 varchar11 varchar11 diff --git a/regression-test/data/nereids_function_p0/scalar_function/D.out b/regression-test/data/nereids_function_p0/scalar_function/D.out index 9d9ed55dd4441f..ec685e5982b5df 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/D.out +++ b/regression-test/data/nereids_function_p0/scalar_function/D.out @@ -2842,7 +2842,7 @@ Monday 0.07918124604762482 -- !sql_domain_String -- -\N + @@ -2871,7 +2871,7 @@ Monday -- !sql_domain_without_www_String -- -\N + diff --git a/regression-test/data/nereids_function_p0/scalar_function/E.out b/regression-test/data/nereids_function_p0/scalar_function/E.out index 9aebea9fdce2c4..f543bd22d9be7d 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/E.out +++ b/regression-test/data/nereids_function_p0/scalar_function/E.out @@ -58,7 +58,7 @@ string1 \N -- !sql_ends_with_Varchar_Varchar -- -\N +true true true true @@ -87,7 +87,7 @@ true true -- !sql_ends_with_String_String -- -\N +true true true true @@ -145,7 +145,7 @@ true 3.3201169227365472 -- !sql_extract_url_parameter_Varchar_Varchar -- -\N + diff --git 
a/regression-test/data/nereids_function_p0/scalar_function/F.out b/regression-test/data/nereids_function_p0/scalar_function/F.out index 0e9ef07f270c0f..200956023370ec 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/F.out +++ b/regression-test/data/nereids_function_p0/scalar_function/F.out @@ -290,7 +290,7 @@ 0 -- !sql_field_Varchar -- -\N +0 0 0 0 @@ -319,7 +319,7 @@ 0 -- !sql_field_String -- -\N +0 0 0 0 @@ -348,7 +348,7 @@ 0 -- !sql_find_in_set_Varchar_Varchar -- -\N +1 1 1 1 @@ -377,7 +377,7 @@ 1 -- !sql_find_in_set_String_String -- -\N +1 1 1 1 diff --git a/regression-test/data/nereids_function_p0/scalar_function/G.out b/regression-test/data/nereids_function_p0/scalar_function/G.out index ec40415caa04bb..b1a83bfd9db87e 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/G.out +++ b/regression-test/data/nereids_function_p0/scalar_function/G.out @@ -351,7 +351,7 @@ 2012-03-12T12:11:12 -- !sql_greatest_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -380,7 +380,7 @@ varchar13 varchar13 -- !sql_greatest_String -- -\N +null string1 string1 string1 diff --git a/regression-test/data/nereids_function_p0/scalar_function/H.out b/regression-test/data/nereids_function_p0/scalar_function/H.out index 04e394186d5698..08425584180782 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/H.out +++ b/regression-test/data/nereids_function_p0/scalar_function/H.out @@ -29,7 +29,7 @@ B C -- !sql_hex_Varchar -- -\N +6E756C6C 766172636861723131 766172636861723131 766172636861723131 @@ -58,7 +58,7 @@ C 766172636861723133 -- !sql_hex_String -- -\N +6E756C6C 737472696E6731 737472696E6731 737472696E6731 diff --git a/regression-test/data/nereids_function_p0/scalar_function/I.out b/regression-test/data/nereids_function_p0/scalar_function/I.out index 046fee12da9652..b0061f3e678b51 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/I.out +++ b/regression-test/data/nereids_function_p0/scalar_function/I.out @@ -1,6 +1,6 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !sql_initcap_Varchar -- -\N +Null Varchar11 Varchar11 Varchar11 @@ -29,7 +29,7 @@ Varchar13 Varchar13 -- !sql_instr_Varchar_Varchar -- -\N +1 1 1 1 @@ -58,7 +58,7 @@ Varchar13 1 -- !sql_instr_String_String -- -\N +1 1 1 1 diff --git a/regression-test/data/nereids_function_p0/scalar_function/L.out b/regression-test/data/nereids_function_p0/scalar_function/L.out index 9550166d43e2f4..3a9ea2a1687c03 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/L.out +++ b/regression-test/data/nereids_function_p0/scalar_function/L.out @@ -464,7 +464,7 @@ 1.200 -- !sql_least_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -493,7 +493,7 @@ varchar13 varchar13 -- !sql_least_String -- -\N +null string1 string1 string1 @@ -580,7 +580,7 @@ string3 string3 -- !sql_length_Varchar -- -\N +4 9 9 9 @@ -609,7 +609,7 @@ string3 9 -- !sql_length_String -- -\N +4 7 7 7 @@ -667,7 +667,7 @@ string3 0.1823215567939546 -- !sql_locate_Varchar_Varchar -- -\N +1 1 1 1 @@ -696,7 +696,7 @@ string3 1 -- !sql_locate_String_String -- -\N +1 1 1 1 @@ -870,7 +870,7 @@ string3 0.2630344058337938 -- !sql_lower_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -899,7 +899,7 @@ varchar13 varchar13 -- !sql_lower_String -- -\N +null string1 string1 string1 @@ -986,7 +986,7 @@ ststring3 strinstring3 -- !sql_ltrim_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -1015,7 +1015,7 @@ varchar13 varchar13 -- !sql_ltrim_String -- -\N +null string1 string1 string1 diff --git a/regression-test/data/nereids_function_p0/scalar_function/M.out b/regression-test/data/nereids_function_p0/scalar_function/M.out index 1c92e8a504d1ac..7f3edf72a29bb2 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/M.out +++ b/regression-test/data/nereids_function_p0/scalar_function/M.out @@ -29,7 +29,7 @@ 0012-01-12 -- !sql_mask_Varchar -- -\N +xxxx xxxxxxxnn xxxxxxxnn xxxxxxxnn @@ -58,7 +58,7 @@ xxxxxxxnn xxxxxxxnn -- !sql_mask_String -- -\N +xxxx xxxxxxn xxxxxxn xxxxxxn @@ -87,7 +87,7 @@ xxxxxxn xxxxxxn -- !sql_mask_first_n_Varchar -- -\N +xxxx xxxxxxxnn xxxxxxxnn xxxxxxxnn @@ -116,7 +116,7 @@ xxxxxxxnn xxxxxxxnn -- !sql_mask_first_n_String -- -\N +xxxx xxxxxxn xxxxxxn xxxxxxn @@ -145,7 +145,7 @@ xxxxxxn xxxxxxn -- !sql_mask_last_n_Varchar -- -\N +xxxx xxxxxxxnn xxxxxxxnn xxxxxxxnn @@ -174,7 +174,7 @@ xxxxxxxnn xxxxxxxnn -- !sql_mask_last_n_String -- -\N +xxxx xxxxxxn xxxxxxn xxxxxxn @@ -203,7 +203,7 @@ xxxxxxn xxxxxxn -- !sql_md5_Varchar -- -\N +37a6259cc0c1dae299a7866489dff0bd 8507af7854a1ef7feca8e5cdbce5e613 8507af7854a1ef7feca8e5cdbce5e613 8507af7854a1ef7feca8e5cdbce5e613 @@ -232,7 +232,7 @@ dc5f80c371451aa4fc81c930adc64f60 73c837059107caff646284f85ac126c3 -- !sql_md5_String -- -\N +37a6259cc0c1dae299a7866489dff0bd 34b577be20fbc15477aadb9a08101ff9 34b577be20fbc15477aadb9a08101ff9 34b577be20fbc15477aadb9a08101ff9 @@ -261,7 +261,7 @@ dc5f80c371451aa4fc81c930adc64f60 9e6dc8685bf3c1b338f2011ace904887 -- !sql_md5sum_Varchar -- -\N +37a6259cc0c1dae299a7866489dff0bd 8507af7854a1ef7feca8e5cdbce5e613 8507af7854a1ef7feca8e5cdbce5e613 8507af7854a1ef7feca8e5cdbce5e613 @@ -290,7 +290,7 @@ dc5f80c371451aa4fc81c930adc64f60 73c837059107caff646284f85ac126c3 -- !sql_md5sum_String -- -\N +37a6259cc0c1dae299a7866489dff0bd 34b577be20fbc15477aadb9a08101ff9 34b577be20fbc15477aadb9a08101ff9 34b577be20fbc15477aadb9a08101ff9 @@ -2842,7 +2842,7 @@ March 2011-03-12 -- !sql_murmur_hash3_32_Varchar -- -\N +-460753941 1382328699 1382328699 1382328699 @@ -2871,7 +2871,7 @@ March -1704245912 
-- !sql_murmur_hash3_32_String -- -\N +-460753941 1105201137 1105201137 1105201137 @@ -2900,7 +2900,7 @@ March -1328370272 -- !sql_murmur_hash3_64_Varchar -- -\N +-202406785360068664 8683832286507167489 8683832286507167489 8683832286507167489 @@ -2929,7 +2929,7 @@ March 3951582574031518507 -- !sql_murmur_hash3_64_String -- -\N +-202406785360068664 -7629742527255288609 -7629742527255288609 -7629742527255288609 diff --git a/regression-test/data/nereids_function_p0/scalar_function/N.out b/regression-test/data/nereids_function_p0/scalar_function/N.out index abaa00cf959534..1cfd80275cd9d5 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/N.out +++ b/regression-test/data/nereids_function_p0/scalar_function/N.out @@ -87,7 +87,7 @@ -1.200 -- !sql_not_null_or_empty_Varchar -- -false +true true true true @@ -116,7 +116,7 @@ true true -- !sql_not_null_or_empty_String -- -false +true true true true @@ -145,7 +145,7 @@ true true -- !sql_null_or_empty_Varchar -- -true +false false false false @@ -174,7 +174,7 @@ false false -- !sql_null_or_empty_String -- -true +false false false false @@ -1102,7 +1102,7 @@ true \N -- !sql_nvl_Varchar_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -1131,7 +1131,7 @@ varchar13 varchar13 -- !sql_nvl_String_String -- -\N +null string1 string1 string1 diff --git a/regression-test/data/nereids_function_p0/scalar_function/P.out b/regression-test/data/nereids_function_p0/scalar_function/P.out index 445163695e3f1c..e69cf1f91d1f4f 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/P.out +++ b/regression-test/data/nereids_function_p0/scalar_function/P.out @@ -206,7 +206,7 @@ 1.2445647472039776 -- !sql_protocol_String -- -\N + diff --git a/regression-test/data/nereids_function_p0/scalar_function/R.out b/regression-test/data/nereids_function_p0/scalar_function/R.out index 519be20674034f..3145c2c6dc08fc 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/R.out +++ b/regression-test/data/nereids_function_p0/scalar_function/R.out @@ -87,7 +87,7 @@ -- !sql_regexp_extract_all_Varchar_Varchar -- -\N + @@ -116,7 +116,7 @@ -- !sql_regexp_extract_all_String_String -- -\N + @@ -145,7 +145,7 @@ -- !sql_regexp_replace_Varchar_Varchar_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -174,7 +174,7 @@ varchar13 varchar13 -- !sql_regexp_replace_String_String_String -- -\N +null string1 string1 string1 @@ -203,7 +203,7 @@ string3 string3 -- !sql_regexp_replace_one_Varchar_Varchar_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -232,7 +232,7 @@ varchar13 varchar13 -- !sql_regexp_replace_one_String_String_String -- -\N +null string1 string1 string1 @@ -319,7 +319,7 @@ string3string3string3string3string3string3string3string3string3 string3string3string3string3string3string3string3string3string3string3string3string3 -- !sql_replace_Varchar_Varchar_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -348,7 +348,7 @@ varchar13 varchar13 -- !sql_replace_String_String_String -- -\N +null string1 string1 string1 @@ -377,7 +377,7 @@ string3 string3 -- !sql_right_Varchar_Integer -- -\N + 1 ar11 rchar11 @@ -406,7 +406,7 @@ varchar13 varchar13 -- !sql_right_String_Integer -- -\N + 1 ing1 string1 @@ -957,7 +957,7 @@ string3st string3strin -- !sql_rtrim_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -986,7 +986,7 @@ varchar13 varchar13 -- !sql_rtrim_String -- -\N +null string1 string1 string1 diff --git a/regression-test/data/nereids_function_p0/scalar_function/S.out b/regression-test/data/nereids_function_p0/scalar_function/S.out 
index c692791944128e..61f2bab42999e4 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/S.out +++ b/regression-test/data/nereids_function_p0/scalar_function/S.out @@ -1015,7 +1015,7 @@ true true -- !sql_sm3_Varchar -- -\N +129a784acb089ea1ab28ff7a0e740d362e75bd5a7486b1a224e3e1c701bdf2fd 96e7a2aa23bf21565359ac989444ccc219d1320adde0b155744f8dcf77e050b0 96e7a2aa23bf21565359ac989444ccc219d1320adde0b155744f8dcf77e050b0 96e7a2aa23bf21565359ac989444ccc219d1320adde0b155744f8dcf77e050b0 @@ -1044,7 +1044,7 @@ c07750bf52dc98f4593882646c11b50c24c70890ea8288a8f1ac7c4a187d6635 c07750bf52dc98f4593882646c11b50c24c70890ea8288a8f1ac7c4a187d6635 -- !sql_sm3_String -- -\N +129a784acb089ea1ab28ff7a0e740d362e75bd5a7486b1a224e3e1c701bdf2fd bd9a11352a4d84a4725035750ddea145189a72b6eb89c0e07fa9d3b21d70004a bd9a11352a4d84a4725035750ddea145189a72b6eb89c0e07fa9d3b21d70004a bd9a11352a4d84a4725035750ddea145189a72b6eb89c0e07fa9d3b21d70004a @@ -1073,7 +1073,7 @@ bd9a11352a4d84a4725035750ddea145189a72b6eb89c0e07fa9d3b21d70004a 1450c7f24515f5566159269a97a95f96d9746fe768a31e069581d69f3f6e48e0 -- !sql_sm3sum_Varchar -- -\N +129a784acb089ea1ab28ff7a0e740d362e75bd5a7486b1a224e3e1c701bdf2fd 96e7a2aa23bf21565359ac989444ccc219d1320adde0b155744f8dcf77e050b0 96e7a2aa23bf21565359ac989444ccc219d1320adde0b155744f8dcf77e050b0 96e7a2aa23bf21565359ac989444ccc219d1320adde0b155744f8dcf77e050b0 @@ -1102,7 +1102,7 @@ c07750bf52dc98f4593882646c11b50c24c70890ea8288a8f1ac7c4a187d6635 c07750bf52dc98f4593882646c11b50c24c70890ea8288a8f1ac7c4a187d6635 -- !sql_sm3sum_String -- -\N +129a784acb089ea1ab28ff7a0e740d362e75bd5a7486b1a224e3e1c701bdf2fd bd9a11352a4d84a4725035750ddea145189a72b6eb89c0e07fa9d3b21d70004a bd9a11352a4d84a4725035750ddea145189a72b6eb89c0e07fa9d3b21d70004a bd9a11352a4d84a4725035750ddea145189a72b6eb89c0e07fa9d3b21d70004a @@ -2264,7 +2264,7 @@ POLYGON ((4 1, 10 4, 9 4, 1 1, 4 1)) POLYGON ((4 4, 45 4, 45 45, 4 45, 4 4)) -- !sql_starts_with_Varchar_Varchar -- -\N +true true true true @@ -2293,7 +2293,7 @@ true true -- !sql_starts_with_String_String -- -\N +true true true true @@ -2438,7 +2438,7 @@ string3 string3 -- !sql_strright_Varchar_Integer -- -\N + 1 ar11 rchar11 @@ -2467,7 +2467,7 @@ varchar13 varchar13 -- !sql_strright_String_Integer -- -\N + 1 ing1 string1 @@ -2757,7 +2757,7 @@ g3 -- !sql_substring_index_Varchar_Varchar_Integer -- -\N +null varchar11 varchar11 varchar11 @@ -2786,7 +2786,7 @@ varchar13 varchar13 -- !sql_substring_index_String_String_Integer -- -\N +null string1 string1 string1 diff --git a/regression-test/data/nereids_function_p0/scalar_function/T.out b/regression-test/data/nereids_function_p0/scalar_function/T.out index 0ca160646d70c7..028d20a7d942e8 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/T.out +++ b/regression-test/data/nereids_function_p0/scalar_function/T.out @@ -348,7 +348,7 @@ 2012-03-12T12:11:12 -- !sql_to_base64_String -- -\N +bnVsbA== c3RyaW5nMQ== c3RyaW5nMQ== c3RyaW5nMQ== @@ -725,7 +725,7 @@ c3RyaW5nMw== 2012-03-12 -- !sql_trim_Varchar -- -\N +null varchar11 varchar11 varchar11 @@ -754,7 +754,7 @@ varchar13 varchar13 -- !sql_trim_String -- -\N +null string1 string1 string1 diff --git a/regression-test/data/nereids_function_p0/scalar_function/U.out b/regression-test/data/nereids_function_p0/scalar_function/U.out index 5f05cfc48dd09b..fa8adc5f85f5aa 100644 --- a/regression-test/data/nereids_function_p0/scalar_function/U.out +++ b/regression-test/data/nereids_function_p0/scalar_function/U.out @@ -1,6 +1,6 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !sql_unhex_Varchar -- -\N + @@ -29,7 +29,7 @@ -- !sql_unhex_String -- -\N + @@ -174,7 +174,7 @@ 1331481600 -- !sql_unix_timestamp_Varchar_Varchar -- -\N +0 0 0 0 @@ -203,7 +203,7 @@ 0 -- !sql_unix_timestamp_String_String -- -\N +0 0 0 0 @@ -232,7 +232,7 @@ 0 -- !sql_upper_Varchar -- -\N +NULL VARCHAR11 VARCHAR11 VARCHAR11 @@ -261,7 +261,7 @@ VARCHAR13 VARCHAR13 -- !sql_upper_String -- -\N +NULL STRING1 STRING1 STRING1 diff --git a/regression-test/data/nereids_function_p0/window_function/window_fn.out b/regression-test/data/nereids_function_p0/window_function/window_fn.out index 510194d4ea9d69..a541087bb0f8e1 100644 --- a/regression-test/data/nereids_function_p0/window_function/window_fn.out +++ b/regression-test/data/nereids_function_p0/window_function/window_fn.out @@ -1,6 +1,6 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !sql_count_pb -- -\N 0 +null 0 string1 4 string1 4 string1 4 @@ -15,7 +15,7 @@ string3 4 string3 4 -- !sql_count_pb_ob -- -\N \N 0 +null \N 0 string1 1 1 string1 4 2 string1 7 3 @@ -58,7 +58,7 @@ string3 9 3 string3 12 4 -- !sql_count_f_1 -- -\N \N 0 +null \N 0 string1 1 1 string2 2 1 string3 3 1 @@ -87,7 +87,7 @@ string2 11 4 string3 12 4 -- !sql_count_f_2 -- -\N \N 0 +null \N 0 string1 1 1 string2 2 1 string3 3 1 @@ -116,7 +116,7 @@ string2 11 3 string3 12 3 -- !sql_count_f_3 -- -\N \N 0 +null \N 0 string1 1 1 string2 2 1 string3 3 1 @@ -145,7 +145,7 @@ string2 11 1 string3 12 1 -- !sql_count_f_4 -- -\N \N 0 +null \N 0 string1 1 1 string2 2 1 string3 3 1 @@ -174,7 +174,7 @@ string2 11 4 string3 12 4 -- !sql_count_f_5 -- -\N \N 0 +null \N 0 string1 1 3 string2 2 3 string3 3 3 @@ -203,7 +203,7 @@ string2 11 4 string3 12 4 -- !sql_count_f_6 -- -\N \N 0 +null \N 0 string1 1 4 string2 2 4 string3 3 4 @@ -232,7 +232,7 @@ string2 11 4 string3 12 4 -- !sql_count_f_7 -- -\N \N 0 +null \N 0 string1 1 1 string2 2 1 string3 3 1 @@ -261,7 +261,7 @@ string2 11 3 string3 12 3 -- !sql_count_f_8 -- -\N \N 0 +null \N 0 string1 1 3 string2 2 3 string3 3 3 @@ -290,7 +290,7 @@ string2 11 3 string3 12 3 -- !sql_count_f_9 -- -\N \N 0 +null \N 0 string1 1 4 string2 2 4 string3 3 4 @@ -319,7 +319,7 @@ string2 11 3 string3 12 3 -- !sql_count_f_10 -- -\N \N 0 +null \N 0 string1 1 1 string2 2 1 string3 3 1 @@ -348,7 +348,7 @@ string2 11 1 string3 12 1 -- !sql_count_f_11 -- -\N \N 0 +null \N 0 string1 1 3 string2 2 3 string3 3 3 @@ -377,7 +377,7 @@ string2 11 1 string3 12 1 -- !sql_count_f_12 -- -\N \N 0 +null \N 0 string1 1 4 string2 2 4 string3 3 4 @@ -406,7 +406,7 @@ string2 11 1 string3 12 1 -- !sql_count_f_13 -- -\N \N 0 +null \N 0 string1 1 1 string2 2 1 string3 3 1 @@ -435,7 +435,7 @@ string2 11 4 string3 12 4 -- !sql_count_f_14 -- -\N \N 0 +null \N 0 string1 1 1 string2 2 1 string3 3 1 @@ -464,7 +464,7 @@ string2 11 4 string3 12 4 -- !sql_count_f_15 -- -\N \N 0 +null \N 0 string1 1 4 string2 2 4 string3 3 4 @@ -493,7 +493,7 @@ string2 11 4 string3 12 4 -- !sql_count_f_16 -- -\N \N 0 +null \N 0 string1 1 4 string2 2 4 string3 3 4 @@ -522,7 +522,7 @@ string2 11 1 string3 12 1 -- !sql_avg_pb -- -\N \N +null \N string1 5.5 string1 5.5 string1 5.5 @@ -537,7 +537,7 @@ string3 7.5 string3 7.5 -- !sql_avg_pb_ob -- -\N \N \N +null \N \N string1 1 1.0 string1 4 2.5 string1 7 4.0 @@ -580,7 +580,7 @@ string3 9 6.0 string3 12 7.5 -- !sql_avg_f_1 -- -\N \N \N +null \N \N string1 1 1.0 string2 2 2.0 string3 3 3.0 @@ -609,7 +609,7 @@ string2 11 6.5 string3 12 7.5 -- !sql_avg_f_2 -- -\N \N \N 
+null \N \N string1 1 1.0 string2 2 2.0 string3 3 3.0 @@ -638,7 +638,7 @@ string2 11 8.0 string3 12 9.0 -- !sql_avg_f_3 -- -\N \N \N +null \N \N string1 1 1.0 string2 2 2.0 string3 3 3.0 @@ -667,7 +667,7 @@ string2 11 11.0 string3 12 12.0 -- !sql_avg_f_4 -- -\N \N \N +null \N \N string1 1 1.0 string2 2 2.0 string3 3 3.0 @@ -696,7 +696,7 @@ string2 11 6.5 string3 12 7.5 -- !sql_avg_f_5 -- -\N \N \N +null \N \N string1 1 4.0 string2 2 5.0 string3 3 6.0 @@ -725,7 +725,7 @@ string2 11 6.5 string3 12 7.5 -- !sql_avg_f_6 -- -\N \N \N +null \N \N string1 1 5.5 string2 2 6.5 string3 3 7.5 @@ -754,7 +754,7 @@ string2 11 6.5 string3 12 7.5 -- !sql_avg_f_7 -- -\N \N \N +null \N \N string1 1 1.0 string2 2 2.0 string3 3 3.0 @@ -783,7 +783,7 @@ string2 11 8.0 string3 12 9.0 -- !sql_avg_f_8 -- -\N \N \N +null \N \N string1 1 4.0 string2 2 5.0 string3 3 6.0 @@ -812,7 +812,7 @@ string2 11 8.0 string3 12 9.0 -- !sql_avg_f_9 -- -\N \N \N +null \N \N string1 1 5.5 string2 2 6.5 string3 3 7.5 @@ -841,7 +841,7 @@ string2 11 8.0 string3 12 9.0 -- !sql_avg_f_10 -- -\N \N \N +null \N \N string1 1 1.0 string2 2 2.0 string3 3 3.0 @@ -870,7 +870,7 @@ string2 11 11.0 string3 12 12.0 -- !sql_avg_f_11 -- -\N \N \N +null \N \N string1 1 4.0 string2 2 5.0 string3 3 6.0 @@ -899,7 +899,7 @@ string2 11 11.0 string3 12 12.0 -- !sql_avg_f_12 -- -\N \N \N +null \N \N string1 1 5.5 string2 2 6.5 string3 3 7.5 @@ -928,7 +928,7 @@ string2 11 11.0 string3 12 12.0 -- !sql_avg_f_13 -- -\N \N \N +null \N \N string1 1 1.0 string2 2 2.0 string3 3 3.0 @@ -957,7 +957,7 @@ string2 11 6.5 string3 12 7.5 -- !sql_avg_f_14 -- -\N \N \N +null \N \N string1 1 1.0 string2 2 2.0 string3 3 3.0 @@ -986,7 +986,7 @@ string2 11 6.5 string3 12 7.5 -- !sql_avg_f_15 -- -\N \N \N +null \N \N string1 1 5.5 string2 2 6.5 string3 3 7.5 @@ -1015,7 +1015,7 @@ string2 11 6.5 string3 12 7.5 -- !sql_avg_f_16 -- -\N \N \N +null \N \N string1 1 5.5 string2 2 6.5 string3 3 7.5 @@ -1044,7 +1044,7 @@ string2 11 11.0 string3 12 12.0 -- !sql_min_pb -- -\N \N +null \N string1 1 string1 1 string1 1 @@ -1059,7 +1059,7 @@ string3 3 string3 3 -- !sql_min_pb_ob -- -\N \N \N +null \N \N string1 1 1 string1 4 1 string1 7 1 @@ -1102,7 +1102,7 @@ string3 9 3 string3 12 3 -- !sql_min_f_1 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1131,7 +1131,7 @@ string2 11 2 string3 12 3 -- !sql_min_f_2 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1160,7 +1160,7 @@ string2 11 2 string3 12 3 -- !sql_min_f_3 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1189,7 +1189,7 @@ string2 11 2 string3 12 3 -- !sql_min_f_4 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1218,7 +1218,7 @@ string2 11 5 string3 12 6 -- !sql_min_f_5 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1247,7 +1247,7 @@ string2 11 5 string3 12 6 -- !sql_min_f_6 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1276,7 +1276,7 @@ string2 11 5 string3 12 6 -- !sql_min_f_7 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1305,7 +1305,7 @@ string2 11 11 string3 12 12 -- !sql_min_f_8 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1334,7 +1334,7 @@ string2 11 11 string3 12 12 -- !sql_min_f_9 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1363,7 +1363,7 @@ string2 11 11 string3 12 12 -- !sql_min_f_10 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1392,7 +1392,7 @@ string2 11 2 string3 12 3 -- !sql_min_f_11 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 
@@ -1421,7 +1421,7 @@ string2 11 2 string3 12 3 -- !sql_min_f_12 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1450,7 +1450,7 @@ string2 11 11 string3 12 12 -- !sql_max_pb -- -\N \N +null \N string1 10 string1 10 string1 10 @@ -1465,7 +1465,7 @@ string3 12 string3 12 -- !sql_max_pb_ob -- -\N \N \N +null \N \N string1 1 1 string1 4 4 string1 7 7 @@ -1508,7 +1508,7 @@ string3 9 9 string3 12 12 -- !sql_max_f_1 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1537,7 +1537,7 @@ string2 11 11 string3 12 12 -- !sql_max_f_2 -- -\N \N \N +null \N \N string1 1 7 string2 2 8 string3 3 9 @@ -1566,7 +1566,7 @@ string2 11 11 string3 12 12 -- !sql_max_f_3 -- -\N \N \N +null \N \N string1 1 10 string2 2 11 string3 3 12 @@ -1595,7 +1595,7 @@ string2 11 11 string3 12 12 -- !sql_max_f_4 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1624,7 +1624,7 @@ string2 11 11 string3 12 12 -- !sql_max_f_5 -- -\N \N \N +null \N \N string1 1 7 string2 2 8 string3 3 9 @@ -1653,7 +1653,7 @@ string2 11 11 string3 12 12 -- !sql_max_f_6 -- -\N \N \N +null \N \N string1 1 10 string2 2 11 string3 3 12 @@ -1682,7 +1682,7 @@ string2 11 11 string3 12 12 -- !sql_max_f_7 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1711,7 +1711,7 @@ string2 11 11 string3 12 12 -- !sql_max_f_8 -- -\N \N \N +null \N \N string1 1 7 string2 2 8 string3 3 9 @@ -1740,7 +1740,7 @@ string2 11 11 string3 12 12 -- !sql_max_f_9 -- -\N \N \N +null \N \N string1 1 10 string2 2 11 string3 3 12 @@ -1769,7 +1769,7 @@ string2 11 11 string3 12 12 -- !sql_max_f_10 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1798,7 +1798,7 @@ string2 11 11 string3 12 12 -- !sql_max_f_11 -- -\N \N \N +null \N \N string1 1 10 string2 2 11 string3 3 12 @@ -1827,7 +1827,7 @@ string2 11 11 string3 12 12 -- !sql_max_f_12 -- -\N \N \N +null \N \N string1 1 10 string2 2 11 string3 3 12 @@ -1856,7 +1856,7 @@ string2 11 11 string3 12 12 -- !sql_sum_pb -- -\N \N +null \N string1 22 string1 22 string1 22 @@ -1871,7 +1871,7 @@ string3 30 string3 30 -- !sql_sum_pb_ob -- -\N \N \N +null \N \N string1 1 1 string1 4 5 string1 7 12 @@ -1914,7 +1914,7 @@ string3 9 18 string3 12 30 -- !sql_sum_f_1 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1943,7 +1943,7 @@ string2 11 26 string3 12 30 -- !sql_sum_f_2 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -1972,7 +1972,7 @@ string2 11 24 string3 12 27 -- !sql_sum_f_3 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2001,7 +2001,7 @@ string2 11 11 string3 12 12 -- !sql_sum_f_4 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2030,7 +2030,7 @@ string2 11 26 string3 12 30 -- !sql_sum_f_5 -- -\N \N \N +null \N \N string1 1 12 string2 2 15 string3 3 18 @@ -2059,7 +2059,7 @@ string2 11 26 string3 12 30 -- !sql_sum_f_6 -- -\N \N \N +null \N \N string1 1 22 string2 2 26 string3 3 30 @@ -2088,7 +2088,7 @@ string2 11 26 string3 12 30 -- !sql_sum_f_7 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2117,7 +2117,7 @@ string2 11 24 string3 12 27 -- !sql_sum_f_8 -- -\N \N \N +null \N \N string1 1 12 string2 2 15 string3 3 18 @@ -2146,7 +2146,7 @@ string2 11 24 string3 12 27 -- !sql_sum_f_9 -- -\N \N \N +null \N \N string1 1 22 string2 2 26 string3 3 30 @@ -2175,7 +2175,7 @@ string2 11 24 string3 12 27 -- !sql_sum_f_10 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2204,7 +2204,7 @@ string2 11 11 string3 12 12 -- !sql_sum_f_11 -- -\N \N \N +null \N \N string1 1 12 string2 2 15 string3 
3 18 @@ -2233,7 +2233,7 @@ string2 11 11 string3 12 12 -- !sql_sum_f_12 -- -\N \N \N +null \N \N string1 1 22 string2 2 26 string3 3 30 @@ -2262,7 +2262,7 @@ string2 11 11 string3 12 12 -- !sql_sum_f_13 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2291,7 +2291,7 @@ string2 11 26 string3 12 30 -- !sql_sum_f_14 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2320,7 +2320,7 @@ string2 11 26 string3 12 30 -- !sql_sum_f_15 -- -\N \N \N +null \N \N string1 1 22 string2 2 26 string3 3 30 @@ -2349,7 +2349,7 @@ string2 11 26 string3 12 30 -- !sql_sum_f_16 -- -\N \N \N +null \N \N string1 1 22 string2 2 26 string3 3 30 @@ -2378,7 +2378,7 @@ string2 11 11 string3 12 12 -- !sql_dense_rank_pb -- -\N 1 +null 1 string1 1 string1 1 string1 1 @@ -2393,7 +2393,7 @@ string3 1 string3 1 -- !sql_dense_rank_pb_ob -- -\N \N 1 +null \N 1 string1 1 1 string1 4 2 string1 7 3 @@ -2436,7 +2436,7 @@ string3 9 3 string3 12 4 -- !sql_first_value_pb_ob -- -\N \N \N +null \N \N string1 1 1 string1 4 1 string1 7 1 @@ -2465,7 +2465,7 @@ string3 9 3 string3 12 3 -- !sql_first_value_f_1 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2494,7 +2494,7 @@ string2 11 2 string3 12 3 -- !sql_first_value_f_2 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2523,7 +2523,7 @@ string2 11 5 string3 12 6 -- !sql_first_value_f_3 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2552,7 +2552,7 @@ string2 11 11 string3 12 12 -- !sql_first_value_f_4 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2581,7 +2581,7 @@ string2 11 2 string3 12 3 -- !sql_first_value_f_5 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2610,7 +2610,7 @@ string2 11 2 string3 12 3 -- !sql_first_value_f_6 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2639,7 +2639,7 @@ string2 11 2 string3 12 3 -- !sql_first_value_f_7 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2668,7 +2668,7 @@ string2 11 5 string3 12 6 -- !sql_first_value_f_8 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2697,7 +2697,7 @@ string2 11 5 string3 12 6 -- !sql_first_value_f_9 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2726,7 +2726,7 @@ string2 11 5 string3 12 6 -- !sql_first_value_f_10 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2755,7 +2755,7 @@ string2 11 11 string3 12 12 -- !sql_first_value_f_11 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2784,7 +2784,7 @@ string2 11 11 string3 12 12 -- !sql_first_value_f_12 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2813,7 +2813,7 @@ string2 11 11 string3 12 12 -- !sql_first_value_f_13 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2842,7 +2842,7 @@ string2 11 2 string3 12 3 -- !sql_first_value_f_14 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2871,7 +2871,7 @@ string2 11 2 string3 12 3 -- !sql_first_value_f_15 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2900,7 +2900,7 @@ string2 11 2 string3 12 3 -- !sql_lag_pb_ob -- -\N \N 1 +null \N 1 string1 1 1 string1 4 1 string1 7 1 @@ -2929,7 +2929,7 @@ string3 9 3 string3 12 6 -- !sql_last_value_pb_ob -- -\N \N \N +null \N \N string1 1 1 string1 4 4 string1 7 7 @@ -2958,7 +2958,7 @@ string3 9 9 string3 12 12 -- !sql_last_value_f_1 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -2987,7 +2987,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_2 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 
string3 3 3 @@ -3016,7 +3016,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_3 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -3045,7 +3045,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_4 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -3074,7 +3074,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_5 -- -\N \N \N +null \N \N string1 1 7 string2 2 8 string3 3 9 @@ -3103,7 +3103,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_6 -- -\N \N \N +null \N \N string1 1 10 string2 2 11 string3 3 12 @@ -3132,7 +3132,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_7 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -3161,7 +3161,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_8 -- -\N \N \N +null \N \N string1 1 7 string2 2 8 string3 3 9 @@ -3190,7 +3190,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_9 -- -\N \N \N +null \N \N string1 1 10 string2 2 11 string3 3 12 @@ -3219,7 +3219,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_10 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -3248,7 +3248,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_11 -- -\N \N \N +null \N \N string1 1 7 string2 2 8 string3 3 9 @@ -3277,7 +3277,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_12 -- -\N \N \N +null \N \N string1 1 10 string2 2 11 string3 3 12 @@ -3306,7 +3306,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_13 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -3335,7 +3335,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_14 -- -\N \N \N +null \N \N string1 1 1 string2 2 2 string3 3 3 @@ -3364,7 +3364,7 @@ string2 11 11 string3 12 12 -- !sql_last_value_f_15 -- -\N \N \N +null \N \N string1 1 10 string2 2 11 string3 3 12 @@ -3393,7 +3393,7 @@ string2 11 11 string3 12 12 -- !sql_lead_pb_ob -- -\N \N 1 +null \N 1 string1 1 7 string1 4 10 string1 7 1 @@ -3422,7 +3422,7 @@ string3 9 1 string3 12 1 -- !sql_ntile_pb -- -\N 1 +null 1 string1 1 string1 1 string1 2 @@ -3437,7 +3437,7 @@ string3 2 string3 3 -- !sql_ntile_pb_ob -- -\N \N 1 +null \N 1 string1 1 1 string1 4 1 string1 7 2 @@ -3480,7 +3480,7 @@ string3 9 2 string3 12 3 -- !sql_rank_pb -- -\N 1 +null 1 string1 1 string1 1 string1 1 @@ -3495,7 +3495,7 @@ string3 1 string3 1 -- !sql_rank_pb_ob -- -\N \N 1 +null \N 1 string1 1 1 string1 4 2 string1 7 3 @@ -3538,7 +3538,7 @@ string3 9 3 string3 12 4 -- !sql_row_number_pb -- -\N 1 +null 1 string1 1 string1 2 string1 3 @@ -3553,7 +3553,7 @@ string3 3 string3 4 -- !sql_row_number_pb_ob -- -\N \N 1 +null \N 1 string1 1 1 string1 4 2 string1 7 3 diff --git a/regression-test/data/nereids_p0/insert_into_table/aggregate.out b/regression-test/data/nereids_p0/insert_into_table/aggregate.out index 5fe3e00d1efef8..d99e8ac9a9b561 100644 --- a/regression-test/data/nereids_p0/insert_into_table/aggregate.out +++ b/regression-test/data/nereids_p0/insert_into_table/aggregate.out @@ -1,6 +1,6 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !11 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -16,7 +16,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !12 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -32,7 +32,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !13 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -48,7 +48,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !21 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -64,7 +64,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 
1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !22 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -80,7 +80,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !23 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -186,7 +186,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !lsc1 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 diff --git a/regression-test/data/nereids_p0/insert_into_table/duplicate.out b/regression-test/data/nereids_p0/insert_into_table/duplicate.out index 01d4bdd2f87de9..7cb44cacaf1ef5 100644 --- a/regression-test/data/nereids_p0/insert_into_table/duplicate.out +++ b/regression-test/data/nereids_p0/insert_into_table/duplicate.out @@ -1,6 +1,6 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !11 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 2 20 47545 10698279 213965645 0.2 0.7416 34.484000 char12 varchar12 string2 2012-03-02 2012-03-02 2012-03-02T02:01:02 2012-03-02T02:01:02 23.123 2345.12345 23456789.12345600 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 @@ -27,8 +27,8 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !12 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 2 20 47545 10698279 213965645 0.2 0.7416 34.484000 char12 varchar12 string2 2012-03-02 2012-03-02 2012-03-02T02:01:02 2012-03-02T02:01:02 23.123 2345.12345 23456789.12345600 @@ -79,8 +79,8 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !13 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 @@ -136,7 +136,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !21 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 2 20 47545 10698279 213965645 0.2 0.7416 34.484000 char12 varchar12 string2 2012-03-02 2012-03-02 2012-03-02T02:01:02 2012-03-02T02:01:02 23.123 2345.12345 23456789.12345600 2 false 3 40 95045 
21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 @@ -163,8 +163,8 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !22 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 2 20 47545 10698279 213965645 0.2 0.7416 34.484000 char12 varchar12 string2 2012-03-02 2012-03-02 2012-03-02T02:01:02 2012-03-02T02:01:02 23.123 2345.12345 23456789.12345600 @@ -215,8 +215,8 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !23 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 @@ -534,9 +534,9 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !lsc1 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 diff --git a/regression-test/data/nereids_p0/insert_into_table/no_partition.out b/regression-test/data/nereids_p0/insert_into_table/no_partition.out index 
d5cbddf9e69e21..0e29c6a27854bc 100644 --- a/regression-test/data/nereids_p0/insert_into_table/no_partition.out +++ b/regression-test/data/nereids_p0/insert_into_table/no_partition.out @@ -1,6 +1,6 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !11 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -16,7 +16,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !12 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -32,7 +32,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !21 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -48,7 +48,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !22 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 
85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -124,7 +124,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !11 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 2 20 47545 10698279 213965645 0.2 0.7416 34.484000 char12 varchar12 string2 2012-03-02 2012-03-02 2012-03-02T02:01:02 2012-03-02T02:01:02 23.123 2345.12345 23456789.12345600 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 @@ -151,8 +151,8 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !12 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 2 20 47545 10698279 213965645 0.2 0.7416 34.484000 char12 varchar12 string2 2012-03-02 2012-03-02 2012-03-02T02:01:02 2012-03-02T02:01:02 23.123 2345.12345 23456789.12345600 @@ -203,7 +203,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !21 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 2 20 47545 10698279 213965645 0.2 0.7416 34.484000 char12 varchar12 string2 2012-03-02 2012-03-02 2012-03-02T02:01:02 2012-03-02T02:01:02 23.123 2345.12345 23456789.12345600 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 @@ -230,8 +230,8 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !22 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 
24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 2 20 47545 10698279 213965645 0.2 0.7416 34.484000 char12 varchar12 string2 2012-03-02 2012-03-02 2012-03-02T02:01:02 2012-03-02T02:01:02 23.123 2345.12345 23456789.12345600 @@ -434,7 +434,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !11 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -450,7 +450,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !12 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -466,7 +466,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !21 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -482,7 +482,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !22 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N 
null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -498,7 +498,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !31 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -514,7 +514,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !32 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -530,7 +530,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !41 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -546,7 +546,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !42 -- -\N \N 
\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -682,7 +682,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !lsc1 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 2 20 47545 10698279 213965645 0.2 0.7416 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 4 80 190045 42760779 855215645 0.4 1.4491 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -713,9 +713,9 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !lsc1 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 @@ -864,7 +864,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !lsc1 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 
string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -895,7 +895,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !lsc3 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 diff --git a/regression-test/data/nereids_p0/insert_into_table/unique.out b/regression-test/data/nereids_p0/insert_into_table/unique.out index 736140914f3d4c..bb735d1c740622 100644 --- a/regression-test/data/nereids_p0/insert_into_table/unique.out +++ b/regression-test/data/nereids_p0/insert_into_table/unique.out @@ -1,6 +1,6 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !11 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -16,7 +16,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !12 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -32,7 +32,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !13 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 
1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -48,7 +48,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !21 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -64,7 +64,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !22 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -80,7 +80,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !23 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -96,7 +96,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !31 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 
string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -112,7 +112,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !32 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -128,7 +128,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !33 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -144,7 +144,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !41 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -160,7 +160,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !42 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N 
\N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -176,7 +176,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !43 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -372,7 +372,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !lsc1 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 @@ -403,7 +403,7 @@ 13 true 12 20480 48640045 10944010779 218880215645 1.2 22.634 1102.957000 6975.71 104845.843 479399.861 2012-03-12 2012-03-12 2012-03-12T12:11:12 2012-03-12T12:11:12 267.565 26789.56787 267901233.56789800 -- !lsc3 -- -\N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N \N +\N \N \N \N \N \N \N \N \N \N null null null \N \N \N \N \N \N \N 1 false 1 10 23795 5354529 107090645 0.1 0.5244 24.395000 char11 varchar11 string1 2012-03-01 2012-03-01 2012-03-01T01:00:01 2012-03-01T01:00:01 12.012 1234.01234 12345678.01234500 2 false 3 40 95045 21385779 427715645 0.3 1.0368 48.756000 char13 varchar13 string3 2012-03-03 2012-03-03 2012-03-03T03:02:03 2012-03-03T03:02:03 34.234 3456.23456 34567900.23456700 3 false 5 160 380045 85510779 1710215645 0.5 2.031 97.494000 char12 varchar12 string2 2012-03-05 2012-03-05 2012-03-05T05:04:05 2012-03-05T05:04:05 56.456 5678.45678 56790122.45678900 diff --git a/regression-test/pipeline/p0/conf/regression-conf.groovy b/regression-test/pipeline/p0/conf/regression-conf.groovy index 5ad2bde38f178f..0a680eb0b8578b 100644 --- a/regression-test/pipeline/p0/conf/regression-conf.groovy +++ 
b/regression-test/pipeline/p0/conf/regression-conf.groovy @@ -55,7 +55,7 @@ testDirectories = "" excludeGroups = "" // this suites will not be executed -excludeSuites = "test_sql_block_rule,test_ddl,test_analyze,test_leading,test_stream_load_move_memtable,test_profile,test_broker_load,test_spark_load,test_refresh_mtmv,test_bitmap_filter,test_export_parquet,nereids_delete_mow_partial_update,insert_group_commit_with_prepare_stmt" +excludeSuites = "test_sql_block_rule,test_ddl,test_analyze,test_leading,test_stream_load_move_memtable,test_profile,test_broker_load,test_spark_load,test_refresh_mtmv,test_bitmap_filter,test_export_parquet,nereids_delete_mow_partial_update" // this directories will not be executed excludeDirectories = "workload_manager_p1" From 4594fd25d83a14d193b6ed9604aef7026946be21 Mon Sep 17 00:00:00 2001 From: Xiangyu Wang Date: Sun, 17 Sep 2023 00:05:07 +0800 Subject: [PATCH 19/33] [Fix](kerberos) Fix kerberos relogin bugs when using hdfs-load. (#24490) --- .../doris/fs/remote/dfs/DFSFileSystem.java | 66 +++++++++---------- 1 file changed, 32 insertions(+), 34 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java index ce297ce92068f0..79c83b80891d20 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java +++ b/fe/fe-core/src/main/java/org/apache/doris/fs/remote/dfs/DFSFileSystem.java @@ -82,47 +82,45 @@ protected FileSystem nativeFileSystem(String remotePath) throws UserException { conf.set(propEntry.getKey(), propEntry.getValue()); } - boolean hasRelogin = false; - UserGroupInformation ugi; + UserGroupInformation ugi = login(conf); try { - // try use current ugi first to avoid relogin - // because it may be a time-consuming task - ugi = UserGroupInformation.getCurrentUser(); - } catch (IOException e) { - LOG.warn("An IOException occurs when invoke " - + "UserGroupInformation.getCurrentUser(), relogin immediately.", e); - ugi = doLogin(conf); - hasRelogin = true; + dfsFileSystem = ugi.doAs((PrivilegedAction) () -> { + try { + return FileSystem.get(new Path(remotePath).toUri(), conf); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } catch (SecurityException e) { + throw new UserException(e); } - do { + Preconditions.checkNotNull(dfsFileSystem); + operations = new HDFSFileOperations(dfsFileSystem); + return dfsFileSystem; + } + + private UserGroupInformation login(Configuration conf) throws UserException { + if (AuthType.KERBEROS.getDesc().equals( + conf.get(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, null))) { try { - dfsFileSystem = ugi.doAs((PrivilegedAction) () -> { - try { - String username = properties.get(HdfsResource.HADOOP_USER_NAME); - return username == null - ? 
FileSystem.get(new Path(remotePath).toUri(), conf) - : FileSystem.get(new Path(remotePath).toUri(), conf, username); - } catch (IOException | InterruptedException e) { - throw new RuntimeException(e); - } - }); - LOG.debug("Reuse current ugi for dfs, remote path: {}", remotePath); - break; - } catch (SecurityException e) { - LOG.warn("A SecurityException occurs when invoke ugi.doAs(), " - + "relogin and retry immediately.", e); - if (hasRelogin) { - throw new UserException(e); + UserGroupInformation ugi = UserGroupInformation.getLoginUser(); + String principal = conf.get(HdfsResource.HADOOP_KERBEROS_PRINCIPAL); + LOG.debug("Current login user: {}", ugi.getUserName()); + if (ugi.hasKerberosCredentials() && ugi.getUserName().equals(principal)) { + // if the current user is logged by kerberos and is the same user + // just use checkTGTAndReloginFromKeytab because this method will only relogin + // when the TGT is expired or is close to expiry + ugi.checkTGTAndReloginFromKeytab(); + return ugi; } - ugi = doLogin(conf); - hasRelogin = true; + } catch (IOException e) { + LOG.warn("A SecurityException occurs with kerberos, do login immediately.", e); + return doLogin(conf); } - } while (true); + } - Preconditions.checkNotNull(dfsFileSystem); - operations = new HDFSFileOperations(dfsFileSystem); - return dfsFileSystem; + return doLogin(conf); } private UserGroupInformation doLogin(Configuration conf) throws UserException { From 5c9785045f72a64711b2ddc132b20aa14960a88a Mon Sep 17 00:00:00 2001 From: abmdocrt Date: Sun, 17 Sep 2023 00:08:32 +0800 Subject: [PATCH 20/33] [Fix](Full compaction) Fix local variable using in test case (#24440) --- .../test_full_compaction_by_table_id.groovy | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/regression-test/suites/compaction/test_full_compaction_by_table_id.groovy b/regression-test/suites/compaction/test_full_compaction_by_table_id.groovy index dc3c47c5a391bd..cd721774c21fd8 100644 --- a/regression-test/suites/compaction/test_full_compaction_by_table_id.groovy +++ b/regression-test/suites/compaction/test_full_compaction_by_table_id.groovy @@ -50,7 +50,7 @@ suite("test_full_compaction_by_table_id") { `user_id` INT NOT NULL, `value` INT NOT NULL) UNIQUE KEY(`user_id`) DISTRIBUTED BY HASH(`user_id`) - BUCKETS 32 + BUCKETS 8 PROPERTIES ("replication_allocation" = "tag.location.default: 1", "disable_auto_compaction" = "true", "enable_unique_key_merge_on_write" = "true");""" @@ -105,7 +105,7 @@ suite("test_full_compaction_by_table_id") { // before full compaction, there are 7 rowsets in all tablets. 
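The Kerberos change above follows the standard Hadoop UserGroupInformation relogin pattern: reuse the current login when it already holds Kerberos credentials for the wanted principal (checkTGTAndReloginFromKeytab only re-authenticates when the TGT is expired or close to expiry), otherwise fall back to a fresh keytab login. Below is a minimal, self-contained sketch of that pattern, not the patch's code; the class name, method name, and the principal/keytab parameters are illustrative assumptions, and only the UserGroupInformation calls are real Hadoop APIs.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public final class KerberosLoginSketch {
    // Returns a UGI that can run filesystem calls via doAs(); cheap to call repeatedly,
    // because an existing matching login is reused instead of re-authenticating each time.
    static UserGroupInformation obtainUgi(Configuration conf, String principal, String keytabPath)
            throws IOException {
        UserGroupInformation current = UserGroupInformation.getLoginUser();
        if (current.hasKerberosCredentials() && current.getUserName().equals(principal)) {
            // Re-login only if the ticket is expired or about to expire.
            current.checkTGTAndReloginFromKeytab();
            return current;
        }
        // No usable login for this principal: do a fresh login from the keytab.
        UserGroupInformation.setConfiguration(conf);
        return UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytabPath);
    }
}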
for (int i=0; i size()) { - return Status::IOError( + return Status::InvalidArgument( fmt::format("offset exceeds file size(offset: {), file size: {}, path: {})", offset, size(), path().native())); } diff --git a/be/src/olap/tablet.cpp b/be/src/olap/tablet.cpp index c9704f7f83b162..85af4ac196d134 100644 --- a/be/src/olap/tablet.cpp +++ b/be/src/olap/tablet.cpp @@ -1639,9 +1639,10 @@ void Tablet::build_tablet_report_info(TTabletInfo* tablet_info, } if (tablet_state() == TABLET_RUNNING) { - if (has_version_cross || is_io_error_too_times()) { + if (has_version_cross || is_io_error_too_times() || !data_dir()->is_used()) { LOG(INFO) << "report " << full_name() << " as bad, version_cross=" << has_version_cross - << ", ioe times=" << get_io_error_times(); + << ", ioe times=" << get_io_error_times() << ", data_dir used " + << data_dir()->is_used(); tablet_info->__set_used(false); } diff --git a/be/src/olap/tablet.h b/be/src/olap/tablet.h index 99dfd445b1fc4f..2a10535a982804 100644 --- a/be/src/olap/tablet.h +++ b/be/src/olap/tablet.h @@ -533,7 +533,13 @@ class Tablet : public BaseTablet { void gc_binlogs(int64_t version); Status ingest_binlog_metas(RowsetBinlogMetasPB* metas_pb); - inline void increase_io_error_times() { ++_io_error_times; } + inline void report_error(const Status& st) { + if (st.is()) { + ++_io_error_times; + } else if (st.is()) { + _io_error_times = config::max_tablet_io_errors + 1; + } + } inline int64_t get_io_error_times() const { return _io_error_times; } diff --git a/be/src/olap/utils.cpp b/be/src/olap/utils.cpp index 8537cb7adc5f30..f0c72d335b3813 100644 --- a/be/src/olap/utils.cpp +++ b/be/src/olap/utils.cpp @@ -427,14 +427,14 @@ Status read_write_test_file(const std::string& test_file_path) { if (access(test_file_path.c_str(), F_OK) == 0) { if (remove(test_file_path.c_str()) != 0) { char errmsg[64]; - return Status::Error("fail to access test file. path={}, errno={}, err={}", - test_file_path, errno, strerror_r(errno, errmsg, 64)); + return Status::IOError("fail to access test file. path={}, errno={}, err={}", + test_file_path, errno, strerror_r(errno, errmsg, 64)); } } else { if (errno != ENOENT) { char errmsg[64]; - return Status::Error("fail to access test file. path={}, errno={}, err={}", - test_file_path, errno, strerror_r(errno, errmsg, 64)); + return Status::IOError("fail to access test file. 
path={}, errno={}, err={}", + test_file_path, errno, strerror_r(errno, errmsg, 64)); } } diff --git a/be/src/util/bitmap_value.h b/be/src/util/bitmap_value.h index 041afa7e66dcb5..410f542a99a4fe 100644 --- a/be/src/util/bitmap_value.h +++ b/be/src/util/bitmap_value.h @@ -88,7 +88,7 @@ struct BitmapTypeCode { fmt::format("BitmapTypeCode invalid, should between: {} and {} actrual is {}", BitmapTypeCode::EMPTY, BitmapTypeCode::BITMAP64, bitmap_type); LOG(ERROR) << err_msg; - return Status::IOError(err_msg); + return Status::Corruption(err_msg); } return Status::OK(); } diff --git a/be/src/vec/columns/column_array.cpp b/be/src/vec/columns/column_array.cpp index 7922b692b9b306..c528c54e847107 100644 --- a/be/src/vec/columns/column_array.cpp +++ b/be/src/vec/columns/column_array.cpp @@ -819,7 +819,7 @@ Status ColumnArray::filter_by_selector(const uint16_t* sel, size_t sel_size, ICo max_offset = std::max(max_offset, offset_at(sel[i])); } if (max_offset > std::numeric_limits::max()) { - return Status::IOError("array elements too large than uint16_t::max"); + return Status::Corruption("array elements too large than uint16_t::max"); } to_offsets.reserve(to_offsets.size() + sel_size); diff --git a/be/src/vec/columns/column_map.cpp b/be/src/vec/columns/column_map.cpp index cca9415ef03dee..58a253c52ca6fa 100644 --- a/be/src/vec/columns/column_map.cpp +++ b/be/src/vec/columns/column_map.cpp @@ -416,7 +416,7 @@ Status ColumnMap::filter_by_selector(const uint16_t* sel, size_t sel_size, IColu max_offset = std::max(max_offset, offset_at(sel[i])); } if (max_offset > std::numeric_limits::max()) { - return Status::IOError("map elements too large than uint16_t::max"); + return Status::Corruption("map elements too large than uint16_t::max"); } to_offsets.reserve(to_offsets.size() + sel_size); diff --git a/be/src/vec/olap/block_reader.cpp b/be/src/vec/olap/block_reader.cpp index b7a640ab4afb3f..0c303a97444536 100644 --- a/be/src/vec/olap/block_reader.cpp +++ b/be/src/vec/olap/block_reader.cpp @@ -214,6 +214,10 @@ Status BlockReader::init(const ReaderParams& read_params) { auto status = _init_collect_iter(read_params); if (!status.ok()) { + if (UNLIKELY(!status.ok() && !status.is())) { + _tablet->report_error(status); + } + return status; } diff --git a/be/src/vec/olap/block_reader.h b/be/src/vec/olap/block_reader.h index 818440994514cc..b573e1066945b3 100644 --- a/be/src/vec/olap/block_reader.h +++ b/be/src/vec/olap/block_reader.h @@ -48,7 +48,11 @@ class BlockReader final : public TabletReader { Status init(const ReaderParams& read_params) override; Status next_block_with_aggregation(Block* block, bool* eof) override { - return (this->*_next_block_func)(block, eof); + auto res = (this->*_next_block_func)(block, eof); + if (UNLIKELY(!res.ok() && !res.is())) { + _tablet->report_error(res); + } + return res; } std::vector current_block_row_locations() { return _block_row_locations; } diff --git a/be/src/vec/olap/vertical_block_reader.cpp b/be/src/vec/olap/vertical_block_reader.cpp index c64147950a9125..0b2cb065b08dc7 100644 --- a/be/src/vec/olap/vertical_block_reader.cpp +++ b/be/src/vec/olap/vertical_block_reader.cpp @@ -205,8 +205,8 @@ Status VerticalBlockReader::init(const ReaderParams& read_params) { auto status = _init_collect_iter(read_params); if (!status.ok()) { - if (status.is()) { - _tablet->increase_io_error_times(); + if (UNLIKELY(!status.ok() && !status.is())) { + _tablet->report_error(status); } return status; } diff --git a/be/src/vec/olap/vertical_block_reader.h 
b/be/src/vec/olap/vertical_block_reader.h index 9ba4f9119bf9a5..2a7ed375928e8a 100644 --- a/be/src/vec/olap/vertical_block_reader.h +++ b/be/src/vec/olap/vertical_block_reader.h @@ -60,8 +60,8 @@ class VerticalBlockReader final : public TabletReader { Status next_block_with_aggregation(Block* block, bool* eof) override { auto res = (this->*_next_block_func)(block, eof); - if (UNLIKELY(res.is())) { - _tablet->increase_io_error_times(); + if (UNLIKELY(!res.ok() && !res.is())) { + _tablet->report_error(res); } return res; } From ebe582758fa5fbddab01de5557f345edc1d9c4df Mon Sep 17 00:00:00 2001 From: jakevin Date: Sun, 17 Sep 2023 11:16:03 +0800 Subject: [PATCH 22/33] [opt](Nereids): use LocalDate to replace Calendar (#24361) --- .../nereids/types/coercion/DateLikeType.java | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/types/coercion/DateLikeType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/types/coercion/DateLikeType.java index ff728a73acef43..eca9170157b337 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/types/coercion/DateLikeType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/types/coercion/DateLikeType.java @@ -27,31 +27,33 @@ import org.apache.doris.nereids.types.DateType; import org.apache.doris.nereids.types.DateV2Type; +import java.time.LocalDate; import java.time.temporal.ChronoUnit; -import java.util.Calendar; /** * date like type. */ public abstract class DateLikeType extends PrimitiveType { - private Calendar toCalendar(double d) { - //d = (year * 10000 + month * 100 + day) * 1000000L; + private LocalDate toLocalDate(double d) { + // d = (year * 10000 + month * 100 + day) * 1000000L; int date = (int) (d / 1000000); int day = date % 100; int month = (date / 100) % 100; int year = date / 10000; - Calendar calendar = Calendar.getInstance(); - calendar.set(Calendar.YEAR, year); - calendar.set(Calendar.MONTH, month); - calendar.set(Calendar.DAY_OF_MONTH, day); - return calendar; + return LocalDate.of(year, month, day); } @Override public double rangeLength(double high, double low) { - Calendar to = toCalendar(high); - Calendar from = toCalendar(low); - return ChronoUnit.DAYS.between(from.toInstant(), to.toInstant()); + if (high == low) { + return 0; + } + if (Double.isInfinite(high) || Double.isInfinite(low)) { + return high - low; + } + LocalDate to = toLocalDate(high); + LocalDate from = toLocalDate(low); + return ChronoUnit.DAYS.between(from, to); } /** From 92e521bba54a0f7ca92e0b86e850e823063641fb Mon Sep 17 00:00:00 2001 From: shuke <37901441+shuke987@users.noreply.github.com> Date: Sun, 17 Sep 2023 20:26:21 +0800 Subject: [PATCH 23/33] [regression-test](fix) fix query_p0/having/having.groovy case bug (#24478) --- regression-test/suites/query_p0/having/having.groovy | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/regression-test/suites/query_p0/having/having.groovy b/regression-test/suites/query_p0/having/having.groovy index 2cb0755d5423e7..e54bc3f9a6824a 100644 --- a/regression-test/suites/query_p0/having/having.groovy +++ b/regression-test/suites/query_p0/having/having.groovy @@ -21,6 +21,7 @@ suite("having") { sql "set enable_nereids_planner=false;" + sql """DROP TABLE IF EXISTS supplier""" sql """CREATE TABLE `supplier` ( `s_suppkey` int(11) NOT NULL, `s_name` varchar(25) NOT NULL, @@ -46,4 +47,4 @@ suite("having") { from supplier s group by s_nationkey,s_suppkey having s_nationkey=1 or s_suppkey=1;""" -} \ No newline at end 
of file +} From f1e049d4d6375845ca8de5ccda468bd56e764479 Mon Sep 17 00:00:00 2001 From: DongLiang-0 <46414265+DongLiang-0@users.noreply.github.com> Date: Sun, 17 Sep 2023 21:17:05 +0800 Subject: [PATCH 24/33] [fix](java-udaf)Fix need to restart BE after replacing the jar package in java-udaf (#24469) --- .../doris/analysis/CreateFunctionStmt.java | 154 +++++++++--------- 1 file changed, 81 insertions(+), 73 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java index d00318d0a27529..c50acda716fb3a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java @@ -387,93 +387,101 @@ private void analyzeJavaUdaf(String clazz) throws AnalysisException { try { URL[] urls = {new URL("jar:" + userFile + "!/")}; - URLClassLoader cl = URLClassLoader.newInstance(urls); - Class udfClass = cl.loadClass(clazz); - String udfClassName = udfClass.getCanonicalName(); - String stateClassName = udfClassName + "$" + STATE_CLASS_NAME; - Class stateClass = cl.loadClass(stateClassName); - - for (Method m : udfClass.getMethods()) { - if (!m.getDeclaringClass().equals(udfClass)) { - continue; + try (URLClassLoader cl = URLClassLoader.newInstance(urls)) { + Class udfClass = cl.loadClass(clazz); + String udfClassName = udfClass.getCanonicalName(); + String stateClassName = udfClassName + "$" + STATE_CLASS_NAME; + Class stateClass = cl.loadClass(stateClassName); + + for (Method m : udfClass.getMethods()) { + if (!m.getDeclaringClass().equals(udfClass)) { + continue; + } + String name = m.getName(); + if (allMethods.containsKey(name)) { + throw new AnalysisException( + String.format("UDF class '%s' has multiple methods with name '%s' ", udfClassName, + name)); + } + allMethods.put(name, m); } - String name = m.getName(); - if (allMethods.containsKey(name)) { + + if (allMethods.get(CREATE_METHOD_NAME) == null) { throw new AnalysisException( - String.format("UDF class '%s' has multiple methods with name '%s' ", udfClassName, name)); + String.format("No method '%s' in class '%s'!", CREATE_METHOD_NAME, udfClassName)); + } else { + checkMethodNonStaticAndPublic(CREATE_METHOD_NAME, allMethods.get(CREATE_METHOD_NAME), udfClassName); + checkArgumentCount(allMethods.get(CREATE_METHOD_NAME), 0, udfClassName); + checkReturnJavaType(udfClassName, allMethods.get(CREATE_METHOD_NAME), stateClass); } - allMethods.put(name, m); - } - - if (allMethods.get(CREATE_METHOD_NAME) == null) { - throw new AnalysisException( - String.format("No method '%s' in class '%s'!", CREATE_METHOD_NAME, udfClassName)); - } else { - checkMethodNonStaticAndPublic(CREATE_METHOD_NAME, allMethods.get(CREATE_METHOD_NAME), udfClassName); - checkArgumentCount(allMethods.get(CREATE_METHOD_NAME), 0, udfClassName); - checkReturnJavaType(udfClassName, allMethods.get(CREATE_METHOD_NAME), stateClass); - } - if (allMethods.get(DESTROY_METHOD_NAME) == null) { - throw new AnalysisException( - String.format("No method '%s' in class '%s'!", DESTROY_METHOD_NAME, udfClassName)); - } else { - checkMethodNonStaticAndPublic(DESTROY_METHOD_NAME, allMethods.get(DESTROY_METHOD_NAME), udfClassName); - checkArgumentCount(allMethods.get(DESTROY_METHOD_NAME), 1, udfClassName); - checkReturnJavaType(udfClassName, allMethods.get(DESTROY_METHOD_NAME), void.class); - } + if (allMethods.get(DESTROY_METHOD_NAME) == null) { + throw new AnalysisException( + 
String.format("No method '%s' in class '%s'!", DESTROY_METHOD_NAME, udfClassName)); + } else { + checkMethodNonStaticAndPublic(DESTROY_METHOD_NAME, allMethods.get(DESTROY_METHOD_NAME), + udfClassName); + checkArgumentCount(allMethods.get(DESTROY_METHOD_NAME), 1, udfClassName); + checkReturnJavaType(udfClassName, allMethods.get(DESTROY_METHOD_NAME), void.class); + } - if (allMethods.get(ADD_METHOD_NAME) == null) { - throw new AnalysisException( - String.format("No method '%s' in class '%s'!", ADD_METHOD_NAME, udfClassName)); - } else { - checkMethodNonStaticAndPublic(ADD_METHOD_NAME, allMethods.get(ADD_METHOD_NAME), udfClassName); - checkArgumentCount(allMethods.get(ADD_METHOD_NAME), argsDef.getArgTypes().length + 1, udfClassName); - checkReturnJavaType(udfClassName, allMethods.get(ADD_METHOD_NAME), void.class); - for (int i = 0; i < argsDef.getArgTypes().length; i++) { - Parameter p = allMethods.get(ADD_METHOD_NAME).getParameters()[i + 1]; - checkUdfType(udfClass, allMethods.get(ADD_METHOD_NAME), argsDef.getArgTypes()[i], p.getType(), - p.getName()); + if (allMethods.get(ADD_METHOD_NAME) == null) { + throw new AnalysisException( + String.format("No method '%s' in class '%s'!", ADD_METHOD_NAME, udfClassName)); + } else { + checkMethodNonStaticAndPublic(ADD_METHOD_NAME, allMethods.get(ADD_METHOD_NAME), udfClassName); + checkArgumentCount(allMethods.get(ADD_METHOD_NAME), argsDef.getArgTypes().length + 1, udfClassName); + checkReturnJavaType(udfClassName, allMethods.get(ADD_METHOD_NAME), void.class); + for (int i = 0; i < argsDef.getArgTypes().length; i++) { + Parameter p = allMethods.get(ADD_METHOD_NAME).getParameters()[i + 1]; + checkUdfType(udfClass, allMethods.get(ADD_METHOD_NAME), argsDef.getArgTypes()[i], p.getType(), + p.getName()); + } } - } - if (allMethods.get(SERIALIZE_METHOD_NAME) == null) { - throw new AnalysisException( - String.format("No method '%s' in class '%s'!", SERIALIZE_METHOD_NAME, udfClassName)); - } else { - checkMethodNonStaticAndPublic(SERIALIZE_METHOD_NAME, allMethods.get(SERIALIZE_METHOD_NAME), - udfClassName); - checkArgumentCount(allMethods.get(SERIALIZE_METHOD_NAME), 2, udfClassName); - checkReturnJavaType(udfClassName, allMethods.get(SERIALIZE_METHOD_NAME), void.class); - } + if (allMethods.get(SERIALIZE_METHOD_NAME) == null) { + throw new AnalysisException( + String.format("No method '%s' in class '%s'!", SERIALIZE_METHOD_NAME, udfClassName)); + } else { + checkMethodNonStaticAndPublic(SERIALIZE_METHOD_NAME, allMethods.get(SERIALIZE_METHOD_NAME), + udfClassName); + checkArgumentCount(allMethods.get(SERIALIZE_METHOD_NAME), 2, udfClassName); + checkReturnJavaType(udfClassName, allMethods.get(SERIALIZE_METHOD_NAME), void.class); + } - if (allMethods.get(MERGE_METHOD_NAME) == null) { - throw new AnalysisException( - String.format("No method '%s' in class '%s'!", MERGE_METHOD_NAME, udfClassName)); - } else { - checkMethodNonStaticAndPublic(MERGE_METHOD_NAME, allMethods.get(MERGE_METHOD_NAME), udfClassName); - checkArgumentCount(allMethods.get(MERGE_METHOD_NAME), 2, udfClassName); - checkReturnJavaType(udfClassName, allMethods.get(MERGE_METHOD_NAME), void.class); - } + if (allMethods.get(MERGE_METHOD_NAME) == null) { + throw new AnalysisException( + String.format("No method '%s' in class '%s'!", MERGE_METHOD_NAME, udfClassName)); + } else { + checkMethodNonStaticAndPublic(MERGE_METHOD_NAME, allMethods.get(MERGE_METHOD_NAME), udfClassName); + checkArgumentCount(allMethods.get(MERGE_METHOD_NAME), 2, udfClassName); + checkReturnJavaType(udfClassName, 
allMethods.get(MERGE_METHOD_NAME), void.class); + } - if (allMethods.get(GETVALUE_METHOD_NAME) == null) { - throw new AnalysisException( - String.format("No method '%s' in class '%s'!", GETVALUE_METHOD_NAME, udfClassName)); - } else { - checkMethodNonStaticAndPublic(GETVALUE_METHOD_NAME, allMethods.get(GETVALUE_METHOD_NAME), udfClassName); - checkArgumentCount(allMethods.get(GETVALUE_METHOD_NAME), 1, udfClassName); - checkReturnUdfType(udfClass, allMethods.get(GETVALUE_METHOD_NAME), returnType.getType()); - } + if (allMethods.get(GETVALUE_METHOD_NAME) == null) { + throw new AnalysisException( + String.format("No method '%s' in class '%s'!", GETVALUE_METHOD_NAME, udfClassName)); + } else { + checkMethodNonStaticAndPublic(GETVALUE_METHOD_NAME, allMethods.get(GETVALUE_METHOD_NAME), + udfClassName); + checkArgumentCount(allMethods.get(GETVALUE_METHOD_NAME), 1, udfClassName); + checkReturnUdfType(udfClass, allMethods.get(GETVALUE_METHOD_NAME), returnType.getType()); + } - if (!Modifier.isPublic(stateClass.getModifiers()) || !Modifier.isStatic(stateClass.getModifiers())) { + if (!Modifier.isPublic(stateClass.getModifiers()) || !Modifier.isStatic(stateClass.getModifiers())) { + throw new AnalysisException( + String.format( + "UDAF '%s' should have one public & static 'State' class to Construction data ", + udfClassName)); + } + } catch (ClassNotFoundException e) { throw new AnalysisException( - String.format("UDAF '%s' should have one public & static 'State' class to Construction data ", - udfClassName)); + "Class [" + clazz + "] or inner class [State] not found in file :" + userFile); + } catch (IOException e) { + throw new AnalysisException("Failed to load file: " + userFile); } } catch (MalformedURLException e) { throw new AnalysisException("Failed to load file: " + userFile); - } catch (ClassNotFoundException e) { - throw new AnalysisException("Class [" + clazz + "] or inner class [State] not found in file :" + userFile); } } From a07f59de8c45ac26bfd0140c8cc7fdd75c838bf3 Mon Sep 17 00:00:00 2001 From: Qi Chen Date: Mon, 18 Sep 2023 09:51:33 +0800 Subject: [PATCH 25/33] [Fix](multi-catalog) Fix hadoop viewfs issues. (#24507) Error Msg: Caused by: org.apache.doris.datasource.CacheException: failed to get input splits for FileCacheKey{location='viewfs://my-cluster/ns1/usr/hive/warehouse/viewfs.db/parquet_table', inputFormat='org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'} in catalog test_viewfs_hive at org.apache.doris.datasource.hive.HiveMetaStoreCache.loadFiles(HiveMetaStoreCache.java:466) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache.access$400(HiveMetaStoreCache.java:112) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:210) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:202) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.common.util.CacheBulkLoader.lambda$null$0(CacheBulkLoader.java:42) ~[doris-fe.jar:1.2-SNAPSHOT] at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_131] ... 
3 more Caused by: org.apache.doris.common.UserException: errCode = 2, detailMessage = Failed to list located status for path: viewfs://my-cluster/ns1/usr/hive/warehouse/viewfs.db/parquet_table at org.apache.doris.fs.remote.RemoteFileSystem.listLocatedFiles(RemoteFileSystem.java:54) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache.getFileCache(HiveMetaStoreCache.java:381) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache.loadFiles(HiveMetaStoreCache.java:432) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache.access$400(HiveMetaStoreCache.java:112) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:210) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:202) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.common.util.CacheBulkLoader.lambda$null$0(CacheBulkLoader.java:42) ~[doris-fe.jar:1.2-SNAPSHOT] at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_131] ... 3 more Caused by: java.nio.file.AccessDeniedException: viewfs://my-cluster/ns1/usr/hive/warehouse/viewfs.db/parquet_table: org.apache.hadoop.fs.s3a.auth.NoAuthWithAWSException: No AWS Credentials provided by TemporaryAWSCredentialsProvider SimpleAWSCredentialsProvider EnvironmentVariableCredentialsProvider IAMInstanceCredentialsProvider : com.amazonaws.SdkClientException: Unable to load AWS credentials from environment variables (AWS_ACCESS_KEY_ID (or AWS_ACCESS_KEY) and AWS_SECRET_KEY (or AWS_SECRET_ACCESS_KEY)) at org.apache.hadoop.fs.s3a.S3AUtils.translateException(S3AUtils.java:215) ~[hadoop-aws-3.3.6.jar:?] at org.apache.hadoop.fs.s3a.Invoker.onceInTheFuture(Invoker.java:190) ~[hadoop-aws-3.3.6.jar:?] at org.apache.hadoop.fs.s3a.Listing$ObjectListingIterator.next(Listing.java:651) ~[hadoop-aws-3.3.6.jar:?] at org.apache.hadoop.fs.s3a.Listing$FileStatusListingIterator.requestNextBatch(Listing.java:430) ~[hadoop-aws-3.3.6.jar:?] at org.apache.hadoop.fs.s3a.Listing$FileStatusListingIterator.(Listing.java:372) ~[hadoop-aws-3.3.6.jar:?] at org.apache.hadoop.fs.s3a.Listing.createFileStatusListingIterator(Listing.java:143) ~[hadoop-aws-3.3.6.jar:?] at org.apache.hadoop.fs.s3a.Listing.getListFilesAssumingDir(Listing.java:211) ~[hadoop-aws-3.3.6.jar:?] at org.apache.hadoop.fs.s3a.S3AFileSystem.innerListFiles(S3AFileSystem.java:4898) ~[hadoop-aws-3.3.6.jar:?] at org.apache.hadoop.fs.s3a.S3AFileSystem.lambda$listFiles$38(S3AFileSystem.java:4840) ~[hadoop-aws-3.3.6.jar:?] at org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.invokeTrackingDuration(IOStatisticsBinding.java:547) ~[hadoop-common-3.3.6.jar:?] at org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.lambda$trackDurationOfOperation$5(IOStatisticsBinding.java:528) ~[hadoop-common-3.3.6.jar:?] at org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.trackDuration(IOStatisticsBinding.java:449) ~[hadoop-common-3.3.6.jar:?] at org.apache.hadoop.fs.s3a.S3AFileSystem.trackDurationAndSpan(S3AFileSystem.java:2480) ~[hadoop-aws-3.3.6.jar:?] at org.apache.hadoop.fs.s3a.S3AFileSystem.trackDurationAndSpan(S3AFileSystem.java:2499) ~[hadoop-aws-3.3.6.jar:?] at org.apache.hadoop.fs.s3a.S3AFileSystem.listFiles(S3AFileSystem.java:4839) ~[hadoop-aws-3.3.6.jar:?] 
at org.apache.doris.fs.remote.RemoteFileSystem.listLocatedFiles(RemoteFileSystem.java:50) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache.getFileCache(HiveMetaStoreCache.java:381) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache.loadFiles(HiveMetaStoreCache.java:432) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache.access$400(HiveMetaStoreCache.java:112) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:210) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:202) ~[doris-fe.jar:1.2-SNAPSHOT] at org.apache.doris.common.util.CacheBulkLoader.lambda$null$0(CacheBulkLoader.java:42) ~[doris-fe.jar:1.2-SNAPSHOT] at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_131] ... 3 more --- .../main/java/org/apache/doris/analysis/StorageBackend.java | 5 +++-- .../org/apache/doris/datasource/hive/HiveMetaStoreCache.java | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java index f3d7f7e49f02d4..e88115492430c3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java @@ -65,8 +65,9 @@ public static void checkPath(String path, StorageBackend.StorageType type) throw } } else if (type == StorageBackend.StorageType.S3 && !schema.equalsIgnoreCase("s3")) { throw new AnalysisException("Invalid export path. please use valid 's3://' path."); - } else if (type == StorageBackend.StorageType.HDFS && !schema.equalsIgnoreCase("hdfs")) { - throw new AnalysisException("Invalid export path. please use valid 'HDFS://' path."); + } else if (type == StorageBackend.StorageType.HDFS && !schema.equalsIgnoreCase("hdfs") + && !schema.equalsIgnoreCase("viewfs")) { + throw new AnalysisException("Invalid export path. please use valid 'HDFS://' or 'viewfs://' path."); } else if (type == StorageBackend.StorageType.LOCAL && !schema.equalsIgnoreCase("file")) { throw new AnalysisException( "Invalid export path. please use valid '" + OutFileClause.LOCAL_FILE_PREFIX + "' path."); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java index ec1704547aa396..0a85d9ff5bd00e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java @@ -416,7 +416,7 @@ private FileCacheValue loadFiles(FileCacheKey key) { if (uri.getScheme() != null) { String scheme = uri.getScheme(); updateJobConf("fs." + scheme + ".impl.disable.cache", "true"); - if (!scheme.equals("hdfs")) { + if (!scheme.equals("hdfs") && !scheme.equals("viewfs")) { updateJobConf("fs." + scheme + ".impl", PropertyConverter.getHadoopFSImplByScheme(scheme)); } } From 591aeaa98d1178e2e277278c7afeafef9bdb88d6 Mon Sep 17 00:00:00 2001 From: shuke <37901441+shuke987@users.noreply.github.com> Date: Mon, 18 Sep 2023 09:57:38 +0800 Subject: [PATCH 26/33] Revert "[schedule](pipeline) Remove wait schedule time in pipeline query engine (#23994)" (#24472) This reverts commit 32a7eef96a09799c8336c1964bfe7d676b7e4c98. 
--- be/src/pipeline/pipeline_task.cpp | 2 ++ be/src/pipeline/pipeline_task.h | 4 ++++ be/src/pipeline/task_scheduler.cpp | 31 ++++++++++++++++++------------ be/src/pipeline/task_scheduler.h | 1 + 4 files changed, 26 insertions(+), 12 deletions(-) diff --git a/be/src/pipeline/pipeline_task.cpp b/be/src/pipeline/pipeline_task.cpp index add92cabf37065..bb27012b3d4994 100644 --- a/be/src/pipeline/pipeline_task.cpp +++ b/be/src/pipeline/pipeline_task.cpp @@ -86,6 +86,7 @@ void PipelineTask::_fresh_profile_counter() { COUNTER_SET(_schedule_counts, (int64_t)_schedule_time); COUNTER_SET(_wait_sink_timer, (int64_t)_wait_sink_watcher.elapsed_time()); COUNTER_SET(_wait_worker_timer, (int64_t)_wait_worker_watcher.elapsed_time()); + COUNTER_SET(_wait_schedule_timer, (int64_t)_wait_schedule_watcher.elapsed_time()); COUNTER_SET(_begin_execute_timer, _begin_execute_time); COUNTER_SET(_eos_timer, _eos_time); COUNTER_SET(_src_pending_finish_over_timer, _src_pending_finish_over_time); @@ -116,6 +117,7 @@ void PipelineTask::_init_profile() { _wait_bf_timer = ADD_TIMER(_task_profile, "WaitBfTime"); _wait_sink_timer = ADD_TIMER(_task_profile, "WaitSinkTime"); _wait_worker_timer = ADD_TIMER(_task_profile, "WaitWorkerTime"); + _wait_schedule_timer = ADD_TIMER(_task_profile, "WaitScheduleTime"); _block_counts = ADD_COUNTER(_task_profile, "NumBlockedTimes", TUnit::UNIT); _block_by_source_counts = ADD_COUNTER(_task_profile, "NumBlockedBySrcTimes", TUnit::UNIT); _block_by_sink_counts = ADD_COUNTER(_task_profile, "NumBlockedBySinkTimes", TUnit::UNIT); diff --git a/be/src/pipeline/pipeline_task.h b/be/src/pipeline/pipeline_task.h index b8b8e89215f565..4311c48f3258cf 100644 --- a/be/src/pipeline/pipeline_task.h +++ b/be/src/pipeline/pipeline_task.h @@ -133,6 +133,8 @@ class PipelineTask { _wait_worker_watcher.start(); } void pop_out_runnable_queue() { _wait_worker_watcher.stop(); } + void start_schedule_watcher() { _wait_schedule_watcher.start(); } + void stop_schedule_watcher() { _wait_schedule_watcher.stop(); } PipelineTaskState get_state() { return _cur_state; } void set_state(PipelineTaskState state); @@ -309,6 +311,8 @@ class PipelineTask { MonotonicStopWatch _wait_worker_watcher; RuntimeProfile::Counter* _wait_worker_timer; // TODO we should calculate the time between when really runnable and runnable + MonotonicStopWatch _wait_schedule_watcher; + RuntimeProfile::Counter* _wait_schedule_timer; RuntimeProfile::Counter* _yield_counts; RuntimeProfile::Counter* _core_change_times; diff --git a/be/src/pipeline/task_scheduler.cpp b/be/src/pipeline/task_scheduler.cpp index c4278c38077cdc..e4a4ec38af9f16 100644 --- a/be/src/pipeline/task_scheduler.cpp +++ b/be/src/pipeline/task_scheduler.cpp @@ -85,6 +85,7 @@ void BlockedTaskScheduler::_schedule() { _started.store(true); std::list local_blocked_tasks; int empty_times = 0; + std::vector ready_tasks; while (!_shutdown) { { @@ -104,7 +105,6 @@ void BlockedTaskScheduler::_schedule() { } } - auto origin_local_block_tasks_size = local_blocked_tasks.size(); auto iter = local_blocked_tasks.begin(); vectorized::VecDateTimeValue now = vectorized::VecDateTimeValue::local_time(); while (iter != local_blocked_tasks.end()) { @@ -116,52 +116,57 @@ void BlockedTaskScheduler::_schedule() { VLOG_DEBUG << "Task pending" << task->debug_string(); iter++; } else { - _make_task_run(local_blocked_tasks, iter, PipelineTaskState::PENDING_FINISH); + _make_task_run(local_blocked_tasks, iter, ready_tasks, + PipelineTaskState::PENDING_FINISH); } } else if (task->query_context()->is_cancelled()) { - 
_make_task_run(local_blocked_tasks, iter); + _make_task_run(local_blocked_tasks, iter, ready_tasks); } else if (task->query_context()->is_timeout(now)) { LOG(WARNING) << "Timeout, query_id=" << print_id(task->query_context()->query_id()) << ", instance_id=" << print_id(task->instance_id()) << ", task info: " << task->debug_string(); task->query_context()->cancel(true, "", Status::Cancelled("")); - _make_task_run(local_blocked_tasks, iter); + _make_task_run(local_blocked_tasks, iter, ready_tasks); } else if (state == PipelineTaskState::BLOCKED_FOR_DEPENDENCY) { if (task->has_dependency()) { iter++; } else { - _make_task_run(local_blocked_tasks, iter); + _make_task_run(local_blocked_tasks, iter, ready_tasks); } } else if (state == PipelineTaskState::BLOCKED_FOR_SOURCE) { if (task->source_can_read()) { - _make_task_run(local_blocked_tasks, iter); + _make_task_run(local_blocked_tasks, iter, ready_tasks); } else { iter++; } } else if (state == PipelineTaskState::BLOCKED_FOR_RF) { if (task->runtime_filters_are_ready_or_timeout()) { - _make_task_run(local_blocked_tasks, iter); + _make_task_run(local_blocked_tasks, iter, ready_tasks); } else { iter++; } } else if (state == PipelineTaskState::BLOCKED_FOR_SINK) { if (task->sink_can_write()) { - _make_task_run(local_blocked_tasks, iter); + _make_task_run(local_blocked_tasks, iter, ready_tasks); } else { iter++; } } else { // TODO: DCHECK the state - _make_task_run(local_blocked_tasks, iter); + _make_task_run(local_blocked_tasks, iter, ready_tasks); } } - if (origin_local_block_tasks_size == 0 || - local_blocked_tasks.size() == origin_local_block_tasks_size) { + if (ready_tasks.empty()) { empty_times += 1; } else { empty_times = 0; + for (auto& task : ready_tasks) { + task->stop_schedule_watcher(); + _task_queue->push_back(task); + } + ready_tasks.clear(); } if (empty_times != 0 && (empty_times & (EMPTY_TIMES_TO_YIELD - 1)) == 0) { @@ -181,11 +186,13 @@ void BlockedTaskScheduler::_schedule() { void BlockedTaskScheduler::_make_task_run(std::list& local_tasks, std::list::iterator& task_itr, + std::vector& ready_tasks, PipelineTaskState t_state) { auto task = *task_itr; + task->start_schedule_watcher(); task->set_state(t_state); local_tasks.erase(task_itr++); - _task_queue->push_back(task); + ready_tasks.emplace_back(task); } TaskScheduler::~TaskScheduler() { diff --git a/be/src/pipeline/task_scheduler.h b/be/src/pipeline/task_scheduler.h index 13b9e734d699d4..b9d3dfbac3cee6 100644 --- a/be/src/pipeline/task_scheduler.h +++ b/be/src/pipeline/task_scheduler.h @@ -71,6 +71,7 @@ class BlockedTaskScheduler { void _schedule(); void _make_task_run(std::list& local_tasks, std::list::iterator& task_itr, + std::vector& ready_tasks, PipelineTaskState state = PipelineTaskState::RUNNABLE); }; From f04bc05a7e60dd7adb6d4b149559d58a757c7aa4 Mon Sep 17 00:00:00 2001 From: Jerry Hu Date: Mon, 18 Sep 2023 10:24:32 +0800 Subject: [PATCH 27/33] [fix](agg) The offset value was added twice in 'pack_fixed' (#24506) --- be/src/vec/common/aggregation_common.h | 1 - 1 file changed, 1 deletion(-) diff --git a/be/src/vec/common/aggregation_common.h b/be/src/vec/common/aggregation_common.h index 20cda701ebdbcc..68ee8e4cca284e 100644 --- a/be/src/vec/common/aggregation_common.h +++ b/be/src/vec/common/aggregation_common.h @@ -95,7 +95,6 @@ T pack_fixed(size_t i, size_t keys_size, const ColumnRawPtrs& key_columns, const static_cast(key_columns[j])->get_raw_data_begin<1>() + i * key_sizes[j], key_sizes[j]); - offset += key_sizes[j]; } offset += key_sizes[j]; From 
c746a89c7237ef81191f674334174eeb25f9636a Mon Sep 17 00:00:00 2001 From: yujun Date: Mon, 18 Sep 2023 11:06:30 +0800 Subject: [PATCH 28/33] [improvement](transaction) print txn edit log cost time #24501 --- .../src/main/java/org/apache/doris/persist/EditLog.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java b/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java index f2a890bd890972..de11eba5fdab97 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java @@ -1504,10 +1504,18 @@ public void logExportUpdateState(long jobId, ExportJobState newState) { // for TransactionState public void logInsertTransactionState(TransactionState transactionState) { + long start = System.currentTimeMillis(); long logId = logEdit(OperationType.OP_UPSERT_TRANSACTION_STATE, transactionState); + long logEditEnd = System.currentTimeMillis(); + long end = logEditEnd; if (transactionState.getTransactionStatus() == TransactionStatus.VISIBLE) { UpsertRecord record = new UpsertRecord(logId, transactionState); Env.getCurrentEnv().getBinlogManager().addUpsertRecord(record); + end = System.currentTimeMillis(); + } + if (end - start > Config.lock_reporting_threshold_ms) { + LOG.warn("edit log insert transaction take a lot time, write bdb {} ms, write binlog {} ms", + logEditEnd - start, end - logEditEnd); } } From 7f7ec496cd626b011dc96863575dd60303903008 Mon Sep 17 00:00:00 2001 From: Pxl Date: Mon, 18 Sep 2023 11:11:53 +0800 Subject: [PATCH 29/33] [Chore](checks) fix sonarcloud properties have wrong path (#24517) fix sonarcloud properties have wrong path --- sonar-project.properties | 1 - .github/workflows/sonarcloud.yml | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/ sonar-project.properties b/ sonar-project.properties index 4d826c71ac965b..bc2baada2c69a2 100644 --- a/ sonar-project.properties +++ b/ sonar-project.properties @@ -19,4 +19,3 @@ sonar.host.url=https://sonarcloud.io sonar.projectKey=apache_incubator-doris sonar.organization=apache -sonar.sources=be diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml index d9728dcb344a6f..232946fd83fdda 100644 --- a/.github/workflows/sonarcloud.yml +++ b/.github/workflows/sonarcloud.yml @@ -154,5 +154,5 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - run: sonar-scanner -Dsonar.cfamily.compile-commands=be/build_Release/compile_commands.json -Dsonar.sources=be + run: sonar-scanner -Dsonar.cfamily.compile-commands=be/build_Release/compile_commands.json From 23a75d0277fd2e2be04e9270112c9e84194dada5 Mon Sep 17 00:00:00 2001 From: amory Date: Mon, 18 Sep 2023 11:12:26 +0800 Subject: [PATCH 30/33] [FIX](decimalv3) Fix decimalv3 with abnormal value same with mysql result (#24499) Fix decimalv3 with abnormal value same with mysql result --- be/src/util/string_parser.hpp | 10 +++++++++- regression-test/data/query_p0/cast/test_cast.out | 12 ++++++++++++ .../suites/query_p0/cast/test_cast.groovy | 9 +++++++++ 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/be/src/util/string_parser.hpp b/be/src/util/string_parser.hpp index 3aa35ab47d7ac3..5485e3ff5db392 100644 --- a/be/src/util/string_parser.hpp +++ b/be/src/util/string_parser.hpp @@ -715,13 +715,21 @@ T StringParser::string_to_decimal(const char* s, int len, int type_precision, in } break; } else { + // jump to here: should handle the wrong character of 
decimal if (value == 0) { *result = StringParser::PARSE_FAILURE; return 0; } + // here to handle *result = StringParser::PARSE_SUCCESS; - if (type_scale > scale) { + if (type_scale >= scale) { value *= get_scale_multiplier(type_scale - scale); + // here meet non-valid character, should return the value, keep going to meet + // the E/e character because we make right user-given type_precision + // not max number type_precision + if (!is_numeric_ascii(c)) { + return is_negative ? T(-value) : T(value); + } } } } diff --git a/regression-test/data/query_p0/cast/test_cast.out b/regression-test/data/query_p0/cast/test_cast.out index b4f08ded8a6795..89b29cc663d78e 100644 --- a/regression-test/data/query_p0/cast/test_cast.out +++ b/regression-test/data/query_p0/cast/test_cast.out @@ -17,3 +17,15 @@ -- !sql_decimalv3 -- 0.0 +-- !sql_decimalv3 -- +1001.000000000 + +-- !sql_decimalv3 -- +1001 + +-- !sql_decimalv3 -- +99 + +-- !sql_decimalv3 -- +9 + diff --git a/regression-test/suites/query_p0/cast/test_cast.groovy b/regression-test/suites/query_p0/cast/test_cast.groovy index 43963efef5608c..c39f89b645e5b2 100644 --- a/regression-test/suites/query_p0/cast/test_cast.groovy +++ b/regression-test/suites/query_p0/cast/test_cast.groovy @@ -65,6 +65,15 @@ suite('test_cast') { // overflow with min value qt_sql_decimalv3 """ select cast('0.2147483648e-3' as DECIMALV3(2, 1))""" + // decimalv3 with abnormal decimal case , + qt_sql_decimalv3 """ select cast('1001-12-31 00:00:00' as DECIMALV3(27, 9))""" + + qt_sql_decimalv3 """ select cast('1001-12-31 00:00:00' as DECIMALV3(9, 0))""" + + qt_sql_decimalv3 """ select cast('1001-12-31 00:00:00' as DECIMALV3(2, 0))""" + + qt_sql_decimalv3 """ select cast('1001-12-31 00:00:00' as DECIMALV3(1, 0))""" + def tbl = "test_cast" sql """ DROP TABLE IF EXISTS ${tbl}""" From 932b639086b929745871ec1016e8c9419e3c7f20 Mon Sep 17 00:00:00 2001 From: lihangyu <15605149486@163.com> Date: Mon, 18 Sep 2023 11:25:40 +0800 Subject: [PATCH 31/33] [refactor](point query) decouple PointQueryExec from the Coordinator (#24509) In order to decouple PointQueryExec from the Coordinator, both PointQueryExec and Coordinator inherit from CoordInterface, and are collectively scheduled through StmtExecutor. --- .../org/apache/doris/qe/CoordInterface.java | 31 +++++ .../java/org/apache/doris/qe/Coordinator.java | 63 ++------- .../org/apache/doris/qe/PointQueryExec.java | 122 +++++++++++++----- .../org/apache/doris/qe/StmtExecutor.java | 34 +++-- 4 files changed, 153 insertions(+), 97 deletions(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/qe/CoordInterface.java diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/CoordInterface.java b/fe/fe-core/src/main/java/org/apache/doris/qe/CoordInterface.java new file mode 100644 index 00000000000000..925cd1fd15b325 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/CoordInterface.java @@ -0,0 +1,31 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.qe; + +import org.apache.doris.proto.Types; + +public interface CoordInterface { + public void exec() throws Exception; + + public RowBatch getNext() throws Exception; + + public int getInstanceTotalNum(); + + public void cancel(Types.PPlanFragmentCancelReason cancelReason); +} + diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/Coordinator.java b/fe/fe-core/src/main/java/org/apache/doris/qe/Coordinator.java index 3309c828833ade..f601faba683728 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/Coordinator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/Coordinator.java @@ -19,8 +19,6 @@ import org.apache.doris.analysis.Analyzer; import org.apache.doris.analysis.DescriptorTable; -import org.apache.doris.analysis.PrepareStmt; -import org.apache.doris.analysis.PrepareStmt.PreparedType; import org.apache.doris.analysis.StorageBackend; import org.apache.doris.catalog.Env; import org.apache.doris.catalog.FsBroker; @@ -150,7 +148,7 @@ import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; -public class Coordinator { +public class Coordinator implements CoordInterface { private static final Logger LOG = LogManager.getLogger(Coordinator.class); private static final String localIP = FrontendOptions.getLocalHostAddress(); @@ -260,7 +258,6 @@ public class Coordinator { public Map ridToBuilderNum = Maps.newHashMap(); private ConnectContext context; - private boolean isPointQuery = false; private PointQueryExec pointExec = null; private StatsErrorEstimator statsErrorEstimator; @@ -294,32 +291,7 @@ public Coordinator(ConnectContext context, Analyzer analyzer, Planner planner) { this.queryId = context.queryId(); this.fragments = planner.getFragments(); this.scanNodes = planner.getScanNodes(); - - if (this.scanNodes.size() == 1 && this.scanNodes.get(0) instanceof OlapScanNode) { - OlapScanNode olapScanNode = (OlapScanNode) (this.scanNodes.get(0)); - isPointQuery = olapScanNode.isPointQuery(); - if (isPointQuery) { - PlanFragment fragment = fragments.get(0); - LOG.debug("execPointGet fragment {}", fragment); - OlapScanNode planRoot = (OlapScanNode) fragment.getPlanRoot(); - Preconditions.checkNotNull(planRoot); - pointExec = new PointQueryExec(planRoot.getPointQueryEqualPredicates(), - planRoot.getDescTable(), fragment.getOutputExprs()); - } - } - PrepareStmt prepareStmt = analyzer == null ? 
null : analyzer.getPrepareStmt(); - if (prepareStmt != null && prepareStmt.getPreparedType() == PreparedType.FULL_PREPARED) { - // Used cached or better performance - this.descTable = prepareStmt.getDescTable(); - if (pointExec != null) { - pointExec.setCacheID(prepareStmt.getID()); - pointExec.setSerializedDescTable(prepareStmt.getSerializedDescTable()); - pointExec.setSerializedOutputExpr(prepareStmt.getSerializedOutputExprs()); - pointExec.setBinaryProtocol(prepareStmt.isBinaryProtocol()); - } - } else { - this.descTable = planner.getDescTable().toThrift(); - } + this.descTable = planner.getDescTable().toThrift(); this.returnedAllResults = false; this.enableShareHashTableForBroadcastJoin = context.getSessionVariable().enableShareHashTableForBroadcastJoin; @@ -506,6 +478,7 @@ public Map getBeToInstancesNum() { return result; } + @Override public int getInstanceTotalNum() { return instanceTotalNum; } @@ -598,6 +571,7 @@ public TExecPlanFragmentParams getStreamLoadPlan() throws Exception { // 'Request' must contain at least a coordinator plan fragment (ie, can't // be for a query like 'SELECT 1'). // A call to Exec() must precede all other member function calls. + @Override public void exec() throws Exception { if (LOG.isDebugEnabled() && !scanNodes.isEmpty()) { LOG.debug("debug: in Coordinator::exec. query id: {}, planNode: {}", @@ -649,17 +623,10 @@ public void exec() throws Exception { LOG.info("dispatch load job: {} to {}", DebugUtil.printId(queryId), addressToBackendID.keySet()); } executionProfile.markInstances(instanceIds); - if (!isPointQuery) { - if (enablePipelineEngine) { - sendPipelineCtx(); - } else { - sendFragment(); - } + if (enablePipelineEngine) { + sendPipelineCtx(); } else { - OlapScanNode planRoot = (OlapScanNode) fragments.get(0).getPlanRoot(); - Preconditions.checkState(planRoot.getScanTabletIds().size() == 1); - pointExec.setCandidateBackends(planRoot.getScanBackendIds()); - pointExec.setTabletId(planRoot.getScanTabletIds().get(0)); + sendFragment(); } } @@ -1187,6 +1154,7 @@ private void updateStatus(Status status, TUniqueId instanceId) { } } + @Override public RowBatch getNext() throws Exception { if (receiver == null) { throw new UserException("There is no receiver."); @@ -1194,12 +1162,7 @@ public RowBatch getNext() throws Exception { RowBatch resultBatch; Status status = new Status(); - - if (!isPointQuery) { - resultBatch = receiver.getNext(status); - } else { - resultBatch = pointExec.getNext(status); - } + resultBatch = receiver.getNext(status); if (!status.ok()) { LOG.warn("get next fail, need cancel. query id: {}", DebugUtil.printId(queryId)); } @@ -1325,6 +1288,7 @@ public void cancel() { cancel(Types.PPlanFragmentCancelReason.USER_CANCEL); } + @Override public void cancel(Types.PPlanFragmentCancelReason cancelReason) { lock(); try { @@ -2095,13 +2059,6 @@ private Map getReplicaNumPerHostForOlapTable() { // Populates scan_range_assignment_. 
// > private void computeScanRangeAssignment() throws Exception { - if (isPointQuery) { - // Fast path for evaluate Backend for point query - List locations = ((OlapScanNode) scanNodes.get(0)).lazyEvaluateRangeLocations(); - Preconditions.checkNotNull(locations); - return; - } - Map assignedBytesPerHost = Maps.newHashMap(); Map replicaNumPerHost = getReplicaNumPerHostForOlapTable(); Collections.shuffle(scanNodes); diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/PointQueryExec.java b/fe/fe-core/src/main/java/org/apache/doris/qe/PointQueryExec.java index bda52b94ad2416..0ffb5b989d8370 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/PointQueryExec.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/PointQueryExec.java @@ -17,23 +17,33 @@ package org.apache.doris.qe; +import org.apache.doris.analysis.Analyzer; import org.apache.doris.analysis.DescriptorTable; import org.apache.doris.analysis.Expr; import org.apache.doris.analysis.LiteralExpr; +import org.apache.doris.analysis.PrepareStmt; import org.apache.doris.analysis.SlotRef; import org.apache.doris.catalog.Env; import org.apache.doris.common.Config; import org.apache.doris.common.Status; +import org.apache.doris.common.UserException; +import org.apache.doris.planner.OlapScanNode; +import org.apache.doris.planner.PlanFragment; +import org.apache.doris.planner.Planner; import org.apache.doris.proto.InternalService; import org.apache.doris.proto.InternalService.KeyTuple; +import org.apache.doris.proto.Types; import org.apache.doris.rpc.BackendServiceProxy; import org.apache.doris.rpc.RpcException; import org.apache.doris.system.Backend; import org.apache.doris.thrift.TExpr; import org.apache.doris.thrift.TExprList; import org.apache.doris.thrift.TResultBatch; +import org.apache.doris.thrift.TScanRangeLocations; import org.apache.doris.thrift.TStatusCode; +import com.google.common.base.Preconditions; +import com.google.common.base.Strings; import com.google.protobuf.ByteString; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -43,7 +53,6 @@ import java.util.ArrayList; import java.util.Collections; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -53,7 +62,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -public class PointQueryExec { +public class PointQueryExec implements CoordInterface { private static final Logger LOG = LogManager.getLogger(PointQueryExec.class); // SlotRef sorted by column id private Map equalPredicats; @@ -69,22 +78,55 @@ public class PointQueryExec { private boolean isBinaryProtocol = false; private List candidateBackends; + Planner planner; // For parepared statement cached structure, // there are some pre caculated structure in Backend TabletFetch service // using this ID to find for this prepared statement private UUID cacheID; - public PointQueryExec(Map equalPredicats, DescriptorTable descTable, - ArrayList outputExprs) { - this.equalPredicats = equalPredicats; - this.descriptorTable = descTable; - this.outputExprs = outputExprs; + private OlapScanNode getPlanRoot() { + List fragments = planner.getFragments(); + PlanFragment fragment = fragments.get(0); + LOG.debug("execPointGet fragment {}", fragment); + OlapScanNode planRoot = (OlapScanNode) fragment.getPlanRoot(); + Preconditions.checkNotNull(planRoot); + return planRoot; } - void setCandidateBackends(HashSet backendsIds) { + public PointQueryExec(Planner planner, Analyzer analyzer) { + // init from planner + 
this.planner = planner; + List fragments = planner.getFragments(); + PlanFragment fragment = fragments.get(0); + OlapScanNode planRoot = getPlanRoot(); + this.equalPredicats = planRoot.getPointQueryEqualPredicates(); + this.descriptorTable = planRoot.getDescTable(); + this.outputExprs = fragment.getOutputExprs(); + + PrepareStmt prepareStmt = analyzer == null ? null : analyzer.getPrepareStmt(); + if (prepareStmt != null && prepareStmt.getPreparedType() == PrepareStmt.PreparedType.FULL_PREPARED) { + // Used cached or better performance + this.cacheID = prepareStmt.getID(); + this.serializedDescTable = prepareStmt.getSerializedDescTable(); + this.serializedOutputExpr = prepareStmt.getSerializedOutputExprs(); + this.isBinaryProtocol = prepareStmt.isBinaryProtocol(); + } else { + // TODO + // planner.getDescTable().toThrift(); + } + } + + void setScanRangeLocations() throws Exception { + OlapScanNode planRoot = getPlanRoot(); + // compute scan range + List locations = planRoot.lazyEvaluateRangeLocations(); + Preconditions.checkState(planRoot.getScanTabletIds().size() == 1); + this.tabletID = planRoot.getScanTabletIds().get(0); + + Preconditions.checkNotNull(locations); candidateBackends = new ArrayList<>(); - for (Long backendID : backendsIds) { + for (Long backendID : planRoot.getScanBackendIds()) { Backend backend = Env.getCurrentSystemInfo().getBackend(backendID); if (SimpleScheduler.isAvailable(backend)) { candidateBackends.add(backend); @@ -92,32 +134,13 @@ void setCandidateBackends(HashSet backendsIds) { } // Random read replicas Collections.shuffle(this.candidateBackends); - } - - public void setSerializedDescTable(ByteString serializedDescTable) { - this.serializedDescTable = serializedDescTable; - } - - public void setSerializedOutputExpr(ByteString serializedOutputExpr) { - this.serializedOutputExpr = serializedOutputExpr; - } - - public void setCacheID(UUID cacheID) { - this.cacheID = cacheID; - } - - public void setTabletId(long tabletID) { - this.tabletID = tabletID; + LOG.debug("set scan locations, backend ids {}, tablet id {}", candidateBackends, tabletID); } public void setTimeout(long timeoutMs) { this.timeoutMs = timeoutMs; } - public void setBinaryProtocol(boolean isBinaryProtocol) { - this.isBinaryProtocol = isBinaryProtocol; - } - void addKeyTuples( InternalService.PTabletKeyLookupRequest.Builder requestBuilder) { // TODO handle IN predicates @@ -129,11 +152,26 @@ void addKeyTuples( requestBuilder.addKeyTuples(kBuilder); } - public RowBatch getNext(Status status) throws TException { + @Override + public int getInstanceTotalNum() { + // TODO + return 1; + } + + @Override + public void cancel(Types.PPlanFragmentCancelReason cancelReason) { + // Do nothing + } + + + @Override + public RowBatch getNext() throws Exception { + setScanRangeLocations(); Iterator backendIter = candidateBackends.iterator(); RowBatch rowBatch = null; int tryCount = 0; int maxTry = Math.min(Config.max_point_query_retry_time, candidateBackends.size()); + Status status = new Status(); do { Backend backend = backendIter.next(); rowBatch = getNextInternal(status, backend); @@ -146,9 +184,33 @@ public RowBatch getNext(Status status) throws TException { } status.setStatus(Status.OK); } while (true); + // handle status code + if (!status.ok()) { + if (Strings.isNullOrEmpty(status.getErrorMsg())) { + status.rewriteErrorMsg(); + } + if (status.isRpcError()) { + throw new RpcException(null, status.getErrorMsg()); + } else { + String errMsg = status.getErrorMsg(); + LOG.warn("query failed: {}", errMsg); + + // 
hide host info + int hostIndex = errMsg.indexOf("host"); + if (hostIndex != -1) { + errMsg = errMsg.substring(0, hostIndex); + } + throw new UserException(errMsg); + } + } return rowBatch; } + @Override + public void exec() throws Exception { + // Do nothing + } + private RowBatch getNextInternal(Status status, Backend backend) throws TException { long timeoutTs = System.currentTimeMillis() + timeoutMs; RowBatch rowBatch = new RowBatch(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java index c4ef5ad1604fb1..b7b633a4b3d493 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java @@ -1389,19 +1389,25 @@ private void sendResult(boolean isOutfileQuery, boolean isSendFields, Queriable // // 2. If this is a query, send the result expr fields first, and send result data back to client. RowBatch batch; - coord = new Coordinator(context, analyzer, planner, context.getStatsErrorEstimator()); - if (Config.enable_workload_group && context.sessionVariable.getEnablePipelineEngine()) { - coord.setTWorkloadGroups(context.getEnv().getWorkloadGroupMgr().getWorkloadGroup(context)); + CoordInterface coordBase = null; + if (queryStmt instanceof SelectStmt && ((SelectStmt) parsedStmt).isPointQueryShortCircuit()) { + coordBase = new PointQueryExec(planner, analyzer); } else { - context.setWorkloadGroupName(""); + coord = new Coordinator(context, analyzer, planner, context.getStatsErrorEstimator()); + if (Config.enable_workload_group && context.sessionVariable.getEnablePipelineEngine()) { + coord.setTWorkloadGroups(context.getEnv().getWorkloadGroupMgr().getWorkloadGroup(context)); + } else { + context.setWorkloadGroupName(""); + } + QeProcessorImpl.INSTANCE.registerQuery(context.queryId(), + new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord)); + profile.addExecutionProfile(coord.getExecutionProfile()); + coordBase = coord; } - QeProcessorImpl.INSTANCE.registerQuery(context.queryId(), - new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord)); - profile.addExecutionProfile(coord.getExecutionProfile()); Span queryScheduleSpan = context.getTracer().spanBuilder("query schedule").setParent(Context.current()).startSpan(); try (Scope scope = queryScheduleSpan.makeCurrent()) { - coord.exec(); + coordBase.exec(); } catch (Exception e) { queryScheduleSpan.recordException(e); throw e; @@ -1410,12 +1416,12 @@ private void sendResult(boolean isOutfileQuery, boolean isSendFields, Queriable } profile.getSummaryProfile().setQueryScheduleFinishTime(); updateProfile(false); - if (coord.getInstanceTotalNum() > 1 && LOG.isDebugEnabled()) { + if (coordBase.getInstanceTotalNum() > 1 && LOG.isDebugEnabled()) { try { LOG.debug("Start to execute fragment. user: {}, db: {}, sql: {}, fragment instance num: {}", context.getQualifiedUser(), context.getDatabase(), parsedStmt.getOrigStmt().originStmt.replace("\n", " "), - coord.getInstanceTotalNum()); + coordBase.getInstanceTotalNum()); } catch (Exception e) { LOG.warn("Fail to print fragment concurrency for Query.", e); } @@ -1426,7 +1432,7 @@ private void sendResult(boolean isOutfileQuery, boolean isSendFields, Queriable while (true) { // register the fetch result time. 
profile.getSummaryProfile().setTempStartTime(); - batch = coord.getNext(); + batch = coordBase.getNext(); profile.getSummaryProfile().freshFetchResultConsumeTime(); // for outfile query, there will be only one empty batch send back with eos flag @@ -1494,17 +1500,17 @@ private void sendResult(boolean isOutfileQuery, boolean isSendFields, Queriable // in some case may block all fragment handle threads // details see issue https://github.com/apache/doris/issues/16203 LOG.warn("cancel fragment query_id:{} cause {}", DebugUtil.printId(context.queryId()), e.getMessage()); - coord.cancel(Types.PPlanFragmentCancelReason.INTERNAL_ERROR); + coordBase.cancel(Types.PPlanFragmentCancelReason.INTERNAL_ERROR); fetchResultSpan.recordException(e); throw e; } finally { fetchResultSpan.end(); - if (coord.getInstanceTotalNum() > 1 && LOG.isDebugEnabled()) { + if (coordBase.getInstanceTotalNum() > 1 && LOG.isDebugEnabled()) { try { LOG.debug("Finish to execute fragment. user: {}, db: {}, sql: {}, fragment instance num: {}", context.getQualifiedUser(), context.getDatabase(), parsedStmt.getOrigStmt().originStmt.replace("\n", " "), - coord.getInstanceTotalNum()); + coordBase.getInstanceTotalNum()); } catch (Exception e) { LOG.warn("Fail to print fragment concurrency for Query.", e); } From 7a8e3a65876c6dbadf33b916f4337e38dc13add3 Mon Sep 17 00:00:00 2001 From: xzj7019 <131111794+xzj7019@users.noreply.github.com> Date: Mon, 18 Sep 2023 11:26:55 +0800 Subject: [PATCH 32/33] [fix](nereids) fix cte filter pushdown if the filters can be aggregated (#24489) Current cte common filter extraction doesn't work if the filters can be aggregated, which will lead the common filter can't be pushed down inside cte. Consider the following case: with main as (select c1 from t1) select * from (select m1.* from main m1, main m2 where m1.c1 = m2.c1) abc where c1 = 1; The common c1=1 filter can't be pushed down. This pr fixed the original extraction logic from set to list to make the logic works, and this will also resolve the tpcds query4/11's pattern works well also. 
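The Set-to-List change described above is the crux of the fix: `java.util.Set` collapses equal elements, so when the two `main` consumers in the example both carry the identical filter set `{c1 = 1}`, the old `Set<Set<Expression>>` keeps only one entry and the per-filter match count can never reach the number of consumers, which is the condition for pushing the conjunct into the CTE producer. A minimal, self-contained sketch of that counting difference (plain Java with string stand-ins for expressions; the names here are illustrative, not the Doris classes):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class CteFilterCountSketch {
    public static void main(String[] args) {
        // Two CTE consumers, each carrying the same pushed-down conjunct "c1 = 1".
        Set<String> filtersOnConsumer1 = Set.of("c1 = 1");
        Set<String> filtersOnConsumer2 = Set.of("c1 = 1");
        int consumerCount = 2;

        // Old shape: a Set of per-consumer filter sets deduplicates the two equal
        // entries, so "c1 = 1" is only counted once and never reaches consumerCount.
        Set<Set<String>> asSet = new HashSet<>(List.of(filtersOnConsumer1, filtersOnConsumer2));
        long matchesFromSet = asSet.stream().filter(s -> s.contains("c1 = 1")).count();

        // New shape: a List keeps one entry per consumer, so the match count equals
        // consumerCount and the common filter qualifies for push-down.
        List<Set<String>> asList = List.of(filtersOnConsumer1, filtersOnConsumer2);
        long matchesFromList = asList.stream().filter(s -> s.contains("c1 = 1")).count();

        System.out.println(matchesFromSet == consumerCount);   // false: 1 of 2
        System.out.println(matchesFromList == consumerCount);  // true:  2 of 2
    }
}

This is also why the patch starts `matchCount` at 0 and counts the `another.equals(someone)` case explicitly: once duplicates are preserved in the list, the consumer holding `someone` has to contribute to the count like every other consumer.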
--- .../rules/rewrite/RewriteCteChildren.java | 7 +-- .../cte/test_cte_filter_pushdown.out | 43 +++++++++++++++++ .../cte/test_cte_filter_pushdown.groovy | 47 +++++++++++++++++++ 3 files changed, 94 insertions(+), 3 deletions(-) create mode 100644 regression-test/data/nereids_p0/cte/test_cte_filter_pushdown.out create mode 100644 regression-test/suites/nereids_p0/cte/test_cte_filter_pushdown.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/RewriteCteChildren.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/RewriteCteChildren.java index d88ef62e314eaf..5aa286e67f9c27 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/RewriteCteChildren.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/RewriteCteChildren.java @@ -145,10 +145,10 @@ private LogicalPlan tryToConstructFilter(CascadesContext cascadesContext, CTEId Set consumerIds = cascadesContext.getCteIdToConsumers().get(cteId).stream() .map(LogicalCTEConsumer::getRelationId) .collect(Collectors.toSet()); - Set> filtersAboveEachConsumer = cascadesContext.getConsumerIdToFilters().entrySet().stream() + List> filtersAboveEachConsumer = cascadesContext.getConsumerIdToFilters().entrySet().stream() .filter(kv -> consumerIds.contains(kv.getKey())) .map(Entry::getValue) - .collect(Collectors.toSet()); + .collect(Collectors.toList()); Set someone = filtersAboveEachConsumer.stream().findFirst().orElse(null); if (someone == null) { return child; @@ -156,11 +156,12 @@ private LogicalPlan tryToConstructFilter(CascadesContext cascadesContext, CTEId int filterSize = cascadesContext.getCteIdToConsumers().get(cteId).size(); Set conjuncts = new HashSet<>(); for (Expression f : someone) { - int matchCount = 1; + int matchCount = 0; Set slots = f.collect(e -> e instanceof SlotReference); Set mightBeJoined = new HashSet<>(); for (Set another : filtersAboveEachConsumer) { if (another.equals(someone)) { + matchCount++; continue; } Set matched = new HashSet<>(); diff --git a/regression-test/data/nereids_p0/cte/test_cte_filter_pushdown.out b/regression-test/data/nereids_p0/cte/test_cte_filter_pushdown.out new file mode 100644 index 00000000000000..0c632f4fc29d01 --- /dev/null +++ b/regression-test/data/nereids_p0/cte/test_cte_filter_pushdown.out @@ -0,0 +1,43 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !cte_filter_pushdown_1 -- +PhysicalCteAnchor ( cteId=CTEId#0 ) +--PhysicalCteProducer ( cteId=CTEId#0 ) +----PhysicalWindow +------PhysicalQuickSort +--------PhysicalProject +----------filter((main.k1 = 1)) +------------PhysicalOlapScan[test] +--PhysicalResultSink +----PhysicalDistribute +------PhysicalProject +--------hashJoin[INNER_JOIN](m1.k1 = m2.k1) +----------PhysicalDistribute +------------filter((temp.k1 = 1)) +--------------PhysicalCteConsumer ( cteId=CTEId#0 ) +----------PhysicalDistribute +------------PhysicalProject +--------------filter((m2.k1 = 1)) +----------------PhysicalCteConsumer ( cteId=CTEId#0 ) + +-- !cte_filter_pushdown_2 -- +PhysicalCteAnchor ( cteId=CTEId#0 ) +--PhysicalCteProducer ( cteId=CTEId#0 ) +----PhysicalProject +------filter((main.k1 = 1)) +--------PhysicalWindow +----------PhysicalQuickSort +------------PhysicalDistribute +--------------PhysicalProject +----------------PhysicalOlapScan[test] +--PhysicalResultSink +----PhysicalDistribute +------PhysicalProject +--------hashJoin[INNER_JOIN](m1.k1 = m2.k1) +----------PhysicalDistribute +------------filter((temp.k1 = 1)) +--------------PhysicalCteConsumer ( cteId=CTEId#0 ) +----------PhysicalDistribute +------------PhysicalProject +--------------filter((m2.k1 = 1)) +----------------PhysicalCteConsumer ( cteId=CTEId#0 ) + diff --git a/regression-test/suites/nereids_p0/cte/test_cte_filter_pushdown.groovy b/regression-test/suites/nereids_p0/cte/test_cte_filter_pushdown.groovy new file mode 100644 index 00000000000000..8f08721f6cd3a5 --- /dev/null +++ b/regression-test/suites/nereids_p0/cte/test_cte_filter_pushdown.groovy @@ -0,0 +1,47 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+suite("test_cte_filter_pushdown)") { + sql "SET enable_nereids_planner=true" + sql "SET enable_pipeline_engine=true" + sql "SET enable_fallback_to_original_planner=false" + + // CTE filter pushing down with the same filter + qt_cte_filter_pushdown_1 """ + explain shape plan + with main AS ( + select k1, row_number() over (partition by k1) rn + from nereids_test_query_db.test + ) + select * from ( + select m1.* from main m1, main m2 + where m1.k1 = m2.k1 + ) temp + where k1 = 1; + """ + qt_cte_filter_pushdown_2 """ + explain shape plan + with main AS ( + select k1, row_number() over (partition by k2) rn + from nereids_test_query_db.test + ) + select * from ( + select m1.* from main m1, main m2 + where m1.k1 = m2.k1 + ) temp + where k1 = 1; + """ +} From ae0b58fcdeb205cdb3efe151ee0256f87ff5c6ca Mon Sep 17 00:00:00 2001 From: ZhenchaoXu <49646212+ixzc@users.noreply.github.com> Date: Mon, 18 Sep 2023 11:27:52 +0800 Subject: [PATCH 33/33] [typo](doc)modify error word (#24456) --- docs/zh-CN/docs/ecosystem/flink-doris-connector.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/zh-CN/docs/ecosystem/flink-doris-connector.md b/docs/zh-CN/docs/ecosystem/flink-doris-connector.md index fe8bb2765caa1b..9c9c46755cb92a 100644 --- a/docs/zh-CN/docs/ecosystem/flink-doris-connector.md +++ b/docs/zh-CN/docs/ecosystem/flink-doris-connector.md @@ -146,7 +146,7 @@ INSERT INTO flink_doris_sink select name,age,price,sale from flink_doris_source #### DataStream -DorisSink是通过StreamLoad想Doris写入数据,DataStream写入时,支持不同的序列化方法 +DorisSink是通过StreamLoad向Doris写入数据,DataStream写入时,支持不同的序列化方法 **String 数据流(SimpleStringSerializer)**