This is an automated email from the ASF dual-hosted git repository. zhangchen pushed a commit to branch branch-2.1 in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-2.1 by this push: new ae56739f884 [enhancement](sequence col) add session variable to skip sequence column check while INSERT INTO (#41655) (#41720) ae56739f884 is described below commit ae56739f884bc5b91dd86e0d7519ea5bc618c290 Author: zhannngchen <48427519+zhannngc...@users.noreply.github.com> AuthorDate: Sat Oct 12 15:30:20 2024 +0800 [enhancement](sequence col) add session variable to skip sequence column check while INSERT INTO (#41655) (#41720) cp #41655 --- .../apache/doris/analysis/NativeInsertStmt.java | 3 +- .../doris/nereids/rules/analysis/BindSink.java | 6 +++- .../java/org/apache/doris/qe/SessionVariable.java | 16 +++++++++++ .../unique/test_unique_table_sequence.out | 16 +++++++++-- .../unique/test_unique_table_sequence.groovy | 33 ++++++++++++++++++++++ 5 files changed, 70 insertions(+), 4 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java index c3080fd14d8..172904cb49f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/NativeInsertStmt.java @@ -500,7 +500,8 @@ public class NativeInsertStmt extends InsertStmt { } if (!haveInputSeqCol && !isPartialUpdate && !isFromDeleteOrUpdateStmt - && !analyzer.getContext().getSessionVariable().isEnableUniqueKeyPartialUpdate()) { + && !analyzer.getContext().getSessionVariable().isEnableUniqueKeyPartialUpdate() + && analyzer.getContext().getSessionVariable().isRequireSequenceInInsert()) { if (!seqColInTable.isPresent() || seqColInTable.get().getDefaultValue() == null || !seqColInTable.get().getDefaultValue() .equalsIgnoreCase(DefaultValue.CURRENT_TIMESTAMP)) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java index 
4a9660a5144..727364d56d4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java @@ -74,6 +74,7 @@ import org.apache.doris.nereids.types.coercion.CharacterType; import org.apache.doris.nereids.util.ExpressionUtils; import org.apache.doris.nereids.util.RelationUtil; import org.apache.doris.nereids.util.TypeCoercionUtils; +import org.apache.doris.qe.ConnectContext; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; @@ -198,9 +199,12 @@ public class BindSink implements AnalysisRuleFactory { // including the following cases: // 1. it's a load job with `partial_columns=true` // 2. UPDATE and DELETE, planner will automatically add these hidden columns + // 3. session value `require_sequence_in_insert` is false if (!haveInputSeqCol && !isPartialUpdate && ( boundSink.getDmlCommandType() != DMLCommandType.UPDATE - && boundSink.getDmlCommandType() != DMLCommandType.DELETE)) { + && boundSink.getDmlCommandType() != DMLCommandType.DELETE) && ( + boundSink.getDmlCommandType() != DMLCommandType.INSERT + || ConnectContext.get().getSessionVariable().isRequireSequenceInInsert())) { if (!seqColInTable.isPresent() || seqColInTable.get().getDefaultValue() == null || !seqColInTable.get().getDefaultValue() .equalsIgnoreCase(DefaultValue.CURRENT_TIMESTAMP)) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java index da95433e01f..e0ad520b9c6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java @@ -635,6 +635,7 @@ public class SessionVariable implements Serializable, Writable { "enable_adaptive_pipeline_task_serial_read_on_limit"; public static final String ADAPTIVE_PIPELINE_TASK_SERIAL_READ_ON_LIMIT = 
"adaptive_pipeline_task_serial_read_on_limit"; + public static final String REQUIRE_SEQUENCE_IN_INSERT = "require_sequence_in_insert"; /** * If set false, user couldn't submit analyze SQL and FE won't allocate any related resources. @@ -2074,6 +2075,13 @@ public class SessionVariable implements Serializable, Writable { }) public int adaptivePipelineTaskSerialReadOnLimit = 10000; + @VariableMgr.VarAttr(name = REQUIRE_SEQUENCE_IN_INSERT, needForward = true, description = { + "该变量用于控制,使用了sequence列的unique key表,insert into操作是否要求必须提供每一行的sequence列的值", + "This variable controls whether the INSERT INTO operation on unique key tables with a sequence" + + " column requires a sequence column to be provided for each row" + }) + public boolean requireSequenceInInsert = true; + public void setEnableEsParallelScroll(boolean enableESParallelScroll) { this.enableESParallelScroll = enableESParallelScroll; } @@ -3443,6 +3451,14 @@ public class SessionVariable implements Serializable, Writable { this.loadStreamPerNode = loadStreamPerNode; } + public void setRequireSequenceInInsert(boolean value) { + this.requireSequenceInInsert = value; + } + + public boolean isRequireSequenceInInsert() { + return this.requireSequenceInInsert; + } + /** * Serialize to thrift object. * Used for rest api. 
diff --git a/regression-test/data/data_model_p0/unique/test_unique_table_sequence.out b/regression-test/data/data_model_p0/unique/test_unique_table_sequence.out index 5197721c926..9bcf0dbf05c 100644 --- a/regression-test/data/data_model_p0/unique/test_unique_table_sequence.out +++ b/regression-test/data/data_model_p0/unique/test_unique_table_sequence.out @@ -41,10 +41,16 @@ -- !all -- 1 10 15 16 17 0 4 15 -15 8 19 20 21 0 7 3 +15 8 19 20 21 0 9 3 2 5 14 13 14 0 5 12 3 6 11 14 15 0 6 13 +-- !all_clone_table -- +1 10 15 16 17 0 2 \N +15 8 19 20 21 0 2 \N +2 5 14 13 14 0 2 \N +3 6 11 14 15 0 2 \N + -- !1 -- 1 1 1 1 1 0 2 1 2 2 2 2 2 0 2 2 @@ -102,10 +108,16 @@ -- !all -- 1 10 15 16 17 0 4 15 -15 8 19 20 21 0 7 3 +15 8 19 20 21 0 9 3 2 5 14 13 14 0 5 12 3 6 11 14 15 0 6 13 +-- !all_clone_table -- +1 10 15 16 17 0 2 \N +15 8 19 20 21 0 2 \N +2 5 14 13 14 0 2 \N +3 6 11 14 15 0 2 \N + -- !1 -- 1 1 1 1 1 0 2 1 2 2 2 2 2 0 2 2 diff --git a/regression-test/suites/data_model_p0/unique/test_unique_table_sequence.groovy b/regression-test/suites/data_model_p0/unique/test_unique_table_sequence.groovy index db4df6c9e34..d0d54781e0e 100644 --- a/regression-test/suites/data_model_p0/unique/test_unique_table_sequence.groovy +++ b/regression-test/suites/data_model_p0/unique/test_unique_table_sequence.groovy @@ -123,6 +123,15 @@ suite("test_unique_table_sequence") { exception "Table ${tableName} has sequence column, need to specify the sequence column" } + // with `require_sequence_in_insert=false`, previous insert operation should succeed + sql "SET require_sequence_in_insert=false" + + sql "INSERT INTO ${tableName} values(15, 8, 19, 20, 21)" + + sql "INSERT INTO ${tableName} (k1, v1, v2, v3, v4) values(15, 8, 19, 20, 21)" + + sql "SET require_sequence_in_insert=true" + // correct way of insert into with seq col sql "INSERT INTO ${tableName} (k1, v1, v2, v3, v4, __DORIS_SEQUENCE_COL__) values(15, 8, 19, 20, 21, 3)" @@ -138,7 +147,31 @@ 
order_qt_all "SELECT * from ${tableName}" + sql "SET show_hidden_columns=false" + + def tableNameClone = tableName + "_clone" + sql "DROP TABLE IF EXISTS ${tableNameClone}" + sql "create table ${tableNameClone} like ${tableName}" + + // test insert into select * + test { + sql "INSERT INTO ${tableNameClone} select * from ${tableName}" + exception "Table ${tableNameClone} has sequence column, need to specify the sequence column" + } + + // with `require_sequence_in_insert=false`, previous insert operation should succeed + sql "SET require_sequence_in_insert=false" + + sql "INSERT INTO ${tableNameClone} select * from ${tableName}" + + sql "SET require_sequence_in_insert=true" + + sql "SET show_hidden_columns=true" + + order_qt_all_clone_table "SELECT * from ${tableNameClone}" + sql "DROP TABLE ${tableName}" + sql "DROP TABLE ${tableNameClone}" sql "DROP TABLE IF EXISTS ${tableName}" sql """ --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org For additional commands, e-mail: commits-h...@doris.apache.org