This is an automated email from the ASF dual-hosted git repository.
dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new bda3d12f3ed [chore](test) adjust timeout for some cases (#37743)
bda3d12f3ed is described below
commit bda3d12f3eda3154b2255c51bff9cb1d419a29d4
Author: Yongqiang YANG <[email protected]>
AuthorDate: Mon Jul 15 22:34:36 2024 +0800
[chore](test) adjust timeout for some cases (#37743)
---
.../suites/load_p0/broker_load/test_etl_failed.groovy | 1 +
.../stream_load/test_load_block_to_single_tablet.groovy | 2 +-
.../load_p0/stream_load/test_load_to_single_tablet.groovy | 12 ++++++------
3 files changed, 8 insertions(+), 7 deletions(-)
diff --git a/regression-test/suites/load_p0/broker_load/test_etl_failed.groovy
b/regression-test/suites/load_p0/broker_load/test_etl_failed.groovy
index 30da06c2c98..4049fdadb1f 100644
--- a/regression-test/suites/load_p0/broker_load/test_etl_failed.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_etl_failed.groovy
@@ -61,6 +61,7 @@ suite("test_etl_failed", "load_p0") {
def max_try_milli_secs = 600000
while (max_try_milli_secs > 0) {
String[][] result = sql """ show load where label="$label" order by
createtime desc limit 1; """
+ logger.info("Load result: " + result[0])
if (result[0][2].equals("FINISHED")) {
logger.info("Load FINISHED " + label)
assertTrue(1 == 2, "etl should be failed")
diff --git
a/regression-test/suites/load_p0/stream_load/test_load_block_to_single_tablet.groovy
b/regression-test/suites/load_p0/stream_load/test_load_block_to_single_tablet.groovy
index 955bdf681af..2a713826bc3 100644
---
a/regression-test/suites/load_p0/stream_load/test_load_block_to_single_tablet.groovy
+++
b/regression-test/suites/load_p0/stream_load/test_load_block_to_single_tablet.groovy
@@ -49,7 +49,7 @@ suite("test_load_block_to_single_tablet", "p0") {
set 'partitions', 'p202403, p202404'
file 'test_load_block_to_single_tablet.csv'
- time 10000 // limit inflight 10s
+ time 20000 // limit inflight 20s
}
sql "sync"
diff --git
a/regression-test/suites/load_p0/stream_load/test_load_to_single_tablet.groovy
b/regression-test/suites/load_p0/stream_load/test_load_to_single_tablet.groovy
index 02f64900326..2c69247db30 100644
---
a/regression-test/suites/load_p0/stream_load/test_load_to_single_tablet.groovy
+++
b/regression-test/suites/load_p0/stream_load/test_load_to_single_tablet.groovy
@@ -49,7 +49,7 @@ suite("test_load_to_single_tablet", "p0") {
set 'load_to_single_tablet', 'true'
file 'test_load_to_single_tablet.json'
- time 10000 // limit inflight 10s
+ time 20000 // limit inflight 20s
}
sql "sync"
@@ -89,7 +89,7 @@ suite("test_load_to_single_tablet", "p0") {
set 'load_to_single_tablet', 'true'
file 'test_load_to_single_tablet.json'
- time 10000 // limit inflight 10s
+ time 20000 // limit inflight 20s
}
sql "sync"
totalCount = sql "select count() from ${tableName}"
@@ -116,7 +116,7 @@ suite("test_load_to_single_tablet", "p0") {
set 'load_to_single_tablet', 'true'
file 'test_load_to_single_tablet.json'
- time 10000 // limit inflight 10s
+ time 20000 // limit inflight 20s
}
sql "sync"
totalCount = sql "select count() from ${tableName}"
@@ -169,7 +169,7 @@ suite("test_load_to_single_tablet", "p0") {
set 'load_to_single_tablet', 'true'
file 'test_load_to_single_tablet.json'
- time 10000 // limit inflight 10s
+ time 20000 // limit inflight 20s
}
sql "sync"
@@ -220,7 +220,7 @@ suite("test_load_to_single_tablet", "p0") {
set 'load_to_single_tablet', 'true'
file 'test_load_to_single_tablet.json'
- time 10000 // limit inflight 10s
+ time 20000 // limit inflight 20s
}
sql "sync"
totalCount = sql "select count() from ${tableName}"
@@ -255,7 +255,7 @@ suite("test_load_to_single_tablet", "p0") {
set 'load_to_single_tablet', 'true'
file 'test_load_to_single_tablet.json'
- time 10000 // limit inflight 10s
+ time 20000 // limit inflight 20s
}
sql "sync"
totalCount = sql "select count() from ${tableName}"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]