This is an automated email from the ASF dual-hosted git repository.
morrysnow pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-3.1 by this push:
new cadb4ed6725 [fix](regression) change variable from global to local (#54482)
cadb4ed6725 is described below
commit cadb4ed67259913af094e7d1a02e0547702e45db
Author: morrySnow <[email protected]>
AuthorDate: Fri Aug 8 15:22:16 2025 +0800
[fix](regression) change variable from global to local (#54482)
---
.../csv_header_p0/test_csv_with_header.groovy | 24 +++++++++++-----------
.../suites/export/test_array_export.groovy | 20 +++++++++---------
.../load_p0/broker_load/test_array_load.groovy | 6 +++---
.../broker_load/test_broker_load_seq.groovy | 6 +++---
.../broker_load/test_broker_load_with_merge.groovy | 6 +++---
.../test_broker_load_with_partition.groovy | 6 +++---
.../test_broker_load_with_properties.groovy | 6 +++---
.../broker_load/test_broker_load_with_where.groovy | 6 +++---
.../test_broker_load_without_filepath.groovy | 6 +++---
.../test_broker_load_without_seq.groovy | 6 +++---
.../ingestion_load/test_ingestion_load.groovy | 2 +-
.../test_ingestion_load_alter_column.groovy | 6 +++---
.../test_ingestion_load_alter_partition.groovy | 8 ++++----
.../test_ingestion_load_drop_table.groovy | 4 ++--
.../test_ingestion_load_multi_table.groovy | 4 ++--
.../test_ingestion_load_with_partition.groovy | 2 +-
.../restore_p0/test_unique_validate_restore.groovy | 1 -
17 files changed, 59 insertions(+), 60 deletions(-)
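Background on the change: in a Groovy suite script, an assignment without 'def' falls through to the script Binding, so the variable behaves like a global shared across the whole suite; declaring it with 'def' keeps it local to the enclosing block or closure. A minimal sketch of the difference, assuming a hypothetical runCase closure (only the hdfsUser name comes from the diff below):

    // Without 'def', assignment in a suite script goes into the script
    // Binding and is visible (and mutable) to every later case.
    hdfsUser = "userA"

    // With 'def', the variable is local to the enclosing closure and
    // cannot leak into, or be clobbered by, other cases.
    def runCase = {
        def hdfsUser = "userB"
        assert hdfsUser == "userB"
    }
    runCase()
    assert hdfsUser == "userA"   // the Binding variable is untouched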
diff --git a/regression-test/suites/csv_header_p0/test_csv_with_header.groovy b/regression-test/suites/csv_header_p0/test_csv_with_header.groovy
index 742bdf52425..dab5ee13931 100644
--- a/regression-test/suites/csv_header_p0/test_csv_with_header.groovy
+++ b/regression-test/suites/csv_header_p0/test_csv_with_header.groovy
@@ -118,14 +118,14 @@ suite("test_csv_with_header") {
if (enableHdfs()) {
//test import data from hdfs
- hdfsUser = getHdfsUser()
- brokerName =getBrokerName()
- hdfsPasswd = getHdfsPasswd()
- hdfsFs = getHdfsFs()
+ def hdfsUser = getHdfsUser()
+ def brokerName =getBrokerName()
+ def hdfsPasswd = getHdfsPasswd()
+ def hdfsFs = getHdfsFs()
//[broker load] test normal
label = UUID.randomUUID().toString().replaceAll("-", "")
- remote_csv_file = uploadToHdfs format_csv_file
- export_result = import_from_hdfs.call(testTable, label, remote_csv_file, format_csv, brokerName, hdfsUser, hdfsPasswd)
+ def remote_csv_file = uploadToHdfs format_csv_file
+ def export_result = import_from_hdfs.call(testTable, label, remote_csv_file, format_csv, brokerName, hdfsUser, hdfsPasswd)
check_import_result.call(label, testTable, expect_rows * 4)
//[broker load] csv_with_names
@@ -179,22 +179,22 @@ suite("test_csv_with_header") {
}
sql "sync"
- resultCount = sql "select count(*) from ${testTable}"
- currentTotalRows = resultCount[0][0]
+ def resultCount = sql "select count(*) from ${testTable}"
+ def currentTotalRows = resultCount[0][0]
// export table to hdfs format=csv
- hdfsDataDir = getHdfsDataDir()
+ def hdfsDataDir = getHdfsDataDir()
label = UUID.randomUUID().toString().replaceAll("-", "")
export_to_hdfs.call(testTable, label, hdfsDataDir + "/" + label, format_csv, brokerName, hdfsUser, hdfsPasswd)
check_export_result(label)
- result = downloadExportFromHdfs(label + "/export-data")
+ def result = downloadExportFromHdfs(label + "/export-data")
check_download_result(result, format_csv, currentTotalRows)
// export table to hdfs format=csv_with_names
label = UUID.randomUUID().toString().replaceAll("-", "")
export_to_hdfs.call(testTable, label, hdfsDataDir + "/" + label, format_csv_with_names, brokerName, hdfsUser, hdfsPasswd)
check_export_result(label)
- def result = downloadExportFromHdfs(label + "/export-data")
+ result = downloadExportFromHdfs(label + "/export-data")
check_download_result(result, format_csv_with_names, currentTotalRows)
// export table to hdfs format=csv_with_names_and_types
@@ -205,7 +205,7 @@ suite("test_csv_with_header") {
check_download_result(result, format_csv_with_names_and_types, currentTotalRows)
// select out file to hdfs
- select_out_file = {outTable, outHdfsPath, outFormat, outHdfsFs, outBroker, outHdfsUser, outPasswd->
+ def select_out_file = {outTable, outHdfsPath, outFormat, outHdfsFs, outBroker, outHdfsUser, outPasswd->
sql "sync"
sql """
SELECT * FROM ${outTable}
diff --git a/regression-test/suites/export/test_array_export.groovy b/regression-test/suites/export/test_array_export.groovy
index 4ce9786e997..14150c13648 100644
--- a/regression-test/suites/export/test_array_export.groovy
+++ b/regression-test/suites/export/test_array_export.groovy
@@ -210,21 +210,21 @@ suite("test_array_export", "export") {
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
- hdfsDataDir = getHdfsDataDir()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
+ def hdfsDataDir = getHdfsDataDir()
// case2: test "select ...into outfile 'hdfs_path'"
try {
create_test_table.call(tableName)
- resultCount = sql "select count(*) from ${tableName}"
+ def resultCount = sql "select count(*) from ${tableName}"
currentTotalRows = resultCount[0][0]
- label = UUID.randomUUID().toString().replaceAll("-", "")
+ def label = UUID.randomUUID().toString().replaceAll("-", "")
select_out_file(tableName, hdfsDataDir + "/" + label + "/export-data", "csv", brokerName, hdfsUser, hdfsPasswd)
- result = downloadExportFromHdfs(label + "/export-data")
+ def result = downloadExportFromHdfs(label + "/export-data")
check_download_result(result, currentTotalRows)
} finally {
try_sql("DROP TABLE IF EXISTS ${tableName}")
@@ -234,13 +234,13 @@ suite("test_array_export", "export") {
try {
create_test_table.call(tableName)
- resultCount = sql "select count(*) from ${tableName}"
+ def resultCount = sql "select count(*) from ${tableName}"
currentTotalRows = resultCount[0][0]
- label = UUID.randomUUID().toString().replaceAll("-", "")
+ def label = UUID.randomUUID().toString().replaceAll("-", "")
export_to_hdfs.call(tableName, label, hdfsDataDir + "/" + label, '', brokerName, hdfsUser, hdfsPasswd)
check_export_result(label)
- result = downloadExportFromHdfs(label + "/export-data")
+ def result = downloadExportFromHdfs(label + "/export-data")
check_download_result(result, currentTotalRows)
} finally {
try_sql("DROP TABLE IF EXISTS ${tableName}")
diff --git a/regression-test/suites/load_p0/broker_load/test_array_load.groovy b/regression-test/suites/load_p0/broker_load/test_array_load.groovy
index fe2985aff4a..6ec6a601d0f 100644
--- a/regression-test/suites/load_p0/broker_load/test_array_load.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_array_load.groovy
@@ -246,9 +246,9 @@ suite("test_array_load", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these case as below.
if (enableHdfs()) {
- brokerName =getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName =getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_json_file_path = uploadToHdfs "load_p0/broker_load/simple_object_array.json"
def hdfs_csv_file_path = uploadToHdfs "load_p0/broker_load/simple_array.csv"
def hdfs_orc_file_path = uploadToHdfs "load_p0/broker_load/simple_array.orc"
diff --git a/regression-test/suites/load_p0/broker_load/test_broker_load_seq.groovy b/regression-test/suites/load_p0/broker_load/test_broker_load_seq.groovy
index 2529226382c..b9851c11789 100644
--- a/regression-test/suites/load_p0/broker_load/test_broker_load_seq.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_seq.groovy
@@ -95,9 +95,9 @@ suite("test_broker_load_seq", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these case as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs "load_p0/broker_load/broker_load.csv"
//def hdfs_csv_file_path = "hdfs://ip:port/testfile"
diff --git a/regression-test/suites/load_p0/broker_load/test_broker_load_with_merge.groovy b/regression-test/suites/load_p0/broker_load/test_broker_load_with_merge.groovy
index eb76e4d389a..24bdee97613 100644
--- a/regression-test/suites/load_p0/broker_load/test_broker_load_with_merge.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_with_merge.groovy
@@ -174,9 +174,9 @@ suite("test_broker_load_with_merge", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these case as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs "load_p0/broker_load/broker_load_with_merge.csv"
// case1: has delete on condition and without merge
diff --git a/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy b/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy
index 45f0cc50be7..aa575d6c507 100644
--- a/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_with_partition.groovy
@@ -148,9 +148,9 @@ suite("test_broker_load_with_partition", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these case as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs "load_p0/broker_load/broker_load_with_partition.csv"
//def hdfs_csv_file_path = "hdfs://ip:port/testfile"
diff --git a/regression-test/suites/load_p0/broker_load/test_broker_load_with_properties.groovy b/regression-test/suites/load_p0/broker_load/test_broker_load_with_properties.groovy
index 852a3ef0ae7..1386d7efc1c 100644
--- a/regression-test/suites/load_p0/broker_load/test_broker_load_with_properties.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_with_properties.groovy
@@ -95,9 +95,9 @@ suite("test_broker_load_with_properties", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these case as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs "load_p0/broker_load/broker_load_with_properties.json"
//def hdfs_csv_file_path = "hdfs://ip:port/testfile"
diff --git a/regression-test/suites/load_p0/broker_load/test_broker_load_with_where.groovy b/regression-test/suites/load_p0/broker_load/test_broker_load_with_where.groovy
index dd05905e4bb..e6be14958b8 100644
--- a/regression-test/suites/load_p0/broker_load/test_broker_load_with_where.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_with_where.groovy
@@ -125,9 +125,9 @@ suite("test_broker_load_with_where", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these case as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs "load_p0/broker_load/broker_load_with_where.csv"
//def hdfs_csv_file_path = "hdfs://ip:port/testfile"
diff --git a/regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy b/regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy
index 2dae37c615b..c89ba78fc32 100644
--- a/regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy
@@ -78,9 +78,9 @@ suite("test_broker_load_without_filepath", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these case as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs "load_p0/broker_load/broker_load_without_filepath.csv"
try {
diff --git a/regression-test/suites/load_p0/broker_load/test_broker_load_without_seq.groovy b/regression-test/suites/load_p0/broker_load/test_broker_load_without_seq.groovy
index 9d34c0c0978..b5a487ae562 100644
--- a/regression-test/suites/load_p0/broker_load/test_broker_load_without_seq.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_without_seq.groovy
@@ -79,9 +79,9 @@ suite("test_broker_load_without_seq", "load_p0") {
// if 'enableHdfs' in regression-conf.groovy has been set to true,
// the test will run these case as below.
if (enableHdfs()) {
- brokerName = getBrokerName()
- hdfsUser = getHdfsUser()
- hdfsPasswd = getHdfsPasswd()
+ def brokerName = getBrokerName()
+ def hdfsUser = getHdfsUser()
+ def hdfsPasswd = getHdfsPasswd()
def hdfs_csv_file_path = uploadToHdfs "load_p0/broker_load/broker_load_without_seq.csv"
//def hdfs_csv_file_path = "hdfs://ip:port/testfile"
diff --git a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load.groovy b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load.groovy
index a6e213bba89..18bbd913e91 100644
--- a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load.groovy
+++ b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load.groovy
@@ -126,7 +126,7 @@ suite('test_ingestion_load', 'p0') {
if (enableHdfs()) {
- tableName = 'tbl_test_spark_load'
+ def tableName = 'tbl_test_spark_load'
sql """
CREATE TABLE IF NOT EXISTS ${tableName} (
diff --git a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_column.groovy b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_column.groovy
index 4a56663d629..0d040492da2 100644
--- a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_column.groovy
+++ b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_column.groovy
@@ -129,8 +129,8 @@ suite('test_ingestion_load_alter_column', 'p0') {
if (enableHdfs()) {
- tableName1 = 'tbl_test_spark_load_alter_column_1'
- tableName2 = 'tbl_test_spark_load_alter_column_2'
+ def tableName1 = 'tbl_test_spark_load_alter_column_1'
+ def tableName2 = 'tbl_test_spark_load_alter_column_2'
try {
@@ -160,7 +160,7 @@ suite('test_ingestion_load_alter_column', 'p0') {
)
"""
- label = "test_ingestion_load_alter_column_1"
+ def label = "test_ingestion_load_alter_column_1"
testIngestLoadJob.call(tableName1, label, context.config.dataPath + '/load_p0/ingestion_load/data.parquet', {
sql "alter table ${tableName1} drop column c_datetimev2"
diff --git a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_partition.groovy b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_partition.groovy
index de919357102..61c1587ea53 100644
--- a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_partition.groovy
+++ b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_alter_partition.groovy
@@ -140,9 +140,9 @@ suite('test_ingestion_load_alter_partition', 'p0') {
if (enableHdfs()) {
- tableName1 = 'tbl_test_spark_load_alter_partition_1'
- tableName2 = 'tbl_test_spark_load_alter_partition_2'
- tableName3 = 'tbl_test_spark_load_alter_partition_3'
+ def tableName1 = 'tbl_test_spark_load_alter_partition_1'
+ def tableName2 = 'tbl_test_spark_load_alter_partition_2'
+ def tableName3 = 'tbl_test_spark_load_alter_partition_3'
try {
@@ -162,7 +162,7 @@ suite('test_ingestion_load_alter_partition', 'p0') {
)
"""
- label = "test_ingestion_load_alter_partition_1"
+ def label = "test_ingestion_load_alter_partition_1"
testIngestLoadJob.call(tableName1, label, [context.config.dataPath + '/load_p0/ingestion_load/data2-0.parquet', context.config.dataPath + '/load_p0/ingestion_load/data2-1.parquet',context.config.dataPath + '/load_p0/ingestion_load/data2-2.parquet',context.config.dataPath + '/load_p0/ingestion_load/data2-3.parquet'], {
sql "alter table ${tableName1} drop partition p_20240901"
diff --git a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_drop_table.groovy b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_drop_table.groovy
index 4f245c3d535..e348b9db9ff 100644
--- a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_drop_table.groovy
+++ b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_drop_table.groovy
@@ -126,7 +126,7 @@ suite('test_ingestion_load_drop_table', 'p0') {
if (enableHdfs()) {
- tableName = 'tbl_test_spark_load_drop_table'
+ def tableName = 'tbl_test_spark_load_drop_table'
try {
@@ -156,7 +156,7 @@ suite('test_ingestion_load_drop_table', 'p0') {
)
"""
- label = "test_ingestion_load_drop_table"
+ def label = "test_ingestion_load_drop_table"
testIngestLoadJob.call(tableName, label, context.config.dataPath + '/load_p0/ingestion_load/data.parquet', {
sql "DROP TABLE ${tableName}"
diff --git a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_multi_table.groovy b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_multi_table.groovy
index 67455d8c692..4a4199a9b81 100644
--- a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_multi_table.groovy
+++ b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_multi_table.groovy
@@ -145,7 +145,7 @@ suite('test_ingestion_load_multi_table', 'p0') {
if (enableHdfs()) {
- tableName1 = 'tbl_test_spark_load_multi_1'
+ def tableName1 = 'tbl_test_spark_load_multi_1'
sql """
CREATE TABLE IF NOT EXISTS ${tableName1} (
@@ -172,7 +172,7 @@ suite('test_ingestion_load_multi_table', 'p0') {
)
"""
- tableName2 = 'tbl_test_spark_load_multi_2'
+ def tableName2 = 'tbl_test_spark_load_multi_2'
sql """
CREATE TABLE IF NOT EXISTS ${tableName2} (
diff --git a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_partition.groovy b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_partition.groovy
index 97ebb7a0761..3c024ab1edb 100644
--- a/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_partition.groovy
+++ b/regression-test/suites/load_p0/ingestion_load/test_ingestion_load_with_partition.groovy
@@ -133,7 +133,7 @@ suite('test_ingestion_load_with_partition', 'p0') {
if (enableHdfs()) {
- tableName = 'tbl_test_spark_load_partition'
+ def tableName = 'tbl_test_spark_load_partition'
sql """
CREATE TABLE IF NOT EXISTS ${tableName} (
diff --git a/regression-test/suites/restore_p0/test_unique_validate_restore.groovy b/regression-test/suites/restore_p0/test_unique_validate_restore.groovy
index 090703cbe1f..6cceba4807e 100644
--- a/regression-test/suites/restore_p0/test_unique_validate_restore.groovy
+++ b/regression-test/suites/restore_p0/test_unique_validate_restore.groovy
@@ -102,6 +102,5 @@ suite("test_unique_validate_restore", "validate_restore") {
}
runUniqueValidateRestore("3.0")
- //runUniqueValidateRestore("2.1")
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]