This is an automated email from the ASF dual-hosted git repository.

zhangstar333 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 2703b385c15 [ci](cloud) add cloud p0 pipeline (#30772)
2703b385c15 is described below

commit 2703b385c15b0a99cd038f2059cdc3d22659168a
Author: Dongyang Li <[email protected]>
AuthorDate: Mon Feb 5 11:52:17 2024 +0800

    [ci](cloud) add cloud p0 pipeline (#30772)
    
    * [ci](cloud) add cloud p0 pipeline
    Co-authored-by: stephen <[email protected]>
---
 .github/workflows/comment-to-trigger-teamcity.yml  |  38 ++-
 .licenserc.yaml                                    |   2 +-
 be/src/cloud/config.h                              |   2 +-
 regression-test/pipeline/cloud_p0/clean.sh         |  30 ++
 .../pipeline/cloud_p0/conf/be_custom.conf          |  32 +++
 .../pipeline/cloud_p0/conf/fe_custom.conf          |  38 +++
 .../cloud_p0/conf/regression-conf-custom.groovy    |   2 +
 .../pipeline/cloud_p0/conf/session_variables.sql   |   4 +
 regression-test/pipeline/cloud_p0/deploy.sh        | 101 +++++++
 .../pipeline/{performance => cloud_p0}/prepare.sh  |  81 +++---
 regression-test/pipeline/cloud_p0/run.sh           |  99 +++++++
 regression-test/pipeline/common/doris-utils.sh     | 304 ++++++++++++++++++---
 regression-test/pipeline/common/github-utils.sh    |  15 +
 regression-test/pipeline/common/oss-utils.sh       |   2 +
 regression-test/pipeline/common/teamcity-utils.sh  |   8 +-
 regression-test/pipeline/performance/prepare.sh    |   2 +-
 .../tpch/tpch-sf100/conf/regression-conf.groovy    | 112 --------
 17 files changed, 653 insertions(+), 219 deletions(-)

diff --git a/.github/workflows/comment-to-trigger-teamcity.yml 
b/.github/workflows/comment-to-trigger-teamcity.yml
index 1385b52368c..b561483aaa4 100644
--- a/.github/workflows/comment-to-trigger-teamcity.yml
+++ b/.github/workflows/comment-to-trigger-teamcity.yml
@@ -46,6 +46,7 @@ jobs:
             "${COMMENT_BODY}" == *'run p1'* ||
             "${COMMENT_BODY}" == *'run external'* ||
             "${COMMENT_BODY}" == *'run pipelinex_p0'* ||
+            "${COMMENT_BODY}" == *'run cloud_p0'* ||
             "${COMMENT_BODY}" == *'run arm'* ||
             "${COMMENT_BODY}" == *'run performance'* ]]; then
             echo "comment_trigger=true" | tee -a "$GITHUB_OUTPUT"
@@ -63,7 +64,7 @@ jobs:
         echo "TARGET_BRANCH='${TARGET_BRANCH}'" | tee -a "$GITHUB_OUTPUT"
         echo "COMMENT_BODY='${COMMENT_BODY}'" | tee -a "$GITHUB_OUTPUT"
 
-        reg="run 
(buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|pipelinex_p0|arm|performance)(
 [1-9]*[0-9]+)*"
+        reg="run 
(buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|pipelinex_p0|cloud_p0|arm|performance)(
 [1-9]*[0-9]+)*"
         COMMENT_TRIGGER_TYPE="$(echo -e "${COMMENT_BODY}" | xargs | grep -E 
"${reg}" | awk -F' ' '{print $2}' | sed -n 1p | sed 's/\r//g')"
         COMMENT_REPEAT_TIMES="$(echo -e "${COMMENT_BODY}" | xargs | grep -E 
"${reg}" | awk -F' ' '{print $3}' | sed -n 1p | sed 's/\r//g')"
         echo "COMMENT_TRIGGER_TYPE=${COMMENT_TRIGGER_TYPE}" | tee -a 
"$GITHUB_OUTPUT"
@@ -116,6 +117,11 @@ jobs:
           else
             echo "changed_performance=false" | tee -a "$GITHUB_OUTPUT"
           fi
+          if file_changed_cloud_p0; then
+            echo "changed_cloud_p0=true" | tee -a "$GITHUB_OUTPUT"
+          else
+            echo "changed_cloud_p0=false" | tee -a "$GITHUB_OUTPUT"
+          fi
         else
           echo "INFO: failed to _get_pr_changed_files, default trigger all"
           echo "changed_fe_ut=true" | tee -a "$GITHUB_OUTPUT"
@@ -127,6 +133,7 @@ jobs:
           echo "changed_arm=true" | tee -a "$GITHUB_OUTPUT"
           echo "changed_p1=true" | tee -a "$GITHUB_OUTPUT"
           echo "changed_performance=true" | tee -a "$GITHUB_OUTPUT"
+          echo "changed_cloud_p0=true" | tee -a "$GITHUB_OUTPUT"
         fi
 
     # - name: "Setup tmate session"
@@ -243,6 +250,21 @@ jobs:
           "pipelinex_p0" \
           "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
 
+    - name: "Trigger or Skip cloud_p0"
+      if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && 
contains(fromJSON('["cloud_p0", "buildall"]'), 
steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
+      run: |
+        source ./regression-test/pipeline/common/teamcity-utils.sh
+        if [[ ${{ steps.parse.outputs.COMMENT_TRIGGER_TYPE }} == "buildall" 
]]; then
+          echo "COMMENT_TRIGGER_TYPE is buildall, trigger compile is enough, 
compile will trigger cloud_p0" && exit
+        fi
+        set -x
+        trigger_or_skip_build \
+          "${{ steps.changes.outputs.changed_cloud_p0 }}" \
+          "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
+          "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
+          "cloud_p0" \
+          "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
+
     - name: "Trigger or Skip arm"
       if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && 
contains(fromJSON('["arm", "buildall"]'), 
steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
       run: |
@@ -269,13 +291,6 @@ jobs:
             "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
             "performance" \
             "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
-
-          trigger_or_skip_build \
-            "${{ steps.changes.outputs.changed_performance }}" \
-            "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
-            "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
-            "perf" \
-            "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
         else
           echo "PR target branch not in (master, branch-2.0), skip run 
performance"
           trigger_or_skip_build \
@@ -284,11 +299,4 @@ jobs:
             "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
             "performance" \
             "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
-
-          trigger_or_skip_build \
-            "false" \
-            "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
-            "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
-            "perf" \
-            "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
         fi
diff --git a/.licenserc.yaml b/.licenserc.yaml
index 3f41200df2e..0064d37814b 100644
--- a/.licenserc.yaml
+++ b/.licenserc.yaml
@@ -88,7 +88,7 @@ header:
     - "conf/mysql_ssl_default_certificate/client_certificate/client-cert.pem"
     - "conf/mysql_ssl_default_certificate/client_certificate/client-key.pem"
     - "regression-test/ssl_default_certificate/*"
-    - "regression-test/pipeline/performance/**"
+    - "regression-test/pipeline/**"
     - "extension/beats/go.mod"
     - "extension/beats/go.sum"
     - "pytest/hdfs"
diff --git a/be/src/cloud/config.h b/be/src/cloud/config.h
index 779fc089e7d..2cbf213320d 100644
--- a/be/src/cloud/config.h
+++ b/be/src/cloud/config.h
@@ -48,7 +48,7 @@ DECLARE_mInt32(tablet_sync_interval_s);
 
 // Cloud compaction config
 DECLARE_mInt64(min_compaction_failure_interval_ms);
-// For cloud read/write seperate mode
+// For cloud read/write separate mode
 DECLARE_mInt64(base_compaction_freeze_interval_s);
 DECLARE_mInt64(cu_compaction_freeze_interval_s);
 DECLARE_mInt64(cumu_compaction_interval_s);
diff --git a/regression-test/pipeline/cloud_p0/clean.sh 
b/regression-test/pipeline/cloud_p0/clean.sh
new file mode 100644
index 00000000000..8f7aa3bba81
--- /dev/null
+++ b/regression-test/pipeline/cloud_p0/clean.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+
+########################### Teamcity Build Step: Command Line 
#######################
+: <<EOF
+#!/bin/bash
+export PATH=/usr/local/software/apache-maven-3.6.3/bin:${PATH}
+if [[ -f 
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/cloud_p0/clean.sh 
]]; then
+    cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0/
+    bash -x clean.sh
+else
+    echo "Build Step file missing: regression-test/pipeline/cloud_p0/clean.sh" 
&& exit 1
+fi
+EOF
+############################# clean.sh content 
########################################
+# shellcheck source=/dev/null
+# stop_doris, clean_fdb
+source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
+
+echo "#### Check env"
+if [[ -z "${teamcity_build_checkoutDir}" ]]; then echo "ERROR: env 
teamcity_build_checkoutDir not set" && exit 1; fi
+
+# shellcheck source=/dev/null
+source "$(bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'get')"
+if ${skip_pipeline:=false}; then echo "INFO: skip build pipeline" && exit 0; 
else echo "INFO: no skip"; fi
+
+echo "#### Stop Doris and clean FDB ####"
+DORIS_HOME="${teamcity_build_checkoutDir}/output"
+export DORIS_HOME
+stop_doris
+clean_fdb
diff --git a/regression-test/pipeline/cloud_p0/conf/be_custom.conf 
b/regression-test/pipeline/cloud_p0/conf/be_custom.conf
new file mode 100644
index 00000000000..ae08823bfb7
--- /dev/null
+++ b/regression-test/pipeline/cloud_p0/conf/be_custom.conf
@@ -0,0 +1,32 @@
+streaming_load_rpc_max_alive_time_sec = 72000
+quick_cooldown = true
+disable_stream_load_2pc=false
+enable_vectorized_alter_table = true
+enable_new_scan_node = true
+push_worker_count_high_priority = 2
+streaming_load_max_mb = 107374182400
+clear_file_cache=true
+enable_file_cache=true
+mem_limit=50%
+#disable_storage_page_cache = true
+enable_file_cache_query_limit=true
+file_cache_max_file_segment_size=1048576
+s3_write_buffer_whole_size=52428800
+enable_vertical_compaction=true
+fuzzy_vertical_compaction=true
+vacuum_stale_rowsets_interval_seconds=60
+tablet_rowset_stale_sweep_time_sec=300
+user_files_secure_path=/
+enable_file_cache_as_load_buffer=true
+enable_merge_on_write_correctness_check=true
+enable_debug_points=true
+prioritize_query_perf_in_compaction = true
+cumulative_compaction_min_deltas = 5
+wait_internal_group_commit_finish=true
+#p0 parameter
+meta_service_endpoint = 127.0.0.1:5000
+cloud_unique_id = cloud_unique_id_compute_node0
+meta_service_use_load_balancer = false
+enable_file_cache = true
+file_cache_path = 
[{"path":"/data/doris_cloud/file_cache","total_size":104857600,"query_limit":104857600}]
+tmp_file_dirs = 
[{"path":"/data/doris_cloud/tmp","max_cache_bytes":104857600,"max_upload_bytes":104857600}]
\ No newline at end of file
diff --git a/regression-test/pipeline/cloud_p0/conf/fe_custom.conf 
b/regression-test/pipeline/cloud_p0/conf/fe_custom.conf
new file mode 100644
index 00000000000..9a6c6cdeb54
--- /dev/null
+++ b/regression-test/pipeline/cloud_p0/conf/fe_custom.conf
@@ -0,0 +1,38 @@
+stream_load_default_timeout_second = 72000
+replication_num_forced_in_cloud_mode = true
+ignore_unsupported_properties_in_cloud_mode = true
+enable_array_type = true
+tablet_stat_update_interval_second = 10
+catalog_trash_expire_second = 600
+cloud_delete_loaded_internal_stage_files = true
+merge_on_write_forced_to_false = true
+enable_ssl = true
+light_schema_change_force_to_true = true
+enable_mtmv = true
+remote_fragment_exec_timeout_ms=60000
+dynamic_partition_check_interval_seconds=10
+use_fuzzy_session_variable=true
+
+enable_cloud_snapshot_version = true
+enable_auto_collect_statistics = false
+
+forbid_function_stmt = false
+forbid_insecurity_stmt = false
+
+enable_debug_points = true
+
+disable_datev1=false
+
+disable_decimalv2=false
+max_query_profile_num=1000
+
+statistics_sql_mem_limit_in_bytes=21474836480
+cpu_resource_limit_per_analyze_task=-1
+
+group_commit_interval_ms_default_value=2
+wait_internal_group_commit_finish=true
+
+priority_networks=127.0.0.1/24
+cloud_http_port=18030
+meta_service_endpoint=127.0.0.1:5000
+cloud_unique_id=cloud_unique_id_sql_server00
diff --git 
a/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy 
b/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy
new file mode 100644
index 00000000000..2cfe28f6601
--- /dev/null
+++ b/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy
@@ -0,0 +1,2 @@
+testGroups = "p0"
+testDirectories = "table_p0"
\ No newline at end of file
diff --git a/regression-test/pipeline/cloud_p0/conf/session_variables.sql 
b/regression-test/pipeline/cloud_p0/conf/session_variables.sql
new file mode 100644
index 00000000000..cb786ace296
--- /dev/null
+++ b/regression-test/pipeline/cloud_p0/conf/session_variables.sql
@@ -0,0 +1,4 @@
+-- set those session variables before running cloud p0 regression
+set global insert_visible_timeout_ms=60000;
+set global enable_auto_analyze=false;
+set global enable_audit_plugin=true;
\ No newline at end of file
diff --git a/regression-test/pipeline/cloud_p0/deploy.sh 
b/regression-test/pipeline/cloud_p0/deploy.sh
new file mode 100644
index 00000000000..5f22451e75a
--- /dev/null
+++ b/regression-test/pipeline/cloud_p0/deploy.sh
@@ -0,0 +1,101 @@
+#!/usr/bin/env bash
+
+########################### Teamcity Build Step: Command Line 
#######################
+: <<EOF
+#!/bin/bash
+
+if [[ -f 
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/cloud_p0/deploy.sh 
]]; then
+    cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0
+    bash -x deploy.sh
+else
+    echo "Build Step file missing: 
regression-test/pipeline/cloud_p0/deploy.sh" && exit 1
+fi
+EOF
+#####################################################################################
+
+########################## deploy.sh content 
########################################
+# shellcheck source=/dev/null
+source "$(bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'get')"
+if ${skip_pipeline:=false}; then echo "INFO: skip build pipeline" && exit 0; 
else echo "INFO: no skip"; fi
+
+# shellcheck source=/dev/null
+# upload_doris_log_to_oss, download_oss_file
+source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh
+# shellcheck source=/dev/null
+# stop_doris, install_fdb, clean_fdb, print_doris_conf,
+# start_doris_fe, get_doris_conf_value, start_doris_be,
+# print_doris_fe_log, print_doris_be_log, archive_doris_logs
+source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
+
+if ${DEBUG:-false}; then
+    pull_request_num="30772"
+    commit_id="8a0077c2cfc492894d9ff68916e7e131f9a99b65"
+    target_branch="master"
+fi
+echo "#### Check env"
+if [[ -z "${teamcity_build_checkoutDir}" ]]; then echo "ERROR: env 
teamcity_build_checkoutDir not set" && exit 1; fi
+if [[ -z "${pull_request_num}" ]]; then echo "ERROR: env pull_request_num not 
set" && exit 1; fi
+if [[ -z "${commit_id}" ]]; then echo "ERROR: env commit_id not set" && exit 
1; fi
+if [[ -z "${target_branch}" ]]; then echo "ERROR: env target_branch not set" 
&& exit 1; fi
+
+echo "#### Deploy Doris ####"
+DORIS_HOME="${teamcity_build_checkoutDir}/output"
+export DORIS_HOME
+exit_flag=0
+(
+    echo "#### 1. download doris binary"
+    cd "${teamcity_build_checkoutDir}"
+    export OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile_result"}"
+    if download_oss_file "${pull_request_num}_${commit_id}.tar.gz"; then
+        rm -rf "${teamcity_build_checkoutDir}"/output/*
+        tar -I pigz -xf "${pull_request_num}_${commit_id}.tar.gz"
+    else exit 1; fi
+
+    echo "#### 2. try to kill old doris process and clean foundationdb"
+    stop_doris
+    install_fdb && clean_fdb "cloud_instance_0"
+
+    set -e
+    echo "#### 3. copy conf from regression-test/pipeline/cloud_p0/conf/ and 
modify"
+    cp -rf "${DORIS_HOME}"/ms/ "${DORIS_HOME}"/recycler/
+    cp -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0/conf/fe_custom.conf
 "${DORIS_HOME}"/fe/conf/
+    cp -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0/conf/be_custom.conf
 "${DORIS_HOME}"/be/conf/
+    fdb_cluster="$(cat /etc/foundationdb/fdb.cluster)"
+    sed -i "s/^fdb_cluster = .*/fdb_cluster = ${fdb_cluster}/" 
"${DORIS_HOME}"/ms/conf/doris_cloud.conf
+    sed -i "s/^fdb_cluster = .*/fdb_cluster = ${fdb_cluster}/" 
"${DORIS_HOME}"/recycler/conf/doris_cloud.conf
+    sed -i "s/^brpc_listen_port = .*/brpc_listen_port = 6000/" 
"${DORIS_HOME}"/recycler/conf/doris_cloud.conf
+    print_doris_conf
+
+    echo "#### 4. start Doris"
+    if ! start_doris_ms; then exit 1; fi
+    if ! start_doris_recycler; then exit 1; fi
+    if ! create_warehouse; then exit 1; fi
+    if ! warehouse_add_fe; then exit 1; fi
+    if ! warehouse_add_be; then exit 1; fi
+    if ! start_doris_fe; then exit 1; fi
+    if ! start_doris_be; then exit 1; fi
+    if ! check_doris_ready; then exit 1; fi
+
+    echo "#### 5. set session variables"
+    if ! reset_doris_session_variables; then exit 1; fi
+    
session_variables_file="${teamcity_build_checkoutDir}/regression-test/pipeline/cloud_p0/conf/session_variables.sql"
+    echo -e "\n\ntuned session variables:\n$(cat 
"${session_variables_file}")\n\n"
+    set_doris_session_variables_from_file "${session_variables_file}"
+    # record session variables
+    set +x
+    show_session_variables &>"${DORIS_HOME}"/session_variables
+)
+exit_flag="$?"
+
+echo "#### 6. check if doris logs need to be backed up"
+if [[ ${exit_flag} != "0" ]]; then
+    stop_doris
+    print_doris_fe_log
+    print_doris_be_log
+    if file_name=$(archive_doris_logs 
"${pull_request_num}_${commit_id}_doris_logs.tar.gz"); then
+        upload_doris_log_to_oss "${file_name}"
+    fi
+fi
+
+exit "${exit_flag}"
+#####################################################################################
diff --git a/regression-test/pipeline/performance/prepare.sh 
b/regression-test/pipeline/cloud_p0/prepare.sh
similarity index 52%
copy from regression-test/pipeline/performance/prepare.sh
copy to regression-test/pipeline/cloud_p0/prepare.sh
index ee3c8e0a990..4c4b2c35be6 100644
--- a/regression-test/pipeline/performance/prepare.sh
+++ b/regression-test/pipeline/cloud_p0/prepare.sh
@@ -1,47 +1,38 @@
 #!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
 
-# Build Step: Command Line
+########################### Teamcity Build Step: Command Line 
#######################
 : <<EOF
 #!/bin/bash
 
 set -x
 pwd
 rm -rf ../.old/*
-set +x
 
-if [[ -f 
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/performance/prepare.sh
 ]]; then
-    cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/
+teamcity_build_checkoutDir="%teamcity.build.checkoutDir%"
+if [[ -f 
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/cloud_p0/prepare.sh 
]]; then
+    cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0/
     bash prepare.sh
 else
-    echo "Build Step file missing: 
regression-test/pipeline/performance/prepare.sh" && exit 1
+    echo "Build Step file missing: 
regression-test/pipeline/cloud_p0/prepare.sh" && exit 1
 fi
 EOF
-
 
#####################################################################################
-## run.sh content ##
+## prepare.sh content ##
 
 if ${DEBUG:-false}; then
-    pull_request_num="28431"
-    commit_id_from_trigger="5f5c4c80564c76ff4267fc4ce6a5408498ed1ab5"
-    commit_id="5f5c4c80564c76ff4267fc4ce6a5408498ed1ab5" # teamcity checkout 
commit id
+    pull_request_num="30772"
+    commit_id_from_trigger="8a0077c2cfc492894d9ff68916e7e131f9a99b65"
+    commit_id="8a0077c2cfc492894d9ff68916e7e131f9a99b65" # teamcity checkout 
commit id
     target_branch="master"
 fi
+
+# shellcheck source=/dev/null
+# stop_doris, clean_fdb, install_fdb
+source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
+# shellcheck source=/dev/null
+# check_oss_file_exist
+source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh
+
 echo "#### Check env"
 if [[ -z "${teamcity_build_checkoutDir}" ]]; then echo "ERROR: env 
teamcity_build_checkoutDir not set" && exit 1; fi
 if [[ -z "${pull_request_num}" ]]; then echo "ERROR: env pull_request_num not 
set" && exit 1; fi
@@ -64,6 +55,7 @@ if [[ "${commit_id_from_trigger}" != 
"${commit_id_from_checkout}" ]]; then
     commit_id_from_trigger is outdate"
     exit 1
 fi
+
 # shellcheck source=/dev/null
 source "$(bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'get')"
 if ${skip_pipeline:=false}; then echo "INFO: skip build pipline" && exit 0; 
else echo "INFO: no skip"; fi
@@ -74,11 +66,12 @@ else
     bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'set' "export skip_pipeline=true"
     exit 0
 fi
+
 # shellcheck source=/dev/null
 # _get_pr_changed_files file_changed_performance
 source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/github-utils.sh
 if _get_pr_changed_files "${pull_request_num}"; then
-    if ! file_changed_performance; then
+    if ! file_changed_cloud_p0; then
         bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'set' "export skip_pipeline=true"
         exit 0
     fi
@@ -86,31 +79,23 @@ fi
 
 echo "#### 2. check if tpch depending files exist"
 set -x
-if ! [[ -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh &&
+if ! [[ -d "${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0/ 
&&
+    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh &&
     -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh 
&&
     -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/github-utils.sh 
&&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 &&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/conf/be_custom.conf
 &&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/conf/custom_env.sh
 &&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/conf/fe_custom.conf
 &&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/clickbench/conf/be_custom.conf
 &&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/clickbench/conf/fe_custom.conf
 &&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/clickbench/conf/opt_session_variables.sql
 &&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/clickbench/check-query-result.sh
 &&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/clickbench/queries-sort.sql
 &&
-    -d 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/clickbench/query-result-target/
 &&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/prepare.sh 
&&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/compile.sh 
&&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/deploy.sh 
&&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/run-tpch.sh
 &&
-    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/performance/run-tpcds.sh
 &&
-    -f 
"${teamcity_build_checkoutDir}"/tools/tpch-tools/bin/run-tpch-queries.sh &&
-    -f 
"${teamcity_build_checkoutDir}"/tools/tpcds-tools/bin/run-tpcds-queries.sh ]]; 
then
+    -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 ]]; then
     echo "ERROR: depending files missing" && exit 1
 fi
 
 echo "#### 3. try to kill old doris process"
-# shellcheck source=/dev/null
-# stop_doris
-source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
+DORIS_HOME="${teamcity_build_checkoutDir}/output"
+export DORIS_HOME
 stop_doris
+
+echo "#### 4. prepare foundationdb"
+install_fdb
+clean_fdb
+
+echo "#### 5. check if binary package ready"
+export OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile_result"}"
+if ! check_oss_file_exist 
"${pull_request_num}_${commit_id_from_trigger}.tar.gz"; then return 1; fi
diff --git a/regression-test/pipeline/cloud_p0/run.sh 
b/regression-test/pipeline/cloud_p0/run.sh
new file mode 100644
index 00000000000..62434258e5c
--- /dev/null
+++ b/regression-test/pipeline/cloud_p0/run.sh
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+
+########################### Teamcity Build Step: Command Line 
#######################
+: <<EOF
+#!/bin/bash
+export 
PATH=/usr/local/software/jdk1.8.0_131/bin:/usr/local/software/apache-maven-3.6.3/bin:${PATH}
+if [[ -f 
"${teamcity_build_checkoutDir:-}"/regression-test/pipeline/cloud_p0/run.sh ]]; 
then
+    cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0/
+    bash -x run.sh
+else
+    echo "Build Step file missing: regression-test/pipeline/cloud_p0/run.sh" 
&& exit 1
+fi
+EOF
+############################# run.sh content 
########################################
+# shellcheck source=/dev/null
+# check_if_need_gcore, stop_doris, print_doris_fe_log, print_doris_be_log, archive_doris_logs
+source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
+# shellcheck source=/dev/null
+# create_an_issue_comment
+source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/github-utils.sh
+# shellcheck source=/dev/null
+# upload_doris_log_to_oss
+source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh
+
+if ${DEBUG:-false}; then
+    pull_request_num="28431"
+    commit_id="5f5c4c80564c76ff4267fc4ce6a5408498ed1ab5"
+fi
+echo "#### Check env"
+if [[ -z "${teamcity_build_checkoutDir}" ]]; then echo "ERROR: env 
teamcity_build_checkoutDir not set" && exit 1; fi
+if [[ -z "${pull_request_num}" ]]; then echo "ERROR: env pull_request_num not 
set" && exit 1; fi
+if [[ -z "${commit_id}" ]]; then echo "ERROR: env commit_id not set" && exit 
1; fi
+
+# shellcheck source=/dev/null
+source "$(bash 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh
 'get')"
+if ${skip_pipeline:=false}; then echo "INFO: skip build pipeline" && exit 0; 
else echo "INFO: no skip"; fi
+
+echo "#### Run cloud p0 regression test on Doris ####"
+DORIS_HOME="${teamcity_build_checkoutDir}/output"
+export DORIS_HOME
+exit_flag=0
+
+# shellcheck disable=SC2317
+run() {
+    set -e
+    shopt -s inherit_errexit
+
+    cd "${teamcity_build_checkoutDir}" || return 1
+    cp -f 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p0/conf/regression-conf-custom.groovy
 \
+        "${teamcity_build_checkoutDir}"/regression-test/conf/
+    if "${teamcity_build_checkoutDir}"/run-regression-test.sh \
+        --teamcity \
+        --clean \
+        --run \
+        --times "${repeat_times_from_trigger:-1}" \
+        -parallel 14 \
+        -suiteParallel 14 \
+        -actionParallel 2; then
+        echo
+    else
+        # regression 测试跑完后输出的汇总信息,Test 1961 suites, failed 1 suites, fatal 0 
scripts, skipped 0 scripts
+        # 如果 test_suites>0 && failed_suites<=3  && 
fatal_scripts=0,就把返回状态码改为正常的0,让teamcity根据跑case的情况去判断成功还是失败
+        # 这样预期能够快速 mute 不稳定的 case
+        summary=$(
+            grep -aoE 'Test ([0-9]+) suites, failed ([0-9]+) suites, fatal 
([0-9]+) scripts, skipped ([0-9]+) scripts' \
+                "${DORIS_HOME}"/regression-test/log/doris-regression-test.*.log
+        )
+        set -x
+        test_suites=$(echo "${summary}" | cut -d ' ' -f 2)
+        failed_suites=$(echo "${summary}" | cut -d ' ' -f 5)
+        fatal_scripts=$(echo "${summary}" | cut -d ' ' -f 8)
+        if [[ ${test_suites} -gt 0 && ${failed_suites} -le 30 && 
${fatal_scripts} -eq 0 ]]; then
+            echo "INFO: regression test result meet (test_suites>0 && 
failed_suites<=30 && fatal_scripts=0)"
+        else
+            return 1
+        fi
+    fi
+}
+export -f run
+# 设置超时时间(以分为单位)
+timeout_minutes=$((${repeat_times_from_trigger:-1} * 90))m
+timeout "${timeout_minutes}" bash -cx run
+exit_flag="$?"
+
+echo "#### 5. check if need backup doris logs"
+if [[ ${exit_flag} != "0" ]]; then
+    check_if_need_gcore
+    if file_name=$(archive_doris_coredump 
"${pull_request_num}_${commit_id}_coredump.tar.gz"); then
+        upload_doris_log_to_oss "${file_name}"
+    fi
+    stop_doris
+    print_doris_fe_log
+    print_doris_be_log
+    if file_name=$(archive_doris_logs 
"${pull_request_num}_${commit_id}_doris_logs.tar.gz"); then
+        upload_doris_log_to_oss "${file_name}"
+    fi
+fi
+
+exit "${exit_flag}"
diff --git a/regression-test/pipeline/common/doris-utils.sh 
b/regression-test/pipeline/common/doris-utils.sh
index df575f59957..d6e2c420744 100644
--- a/regression-test/pipeline/common/doris-utils.sh
+++ b/regression-test/pipeline/common/doris-utils.sh
@@ -51,6 +51,44 @@ function set_doris_conf_value() {
 # get_doris_conf_value "$1" "$2"
 # set_doris_conf_value "$1" "$2" "$3"
 
+function start_doris_ms() {
+    if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+    cd "${DORIS_HOME}"/ms || return 1
+    if ! ./bin/start.sh --meta-service --daemonized; then
+        echo "ERROR: start doris meta-service failed." && return 1
+    fi
+    local i=1
+    while [[ $((i++)) -lt 5 ]]; do
+        if ! pgrep -fia 'doris_cloud --meta-service' >/dev/null; then
+            echo "ERROR: start doris meta-service failed." && return 1
+        else
+            sleep 1
+        fi
+    done
+    if [[ ${i} -ge 5 ]]; then
+        echo -e "INFO: doris meta-service 
started,\n$("${DORIS_HOME}"/ms/lib/doris_cloud --version)"
+    fi
+}
+
+function start_doris_recycler() {
+    if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+    cd "${DORIS_HOME}"/recycler || return 1
+    if ! ./bin/start.sh --recycler --daemonized; then
+        echo "ERROR: start doris recycler failed." && return 1
+    fi
+    local i=1
+    while [[ $((i++)) -lt 5 ]]; do
+        if ! pgrep -fia 'doris_cloud --recycler' >/dev/null; then
+            echo "ERROR: start doris recycler failed." && return 1
+        else
+            sleep 1
+        fi
+    done
+    if [[ ${i} -ge 5 ]]; then
+        echo -e "INFO: doris recycler 
started,\n$("${DORIS_HOME}"/ms/lib/doris_cloud --version)"
+    fi
+}
+
 function start_doris_fe() {
     if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
     if ! java -version >/dev/null ||
@@ -110,18 +148,22 @@ function add_doris_be_to_fe() {
     query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf 
query_port)
     heartbeat_service_port=$(get_doris_conf_value 
"${DORIS_HOME}"/be/conf/be.conf heartbeat_service_port)
     cl="mysql -h127.0.0.1 -P${query_port} -uroot "
-    if ${cl} -e "ALTER SYSTEM ADD BACKEND 
'127.0.0.1:${heartbeat_service_port}';"; then echo; else echo; fi
+    if ${cl} -e "ALTER SYSTEM ADD BACKEND 
'127.0.0.1:${heartbeat_service_port}';"; then echo; else return 1; fi
+    check_doris_ready
+}
 
+function check_doris_ready() {
+    if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
     i=1
     while [[ $((i++)) -lt 60 ]]; do
         if be_ready_count=$(${cl} -e 'show backends\G' | grep -c 'Alive: 
true') &&
             [[ ${be_ready_count} -eq 1 ]]; then
-            echo -e "INFO: add doris be success, be version: \n$(${cl} -e 
'show backends\G' | grep 'Version')" && break
+            echo -e "INFO: Doris cluster ready, be version: \n$(${cl} -e 'show 
backends\G' | grep 'Version')" && break
         else
-            echo 'Wait for Backends ready, sleep 2 seconds ...' && sleep 2
+            echo 'Wait for backends ready, sleep 2 seconds ...' && sleep 2
         fi
     done
-    if [[ ${i} -ge 60 ]]; then echo "ERROR: Add Doris Backend Failed after 2 
mins wait..." && return 1; fi
+    if [[ ${i} -ge 60 ]]; then echo "ERROR: Doris cluster not ready after 2 
mins wait..." && return 1; fi
 
     # wait 10s for doris totally started, otherwize may encounter the error 
below,
     # ERROR 1105 (HY000) at line 102: errCode = 2, detailMessage = Failed to 
find enough backend, please check the replication num,replication tag and 
storage medium.
@@ -129,6 +171,9 @@ function add_doris_be_to_fe() {
 }
 
 function stop_doris() {
+    if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
+    if [[ -f "${DORIS_HOME}"/ms/bin/stop.sh ]]; then bash 
"${DORIS_HOME}"/ms/bin/stop.sh; fi
+    if [[ -f "${DORIS_HOME}"/recycler/bin/stop.sh ]]; then bash 
"${DORIS_HOME}"/recycler/bin/stop.sh; fi
     if "${DORIS_HOME}"/fe/bin/stop_fe.sh &&
         "${DORIS_HOME}"/be/bin/stop_be.sh; then
         echo "INFO: normally stoped doris"
@@ -138,6 +183,38 @@ function stop_doris() {
     fi
 }
 
function clean_fdb() {
    # Wipe every FoundationDB key range belonging to one cloud instance,
    # then clear the local fdb logs.
    # Arguments: $1 - instance_id (required)
    # Returns:   0 on success, 1 on missing argument or any fdbcli/rm failure
    instance_id="$1"
    if [[ -z "${instance_id:-}" ]]; then return 1; fi
    local category
    # Each category below is a distinct key-space prefix used by the
    # meta-service; the clearrange bounds are identical for all of them,
    # so issue one fdbcli call per prefix instead of eight copy-pasted ones.
    for category in instance meta txn version stats recycle job copy; do
        if ! fdbcli --exec "writemode on;clearrange \x01\x10${category}\x00\x01\x10${instance_id}\x00\x01 \x01\x10${category}\x00\x01\x10${instance_id}\x00\xff\x00\x01"; then
            echo "ERROR: failed to clean fdb" && return 1
        fi
    done
    if rm -f /var/log/foundationdb/*; then
        echo "INFO: fdb cleaned."
    else
        echo "ERROR: failed to clean fdb" && return 1
    fi
}
+
function install_fdb() {
    # Install FoundationDB 7.1.23 (client + server packages), skipping the
    # whole step when a cluster is already reachable via fdbcli.
    # Returns: 0 when fdb answers 'status', 1 when it is still unreachable.
    if fdbcli --exec 'status' >/dev/null; then return; fi
    wget -c -t3 -q https://github.com/apple/foundationdb/releases/download/7.1.23/foundationdb-clients_7.1.23-1_amd64.deb
    wget -c -t3 -q https://github.com/apple/foundationdb/releases/download/7.1.23/foundationdb-server_7.1.23-1_amd64.deb
    sudo dpkg -i foundationdb-clients_7.1.23-1_amd64.deb foundationdb-server_7.1.23-1_amd64.deb
    # /usr/lib/foundationdb/fdbmonitor --daemonize
    # fdbcli --exec 'configure new single ssd'
    if ! fdbcli --exec 'status'; then return 1; fi
    echo "INFO: foundationdb installed."
}
+
 function restart_doris() {
     if stop_doris; then echo; fi
     if ! start_doris_fe; then return 1; fi
@@ -305,37 +382,14 @@ get_session_variable() {
     fi
 }
 
show_session_variables() {
    # Print all session variables of the local FE through the mysql client.
    # Requires DORIS_HOME; the FE query port is read from fe.conf.
    # Returns: mysql's exit status (1 when DORIS_HOME is missing).
    if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
    query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
    if ! mysql -h127.0.0.1 -P"${query_port}" -uroot -e"show session variables;"; then
        return 1
    fi
}
 
 set_session_variable() {
@@ -376,23 +430,77 @@ function reset_doris_session_variables() {
     fi
 }
 
function set_doris_session_variables_from_file() {
    # Apply session variables by sourcing a SQL file through the mysql client.
    # Arguments: $1 - path to a file of 'set global ...' statements (required)
    # Returns:   0 on success, 1 on missing args/DORIS_HOME or mysql failure
    if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
    session_variables_file="$1"
    if [[ -z ${session_variables_file} ]]; then echo "ERROR: session_variables_file required" && return 1; fi
    query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
    if ! mysql -h127.0.0.1 -P"${query_port}" -uroot -e"source ${session_variables_file};"; then
        echo "ERROR: set session variables from file ${session_variables_file}, failed" && return 1
    fi
    echo "INFO: set session variables from file ${session_variables_file}, succeed"
}
+
 archive_doris_logs() {
     if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
     archive_name="$1"
     if [[ -z ${archive_name} ]]; then echo "ERROR: archive file name required" 
&& return 1; fi
+    archive_content="regression-test/log fe/conf fe/log be/conf be/log 
session_variables"
+    if [[ -d "${DORIS_HOME}"/ms ]]; then
+        cp -rf /var/log/foundationdb "${DORIS_HOME}"/foundationdb/log
+        archive_content="${archive_content} ms/conf ms/log foundationdb/log"
+    fi
+    if [[ -d "${DORIS_HOME}"/recycler ]]; then
+        archive_content="${archive_content} recycler/conf recycler/log"
+    fi
+    if [[ -d "${DORIS_HOME}"/be/storage/error_log ]]; then
+        archive_content="${archive_content} be/storage/error_log"
+    fi
+
+    # shellcheck disable=SC2086
     if tar -I pigz \
         --directory "${DORIS_HOME}" \
         -cf "${DORIS_HOME}/${archive_name}" \
-        fe/conf \
-        fe/log \
-        be/conf \
-        be/log; then
+        ${archive_content}; then
         echo "${DORIS_HOME}/${archive_name}"
     else
         return 1
     fi
 }
 
archive_doris_coredump() {
    # Locate the BE coredump (matched by BE pid), wait until the kernel has
    # finished writing it, then pack it together with the doris_be binary.
    # Arguments: $1 - archive file name (required)
    # Outputs:   prints the path of the produced core.tar.gz
    # Returns:   1 on bad args, missing pid, or a core larger than 80G
    if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
    archive_name="$1"
    if [[ -z ${archive_name} ]]; then echo "ERROR: archive file name required" && return 1; fi
    be_pid="$(cat "${DORIS_HOME}"/be/bin/be.pid)"
    # fix: the original tested ${be_id}, an undefined variable, so this guard
    # could never fire; test the variable that was actually read
    if [[ -z "${be_pid}" ]]; then echo "ERROR: can not find be pid from ${DORIS_HOME}/be/bin/be.pid" && return 1; fi
    # fix: find exits 0 even when nothing matches, so check the result is
    # non-empty instead of relying on find's exit status
    corename=$(find /var/lib/apport/coredump/ -type f -name "core.*${be_pid}.*")
    if [[ -n "${corename}" ]]; then
        # wait until the core file stops growing, i.e. the dump is complete
        initial_size=$(stat -c %s "${corename}")
        while true; do
            sleep 2
            current_size=$(stat -c %s "${corename}")
            if [[ ${initial_size} -eq ${current_size} ]]; then
                break
            fi
            initial_size=${current_size}
        done
        file_size=$(stat -c %s "${corename}")
        if ((file_size > 85899345920)); then
            echo "coredump size ${file_size} over 80G, not upload"
            return 1
        else
            # compress the core file together with the BE binary
            mv "${corename}" "${DORIS_HOME}"/be/lib/
            cd "${DORIS_HOME}"/be/lib/ || return 1
            # fix: members like be/lib/doris_be are relative to DORIS_HOME,
            # not to the current directory (be/lib) — anchor tar there
            tar -I pigz -cf core.tar.gz --directory "${DORIS_HOME}" be/lib/doris_be "be/lib/$(basename "${corename}")" >/dev/null
            echo "$(pwd)/core.tar.gz"
        fi
    fi
}
+
 print_doris_fe_log() {
     if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
     echo -e "\n\n\n\nWARNING: --------------------tail -n 100 
${DORIS_HOME}/fe/log/fe.out--------------------"
@@ -410,3 +518,125 @@ print_doris_be_log() {
     tail -n 100 "${DORIS_HOME}"/be/log/be.INFO
     echo -e "WARNING: ----------------------------------------\n\n\n\n"
 }
+
print_fdb_log() {
    # Placeholder: printing FoundationDB logs is not implemented yet;
    # emit a blank line so callers see uniform output.
    echo
}
+
print_doris_conf() {
    # Dump the FE/BE configuration files (plus the meta-service and recycler
    # ones when those components are deployed) to stdout for debugging.
    if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
    local f
    echo -e "\n\n\n\nINFO: --------------------cat ${DORIS_HOME}/fe/conf/fe.conf--------------------"
    cat "${DORIS_HOME}"/fe/conf/fe.conf
    f="${DORIS_HOME}/fe/conf/fe_custom.conf"
    if [[ -f ${f} ]]; then
        echo -e "\n\n\n\nINFO: --------------------cat ${f}--------------------"
        cat "${f}"
    fi
    # be.conf can be large; only the tail is interesting
    echo -e "\n\n\n\nINFO: --------------------tail -n 100 ${DORIS_HOME}/be/conf/be.conf--------------------"
    tail -n 100 "${DORIS_HOME}"/be/conf/be.conf
    f="${DORIS_HOME}/be/conf/be_custom.conf"
    if [[ -f ${f} ]]; then
        echo -e "\n\n\n\nINFO: --------------------cat ${f}--------------------"
        cat "${f}"
    fi
    f="${DORIS_HOME}/ms/conf/doris_cloud.conf"
    if [[ -f ${f} ]]; then
        echo -e "\n\n\n\nINFO: --------------------cat ${f}--------------------"
        cat "${f}"
    fi
    f="${DORIS_HOME}/recycler/conf/doris_cloud.conf"
    if [[ -f ${f} ]]; then
        echo -e "\n\n\n\nINFO: --------------------cat ${f}--------------------"
        cat "${f}"
    fi
    echo -e "INFO: ----------------------------------------\n\n\n\n"
}
+
function create_warehouse() {
    # Register the cloud instance in the local meta-service, backed by a
    # Tencent COS bucket. Requires env vars COS_ak and COS_sk.
    # Returns: 0 when the create_instance call succeeds, 1 otherwise.
    if [[ -z ${COS_ak} || -z ${COS_sk} ]]; then
        echo "ERROR: env COS_ak and COS_sk are required." && return 1
    fi
    local payload
    payload="{
        \"instance_id\": \"cloud_instance_0\",
        \"name\":\"cloud_instance_0\",
        \"user_id\":\"user-id\",
        \"obj_info\": {
            \"provider\": \"COS\",
            \"region\": \"ap-beijing\",
            \"bucket\": \"doris-build-1308700295\",
            \"prefix\": \"ci\",
            \"endpoint\": \"cos.ap-beijing.myqcloud.com\",
            \"external_endpoint\": \"cos.ap-beijing.myqcloud.com\",
            \"ak\": \"${COS_ak}\",
            \"sk\": \"${COS_sk}\"
        }
    }"
    if ! curl "127.0.0.1:5000/MetaService/http/create_instance?token=greedisgood9999" -d "${payload}"; then
        return 1
    fi
    echo
}
+
function warehouse_add_fe() {
    # Register the FE (SQL server) node as the reserved SQL cluster in the
    # meta-service, then query the cluster back to verify registration.
    # Returns: 1 when the add_cluster call fails.
    local ret payload
    payload="{
        \"instance_id\": \"cloud_instance_0\",
        \"cluster\":{
            \"type\":\"SQL\",
            \"cluster_name\":\"RESERVED_CLUSTER_NAME_FOR_SQL_SERVER\",
            \"cluster_id\":\"RESERVED_CLUSTER_ID_FOR_SQL_SERVER\",
            \"nodes\":[
                {
                    \"cloud_unique_id\":\"cloud_unique_id_sql_server00\",
                    \"ip\":\"127.0.0.1\",
                    \"edit_log_port\":\"9010\",
                    \"node_type\":\"FE_MASTER\"
                }
            ]
        }
    }"
    if ! curl "127.0.0.1:5000/MetaService/http/add_cluster?token=greedisgood9999" -d "${payload}"; then
        return 1
    fi
    # read the cluster back to confirm it was created
    if ret=$(curl "127.0.0.1:5000/MetaService/http/get_cluster?token=greedisgood9999" -d "{
        \"instance_id\": \"cloud_instance_0\",
        \"cloud_unique_id\":\"cloud_unique_id_sql_server00\",
        \"cluster_name\":\"RESERVED_CLUSTER_NAME_FOR_SQL_SERVER\",
        \"cluster_id\":\"RESERVED_CLUSTER_ID_FOR_SQL_SERVER\"
    }"); then
        echo -e "warehouse_add_fe:\n${ret}"
    fi
}
+
function warehouse_add_be() {
    # Register the BE node as a compute cluster in the meta-service, then
    # query the cluster back to verify registration.
    # Returns: 1 when the add_cluster call fails.
    local ret payload
    payload="{
        \"instance_id\": \"cloud_instance_0\",
        \"cluster\":{
            \"type\":\"COMPUTE\",
            \"cluster_name\":\"cluster_name0\",
            \"cluster_id\":\"cluster_id0\",
            \"nodes\":[
                {
                    \"cloud_unique_id\":\"cloud_unique_id_compute_node0\",
                    \"ip\":\"127.0.0.1\",
                    \"heartbeat_port\":\"9050\"
                }
            ]
        }
    }"
    if ! curl "127.0.0.1:5000/MetaService/http/add_cluster?token=greedisgood9999" -d "${payload}"; then
        return 1
    fi
    # read the cluster back to confirm it was created
    if ret=$(curl "127.0.0.1:5000/MetaService/http/get_cluster?token=greedisgood9999" -d "{
        \"instance_id\": \"cloud_instance_0\",
        \"cloud_unique_id\":\"cloud_unique_id_compute_node0\",
        \"cluster_name\":\"cluster_name0\",
        \"cluster_id\":\"cluster_id0\"
    }"); then
        echo -e "warehouse_add_be:\n${ret}"
    fi
}
+
function check_if_need_gcore() {
    # Placeholder: the gcore-collection decision is not implemented yet;
    # emit a blank line so callers see uniform output.
    echo
}
diff --git a/regression-test/pipeline/common/github-utils.sh 
b/regression-test/pipeline/common/github-utils.sh
index 1b96919912e..e4b05f3b0be 100644
--- a/regression-test/pipeline/common/github-utils.sh
+++ b/regression-test/pipeline/common/github-utils.sh
@@ -287,6 +287,21 @@ file_changed_cloud_ut() {
     echo "return no need" && return 1
 }
 
file_changed_cloud_p0() {
    # Decide whether the cloud_p0 pipeline needs to run, based on the list
    # of changed files in ./all_files (one path per line).
    # Returns: 0 ("need") when any cloud-related path changed or the list is
    #          empty; 1 ("no need") otherwise.
    local all_files af
    all_files=$(cat all_files)
    if _only_modified_regression_conf; then echo "return no need" && return 1; fi
    if [[ -z ${all_files} ]]; then echo "return need" && return 0; fi
    for af in ${all_files}; do
        case "${af}" in
        cloud/src/* | cloud/test/* | be/src/cloud/*)
            echo "cloud-p0 related file changed, return need" && return 0
            ;;
        esac
    done
    echo "return no need" && return 1
}
+
 file_changed_regression_p0() {
     local all_files
     all_files=$(cat all_files)
diff --git a/regression-test/pipeline/common/oss-utils.sh 
b/regression-test/pipeline/common/oss-utils.sh
index cc036fb573a..465d1085e95 100644
--- a/regression-test/pipeline/common/oss-utils.sh
+++ b/regression-test/pipeline/common/oss-utils.sh
@@ -58,6 +58,8 @@ function download_oss_file() {
     OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile-release"}"
     install_ossutil
     if ossutil cp -f \
+        -i "${OSS_accessKeyID}" \
+        -k "${OSS_accessKeySecret}" \
         "${OSS_DIR}/${file_name}" \
         "${file_name}"; then
         echo "INFO: download ${file_name} success" && return 0
diff --git a/regression-test/pipeline/common/teamcity-utils.sh 
b/regression-test/pipeline/common/teamcity-utils.sh
index be7f8af93c5..3041297562a 100644
--- a/regression-test/pipeline/common/teamcity-utils.sh
+++ b/regression-test/pipeline/common/teamcity-utils.sh
@@ -34,8 +34,8 @@ comment_to_pipeline=(
     ['external']='Doris_External_Regression'
     ['pipelinex_p0']='Doris_DorisRegression_P0RegressionPipelineX'
     ['arm']='Doris_ArmPipeline_P0Regression'
-    ['performance']='Doris_PerformanceNew_Performance'
-    ['perf']='Doris_DorisPerformance_Performance'
+    ['performance']='Doris_DorisPerformance_Performance'
+    ['cloud_p0']='Doris_DorisRegression_CloudP0'
 )
 
 # github中评论的要触发的流水线名字
@@ -54,8 +54,8 @@ conment_to_context=(
     ['external']='External Regression (Doris External Regression)'
     ['pipelinex_p0']='P0 Regression PipelineX (Doris Regression)'
     ['arm']='P0 Regression (ARM pipeline)'
-    ['performance']='performance (Performance New)'
-    ['perf']='performance (Doris Performance)'
+    ['performance']='performance (Doris Performance)'
+    ['cloud_p0']='cloud_p0 (Doris Cloud Regression)'
 )
 
 get_commit_id_of_build() {
diff --git a/regression-test/pipeline/performance/prepare.sh 
b/regression-test/pipeline/performance/prepare.sh
index ee3c8e0a990..9074653ea78 100644
--- a/regression-test/pipeline/performance/prepare.sh
+++ b/regression-test/pipeline/performance/prepare.sh
@@ -113,4 +113,4 @@ echo "#### 3. try to kill old doris process"
 # shellcheck source=/dev/null
 # stop_doris
 source 
"${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh
-stop_doris
+if stop_doris; then echo; fi
diff --git 
a/regression-test/pipeline/tpch/tpch-sf100/conf/regression-conf.groovy 
b/regression-test/pipeline/tpch/tpch-sf100/conf/regression-conf.groovy
deleted file mode 100644
index 5234ccc4241..00000000000
--- a/regression-test/pipeline/tpch/tpch-sf100/conf/regression-conf.groovy
+++ /dev/null
@@ -1,112 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-/* ******* Do not commit this file unless you know what you are doing ******* 
*/
-
-// **Note**: default db will be create if not exist
-defaultDb = "regression_test"
-
-jdbcUrl = 
"jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true"
-targetJdbcUrl = 
"jdbc:mysql://172.19.0.2:9131/?useLocalSessionState=true&allowLoadLocalInfile=true"
-jdbcUser = "root"
-jdbcPassword = ""
-
-feSourceThriftAddress = "127.0.0.1:9020"
-feTargetThriftAddress = "127.0.0.1:9020"
-feSyncerUser = "root"
-feSyncerPassword = ""
-
-feHttpAddress = "172.19.0.2:8131"
-feHttpUser = "root"
-feHttpPassword = ""
-
-// set DORIS_HOME by system properties
-// e.g. java -DDORIS_HOME=./
-suitePath = "${DORIS_HOME}/regression-test/suites"
-dataPath = "${DORIS_HOME}/regression-test/data"
-pluginPath = "${DORIS_HOME}/regression-test/plugins"
-realDataPath = "${DORIS_HOME}/regression-test/realdata"
-// sf1DataPath can be url like 
"https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com"; or local 
path like "/data"
-//sf1DataPath = 
"https://doris-community-test-1308700295.cos.ap-hongkong.myqcloud.com";
-
-// will test <group>/<suite>.groovy
-// empty group will test all group
-testGroups = ""
-// empty suite will test all suite
-testSuites = ""
-// empty directories will test all directories
-testDirectories = ""
-
-// this groups will not be executed
-excludeGroups = ""
-// this suites will not be executed
-
-excludeSuites = 
"test_sql_block_rule,test_profile,test_spark_load,test_refresh_mtmv,test_bitmap_filter,test_jdbc_query_mysql"
-
-// this directories will not be executed
-excludeDirectories = "workload_manager_p1,fault_injection_p0"
-
-customConf1 = "test_custom_conf_value"
-
-// for test csv with header
-enableHdfs=false // set to true if hdfs is ready
-hdfsFs = "hdfs://127.0.0.1:9000"
-hdfsUser = "doris-test"
-hdfsPasswd = ""
-brokerName = "broker_name"
-
-// broker load test config
-enableBrokerLoad=true
-
-// jdbc connector test config
-// To enable jdbc test, you need first start mysql/pg container.
-// See `docker/thirdparties/start-thirdparties-docker.sh`
-enableJdbcTest=false
-mysql_57_port=7111
-pg_14_port=7121
-mariadb_10_port=3326
-// hive catalog test config
-// To enable jdbc test, you need first start hive container.
-// See `docker/thirdparties/start-thirdparties-docker.sh`
-enableHiveTest=false
-hms_port=7141
-hiveServerPort=10000
-
-// kafka test config
-// to enable kafka test, you need firstly to start kafka container
-// See `docker/thirdparties/start-thirdparties-docker.sh`
-enableKafkaTest=true
-kafka_port=19193
-
-// iceberg test config
-iceberg_rest_uri_port=18181
-iceberg_minio_port=19001
-
-enableEsTest=false
-es_6_port=19200
-es_7_port=29200
-es_8_port=39200
-
-cacheDataPath = "/data/regression/"
-
-s3Endpoint = "cos.ap-hongkong.myqcloud.com"
-s3BucketName = "doris-build-hk-1308700295"
-s3Region = "ap-hongkong"
-
-max_failure_num=50
-
-externalEnvIp="127.0.0.1"


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to