This is an automated email from the ASF dual-hosted git repository.

mck pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/cassandra.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 6643ea7551 Add the dtest-upgrade-large (and other) test type for resource-intensive Python upgrade tests
6643ea7551 is described below

commit 6643ea7551d904383af6d036e18b8a66af0ceba2
Author: Mick Semb Wever <[email protected]>
AuthorDate: Sun Jul 16 13:19:49 2023 +0200

    Add the dtest-upgrade-large (and other) test type for resource-intensive Python upgrade tests
    
    Fixes:
     - regular expressions when filtering on tests to run, and exit when no match
     - safety when removing (cleaning up) directories (${DIST_DIR} and ${TMPDIR})
     - pre-condition `ant jar` has been used and not `ant artifacts`
     - compatibility with git worktrees (the original git working directory needs to be mounted in its existing path in docker)
     - fix tarball for ccm logs
     - add `--keep-failed-test-dir` to dtest-upgrade test types
     - only do `docker login` if `docker pull` is going to be called
     - unset CASSANDRA_HOME when running python dtests
    
     patch by Mick Semb Wever; reviewed by Brandon Williams, Jacek Lewandowski for CASSANDRA-18567, CASSANDRA-18499
---
 .build/README.md             | 12 +++++++++++-
 .build/docker/_docker_run.sh | 15 ++++++++++++---
 .build/docker/run-tests.sh   | 23 ++++++++++++++++-------
 .build/run-python-dtests.sh  | 42 +++++++++++++++++++++++++++---------------
 .build/run-tests.sh          | 30 +++++++++++++++++++++---------
 5 files changed, 87 insertions(+), 35 deletions(-)

diff --git a/.build/README.md b/.build/README.md
index 6bdb5a2c76..ae50b5a738 100644
--- a/.build/README.md
+++ b/.build/README.md
@@ -96,6 +96,7 @@ Running unittests with a specific jdk with docker:
 Running only unit tests matching a regexp, with docker:
 
     .build/docker/run-tests.sh test VerifyTest 11
+    .build/docker/run-tests.sh test "Compaction*Test$" 11
 
 
 Running other types of tests with docker:
@@ -106,6 +107,9 @@ Running other types of tests with docker:
     .build/docker/run-tests.sh microbench
     .build/docker/run-tests.sh test-cdc
     .build/docker/run-tests.sh test-compression
+    .build/docker/run-tests.sh test-oa
+    .build/docker/run-tests.sh test-system-keyspace-directory
+    .build/docker/run-tests.sh test-tries
     .build/docker/run-tests.sh test-burn
     .build/docker/run-tests.sh long-test
     .build/docker/run-tests.sh cqlsh-test
@@ -117,6 +121,7 @@ Running other types of tests with docker:
     .build/docker/run-tests.sh dtest-large
     .build/docker/run-tests.sh dtest-large-novnode
     .build/docker/run-tests.sh dtest-upgrade
+    .build/docker/run-tests.sh dtest-upgrade-large
 
 
 Running python dtests without docker:
@@ -126,5 +131,10 @@ Running python dtests without docker:
 
 Other test types without docker:
 
-    .build/run-tests.sh test
+    .build/run-tests.sh jvm-test
+
+
+Other python dtest types without docker:
+
+    .build/run-python-dtests.sh dtest-upgrade-large
 
diff --git a/.build/docker/_docker_run.sh b/.build/docker/_docker_run.sh
index 13ecdaa8cf..12228d258f 100755
--- a/.build/docker/_docker_run.sh
+++ b/.build/docker/_docker_run.sh
@@ -78,19 +78,28 @@ chmod -R ag+rwx ${build_dir}
 #
 ################################
 
+# git worktrees need their original working directory (in its original path)
+if [ -f ${cassandra_dir}/.git ] ; then
+    git_location="$(cat ${cassandra_dir}/.git | awk -F".git" '{print $1}' | awk '{print $2}')"
+    docker_volume_opt="${docker_volume_opt} -v${git_location}:${git_location}"
+fi
+
 pushd ${cassandra_dir}/.build >/dev/null
 
 image_tag="$(md5sum docker/${dockerfile} | cut -d' ' -f1)"
 image_name="apache/cassandra-${dockerfile/.docker/}:${image_tag}"
 
 # Look for existing docker image, otherwise build
-timeout -k 5 5 docker login >/dev/null
-if ! ( [[ "$(docker images -q ${image_name} 2>/dev/null)" != "" ]] || docker 
pull -q ${image_name} ) ; then
+if ! ( [[ "$(docker images -q ${image_name} 2>/dev/null)" != "" ]] ) ; then
+  # try docker login to increase dockerhub rate limits
+  timeout -k 5 5 docker login >/dev/null
+  if ! ( docker pull -q ${image_name} >/dev/null 2>/dev/null ) ; then
     # Create build images containing the build tool-chain, Java and an Apache Cassandra git working directory, with retry
     until docker build -t ${image_name} -f docker/${dockerfile} .  ; do
         echo "docker build failed… trying again in 10s… "
         sleep 10
     done
+  fi
 fi
 
 # Run build script through docker
@@ -110,7 +119,7 @@ docker_command="export ANT_OPTS=\"-Dbuild.dir=\${DIST_DIR} ${CASSANDRA_DOCKER_AN
 # re-use the host's maven repository
 container_id=$(docker run --name ${container_name} -d --security-opt seccomp=unconfined --rm \
     -v "${cassandra_dir}":/home/build/cassandra -v ~/.m2/repository/:/home/build/.m2/repository/ -v "${build_dir}":/dist \
-    ${build_volume_opt} \
+    ${docker_volume_opt} \
     ${image_name} sleep 1h)
 
 echo "Running container ${container_name} ${container_id}"
diff --git a/.build/docker/run-tests.sh b/.build/docker/run-tests.sh
index d6ef16f7fb..dda7765972 100755
--- a/.build/docker/run-tests.sh
+++ b/.build/docker/run-tests.sh
@@ -27,7 +27,7 @@
 # help
 if [ "$#" -lt 1 ] || [ "$#" -gt 3 ] || [ "$1" == "-h" ]; then
     echo ""
-    echo "Usage: run-tests.sh target [split_chunk|test_regexp] [java_version]"
+    echo "Usage: run-tests.sh test_type [split_chunk|test_regexp] 
[java_version]"
     echo ""
     echo "        default split_chunk is 1/1"
     echo "        default java_version is what 'java.default' specifies in 
build.xml"
@@ -84,13 +84,16 @@ image_name="apache/cassandra-${dockerfile/.docker/}:${image_tag}"
 docker_mounts="-v ${cassandra_dir}:/home/cassandra/cassandra -v "${build_dir}":/home/cassandra/cassandra/build -v ${HOME}/.m2/repository:/home/cassandra/.m2/repository"
 
 # Look for existing docker image, otherwise build
-timeout -k 5 5 docker login >/dev/null 2>/dev/null
-if ! ( [[ "$(docker images -q ${image_name} 2>/dev/null)" != "" ]] || docker 
pull -q ${image_name} ) >/dev/null 2>/dev/null ; then
+if ! ( [[ "$(docker images -q ${image_name} 2>/dev/null)" != "" ]] ) ; then
+  # try docker login to increase dockerhub rate limits
+  timeout -k 5 5 docker login >/dev/null 2>/dev/null
+  if ! ( docker pull -q ${image_name} >/dev/null 2>/dev/null ) ; then
     # Create build images containing the build tool-chain, Java and an Apache Cassandra git working directory, with retry
     until docker build -t ${image_name} -f docker/${dockerfile} .  ; do
         echo "docker build failed… trying again in 10s… "
         sleep 10
     done
+  fi
 fi
 
 pushd ${cassandra_dir} >/dev/null
@@ -118,7 +121,7 @@ case ${target} in
     "stress-test" | "fqltool-test" | "microbench" | "test-burn" | "long-test" 
| "cqlsh-test" )
         [[ ${mem} -gt $((5 * 1024 * 1024 * 1024 * ${jenkins_executors})) ]] || 
{ echo >&2 "tests require minimum docker memory 6g (per jenkins executor 
(${jenkins_executors})), found ${mem}"; exit 1; }
     ;;
-    "dtest" | "dtest-novnode" | "dtest-offheap" | "dtest-large" | 
"dtest-large-novnode" | "dtest-upgrade" )
+    "dtest" | "dtest-novnode" | "dtest-offheap" | "dtest-large" | 
"dtest-large-novnode" | "dtest-upgrade" | "dtest-upgrade-large" )
         [ -f "${cassandra_dtest_dir}/dtest.py" ] || { echo >&2 
"${cassandra_dtest_dir}/dtest.py must exist"; exit 1; }
         [[ ${mem} -gt $((15 * 1024 * 1024 * 1024 * ${jenkins_executors})) ]] 
|| { echo >&2 "dtests require minimum docker memory 16g (per jenkins executor 
(${jenkins_executors})), found ${mem}"; exit 1; }
         test_script="run-python-dtests.sh"
@@ -126,13 +129,13 @@ case ${target} in
         # check that ${cassandra_dtest_dir} is valid
         [ -f "${cassandra_dtest_dir}/dtest.py" ] || { echo >&2 
"${cassandra_dtest_dir}/dtest.py not found. please specify 
'cassandra_dtest_dir' to point to the local cassandra-dtest source"; exit 1; }
     ;;
-    "test"| "test-cdc" | "test-compression" | "jvm-dtest" | 
"jvm-dtest-upgrade")
+    "test"| "test-cdc" | "test-compression" | "test-oa" | 
"test-system-keyspace-directory" | "test-tries" | "jvm-dtest" | 
"jvm-dtest-upgrade")
         [[ ${mem} -gt $((5 * 1024 * 1024 * 1024 * ${jenkins_executors})) ]] || 
{ echo >&2 "tests require minimum docker memory 6g (per jenkins executor 
(${jenkins_executors})), found ${mem}"; exit 1; }
         max_docker_runs_by_cores=$( echo "sqrt( ${cores} / 
${jenkins_executors} )" | bc )
         max_docker_runs_by_mem=$(( ${mem} / ( 5 * 1024 * 1024 * 1024 * 
${jenkins_executors} ) ))
     ;;
     *)
-    echo "unrecognized \"${target}\""
+    echo "unrecognized test type \"${target}\""
     exit 1
     ;;
 esac
@@ -177,6 +180,12 @@ if [[ "${split_chunk}" =~ ^[0-9]+/[0-9]+$ ]]; then
     split_str="${split_chunk/\//_}"
 fi
 
+# git worktrees need their original working directory (in its original path)
+if [ -f ${cassandra_dir}/.git ] ; then
+    git_location="$(cat ${cassandra_dir}/.git | awk -F".git" '{print $1}' | awk '{print $2}')"
+    docker_volume_opt="${docker_volume_opt} -v${git_location}:${git_location}"
+fi
+
 random_string="$(LC_ALL=C tr -dc A-Za-z0-9 </dev/urandom | head -c 6 ; echo 
'')"
 
 
container_name="cassandra_${dockerfile/.docker/}_${target}_jdk${java_version/./-}_arch-$(arch)_python${python_version/./-}_${split_str}__${random_string}"
@@ -190,7 +199,7 @@ docker_command="source \${CASSANDRA_DIR}/.build/docker/_set_java.sh ${java_versi
             \${CASSANDRA_DIR}/.build/docker/_docker_init_tests.sh ${target} ${split_chunk} ; exit \$?"
 
 # start the container, timeout after 4 hours
-docker_id=$(docker run --name ${container_name} ${docker_flags} ${docker_mounts} ${image_name} sleep 4h)
+docker_id=$(docker run --name ${container_name} ${docker_flags} ${docker_mounts} ${docker_volume_opt} ${image_name} sleep 4h)
 
 echo "Running container ${container_name} ${docker_id}"
 
diff --git a/.build/run-python-dtests.sh b/.build/run-python-dtests.sh
index 14ad1ad578..c43057926d 100755
--- a/.build/run-python-dtests.sh
+++ b/.build/run-python-dtests.sh
@@ -16,10 +16,8 @@
 # limitations under the License.
 
 #
-# Wrapper script for running a split chunk of a pytest run of cassandra-dtest
+# Wrapper script for running a split or regexp of a pytest run from cassandra-dtest
 #
-# Usage: dtest-python.sh target split_chunk
-#  split_chunk formatted as "K/N" for the Kth chunk of N chunks
 
 ################################
 #
@@ -27,6 +25,16 @@
 #
 ################################
 
+
+# help
+if [ "$#" -lt 1 ] || [ "$#" -gt 2 ] || [ "$1" == "-h" ]; then
+    echo ""
+    echo "Usage: run-python-dtest.sh test_type [split_chunk|test_regexp]"
+    echo ""
+    echo "        default split_chunk is 1/1"
+    exit 1
+fi
+
 # Pass in target to run, defaults to dtest
 DTEST_TARGET="${1:-dtest}"
 # Optional: pass in chunk to test, formatted as "K/N" for the Kth chunk of N chunks
@@ -48,6 +56,7 @@ export NUM_TOKENS="16"
 #Have Cassandra skip all fsyncs to improve test performance and reliability
 export CASSANDRA_SKIP_SYNC=true
 export TMPDIR="$(mktemp -d /tmp/run-python-dtest.XXXXXX)"
+unset CASSANDRA_HOME
 
 # pre-conditions
 command -v ant >/dev/null 2>&1 || { echo >&2 "ant needs to be installed"; exit 1; }
@@ -85,7 +94,7 @@ ant -quiet -silent resolver-dist-lib
 set -e # enable immediate exit if venv setup fails
 
 # fresh virtualenv and test logs results everytime
-rm -rf ${DIST_DIR}/venv ${DIST_DIR}/test/{html,output,logs}
+[[ "/" == "${DIST_DIR}" ]] || rm -rf "${DIST_DIR}/venv" 
"${DIST_DIR}/test/{html,output,logs}"
 
 # re-use when possible the pre-installed virtualenv found in the cassandra-ubuntu2004_test docker image
 virtualenv-clone ${BUILD_HOME}/env${python_version} ${DIST_DIR}/venv || virtualenv --python=python${python_version} ${DIST_DIR}/venv
@@ -103,17 +112,19 @@ cd ${CASSANDRA_DTEST_DIR}
 
 set +e # disable immediate exit from this point
 if [ "${DTEST_TARGET}" = "dtest" ]; then
-    DTEST_ARGS="--use-vnodes --num-tokens=${NUM_TOKENS} 
--skip-resource-intensive-tests --keep-failed-test-dir"
+    DTEST_ARGS="--use-vnodes --num-tokens=${NUM_TOKENS} 
--skip-resource-intensive-tests"
 elif [ "${DTEST_TARGET}" = "dtest-novnode" ]; then
     DTEST_ARGS="--skip-resource-intensive-tests --keep-failed-test-dir"
 elif [ "${DTEST_TARGET}" = "dtest-offheap" ]; then
-    DTEST_ARGS="--use-vnodes --num-tokens=${NUM_TOKENS} 
--use-off-heap-memtables --skip-resource-intensive-tests --keep-failed-test-dir"
+    DTEST_ARGS="--use-vnodes --num-tokens=${NUM_TOKENS} 
--use-off-heap-memtables --skip-resource-intensive-tests"
 elif [ "${DTEST_TARGET}" = "dtest-large" ]; then
-    DTEST_ARGS="--use-vnodes --num-tokens=${NUM_TOKENS} 
--only-resource-intensive-tests --force-resource-intensive-tests 
--keep-failed-test-dir"
+    DTEST_ARGS="--use-vnodes --num-tokens=${NUM_TOKENS} 
--only-resource-intensive-tests --force-resource-intensive-tests"
 elif [ "${DTEST_TARGET}" = "dtest-large-novnode" ]; then
-    DTEST_ARGS="--only-resource-intensive-tests 
--force-resource-intensive-tests --keep-failed-test-dir"
+    DTEST_ARGS="--only-resource-intensive-tests 
--force-resource-intensive-tests"
 elif [ "${DTEST_TARGET}" = "dtest-upgrade" ]; then
-    DTEST_ARGS="--execute-upgrade-tests-only --upgrade-target-version-only 
--upgrade-version-selection all"
+    DTEST_ARGS="--use-vnodes --num-tokens=${NUM_TOKENS} 
--execute-upgrade-tests --execute-upgrade-tests-only 
--upgrade-target-version-only --upgrade-version-selection all"
+elif [ "${DTEST_TARGET}" = "dtest-upgrade-large" ]; then
+    DTEST_ARGS="--use-vnodes --num-tokens=${NUM_TOKENS} 
--execute-upgrade-tests --execute-upgrade-tests-only 
--upgrade-target-version-only --upgrade-version-selection all 
--only-resource-intensive-tests --force-resource-intensive-tests"
 else
     echo "Unknown dtest target: ${DTEST_TARGET}"
     exit 1
@@ -128,20 +139,21 @@ if [[ "${DTEST_SPLIT_CHUNK}" =~ ^[0-9]+/[0-9]+$ ]]; then
     ( split --help 2>&1 ) | grep -q "r/K/N" || split_cmd=gsplit
     command -v ${split_cmd} >/dev/null 2>&1 || { echo >&2 "${split_cmd} needs to be installed"; exit 1; }
     SPLIT_TESTS=$(${split_cmd} -n r/${DTEST_SPLIT_CHUNK} ${DIST_DIR}/test_list.txt)
-elif [[ "x" != "x${_split_chunk}" ]] ; then
-    SPLIT_TESTS=$(grep "${DTEST_SPLIT_CHUNK}" ${DIST_DIR}/test_list.txt)
+elif [[ "x" != "x${DTEST_SPLIT_CHUNK}" ]] ; then
+    SPLIT_TESTS=$(grep -e "${DTEST_SPLIT_CHUNK}" ${DIST_DIR}/test_list.txt)
+    [[ "x" != "x${SPLIT_TESTS}" ]] || { echo "no tests match regexp 
\"${DTEST_SPLIT_CHUNK}\""; exit 1; }
 else
     SPLIT_TESTS=$(cat ${DIST_DIR}/test_list.txt)
 fi
 
 
 PYTEST_OPTS="-vv --log-cli-level=DEBUG 
--junit-xml=${DIST_DIR}/test/output/nosetests.xml 
--junit-prefix=${DTEST_TARGET} -s"
-pytest ${PYTEST_OPTS} --cassandra-dir=${CASSANDRA_DIR} ${DTEST_ARGS} 
${SPLIT_TESTS} 2>&1 | tee -a ${DIST_DIR}/test_stdout.txt
+pytest ${PYTEST_OPTS} --cassandra-dir=${CASSANDRA_DIR} --keep-failed-test-dir 
${DTEST_ARGS} ${SPLIT_TESTS} 2>&1 | tee -a ${DIST_DIR}/test_stdout.txt
 
 # tar up any ccm logs for easy retrieval
-if ls ${TMPDIR}/test/*/logs/* &>/dev/null ; then
+if ls ${TMPDIR}/*/test/*/logs/* &>/dev/null ; then
     mkdir -p ${DIST_DIR}/test/logs
-    tar -C ${TMPDIR} -cJf ${DIST_DIR}/test/logs/ccm_logs.tar.xz */test/*/logs/*
+    tar -C ${TMPDIR} -cJf ${DIST_DIR}/test/logs/ccm_logs.tar.xz ${TMPDIR}/*/test/*/logs
 fi
 
 # merge all unit xml files into one, and print summary test numbers
@@ -158,7 +170,7 @@ popd  >/dev/null
 #
 ################################
 
-rm -rf ${TMPDIR}
+rm -rf "/tmp/run-python-dtest.${TMPDIR/\/tmp\/run-python-dtest./}"
 unset TMPDIR
 deactivate
 
diff --git a/.build/run-tests.sh b/.build/run-tests.sh
index f0388250ce..5e987fbeaf 100755
--- a/.build/run-tests.sh
+++ b/.build/run-tests.sh
@@ -16,10 +16,8 @@
 # limitations under the License.
 
 #
-# Wrapper script for running a split chunk of an ant test target
+# Wrapper script for running a split or regexp of tests (excluding python dtests)
 #
-# Usage: run-tests.sh target [split_chunk]
-#  split_chunk formatted as "K/N" for the Kth chunk of N chunks
 
 set -o errexit
 set -o pipefail
@@ -39,7 +37,7 @@ command -v git >/dev/null 2>&1 || { echo >&2 "git needs to be installed"; exit 1
 # help
 if [ "$#" -lt 1 ] || [ "$#" -gt 2 ] || [ "$1" == "-h" ]; then
     echo ""
-    echo "Usage: run-tests.sh target [split_chunk|test_regexp]"
+    echo "Usage: run-tests.sh test_type [split_chunk|test_regexp]"
     echo ""
     echo "        default split_chunk is 1/1"
     exit 1
@@ -65,7 +63,7 @@ _split_tests() {
     command -v ${split_cmd} >/dev/null 2>&1 || { echo >&2 "${split_cmd} needs to be installed"; exit 1; }
     ${split_cmd} -n r/${_split_chunk}
   elif [[ "x" != "x${_split_chunk}" ]] ; then
-    grep ${_split_chunk}
+    grep -e "${_split_chunk}"
   else
     echo
   fi
@@ -155,13 +153,16 @@ _main() {
   # check project is already built. no cleaning is done, so jenkins unstash works, beware.
   [[ -f "${DIST_DIR}/apache-cassandra-${version}.jar" ]] || [[ -f "${DIST_DIR}/apache-cassandra-${version}-SNAPSHOT.jar" ]] || { echo "Project must be built first. Use \`ant jar\`. Build directory is ${DIST_DIR} with: $(ls ${DIST_DIR})"; exit 1; }
 
+  # check if dist artifacts exist, this breaks the dtests
+  [[ -d "${DIST_DIR}/dist" ]] && { echo "tests don't work when build/dist 
("${DIST_DIR}/dist") exists (from \`ant artifacts\`)"; exit 1; }
+
   # ant test setup
   export TMP_DIR="${DIST_DIR}/tmp"
   mkdir -p "${TMP_DIR}" || true
   export ANT_TEST_OPTS="-Dno-build-test=true -Dtmp.dir=${TMP_DIR} 
-Drat.skip=true -Dno-checkstyle=true -Dno-javadoc=true -Dant.gen-doc.skip=true"
 
   # fresh virtualenv and test logs results everytime
-  rm -rf ${DIST_DIR}/test/{html,output,logs}
+  [[ "/" == "${DIST_DIR}" ]] || rm -rf "${DIST_DIR}/test/{html,output,logs}"
 
   # cheap trick to ensure dependency libraries are in place. allows us to stash only project specific build artifacts.
   ant -quiet -silent resolver-dist-lib
@@ -189,6 +190,15 @@ _main() {
     "test-compression")
       _run_testlist "unit" "testclasslist-compression" "${split_chunk}" 
"$(_timeout_for 'test.timeout')"
       ;;
+    "test-oa")
+      _run_testlist "unit" "testclasslist-oa" "${split_chunk}" "$(_timeout_for 
'test.timeout')"
+      ;;
+    "test-system-keyspace-directory")
+      _run_testlist "unit" "testclasslist-system-keyspace-directory" 
"${split_chunk}" "$(_timeout_for 'test.timeout')"
+      ;;
+    "test-trie")
+      _run_testlist "unit" "testclasslist-trie" "${split_chunk}" 
"$(_timeout_for 'test.timeout')"
+      ;;
     "test-burn")
       _run_testlist "burn" "testclasslist" "${split_chunk}" "$(_timeout_for 
'test.burn.timeout')"
       ;;
@@ -198,8 +208,9 @@ _main() {
     "jvm-dtest")
       testlist=$( _list_tests "distributed" | grep -v "upgrade" | _split_tests 
"${split_chunk}")
       if [[ -z "$testlist" ]]; then
+          [[ "${split_chunk}" =~ ^[0-9]+/[0-9]+$ ]] || { echo "No tests match 
${split_chunk}"; exit 1; }
           # something has to run in the split to generate a junit xml result
-          echo Hacking jvm-dtest to run only first test found as no tests in 
split ${split_chunk} were found
+          echo "Hacking jvm-dtest to run only first test found as no tests in 
split ${split_chunk} were found"
           testlist="$( _list_tests "distributed"  | grep -v "upgrade" | head 
-n1)"
       fi
       ant testclasslist -Dtest.classlistprefix=distributed -Dtest.timeout=$(_timeout_for "test.distributed.timeout") -Dtest.classlistfile=<(echo "${testlist}") ${ANT_TEST_OPTS} || echo "failed ${target} ${split_chunk}"
@@ -208,8 +219,9 @@ _main() {
       _build_all_dtest_jars
       testlist=$( _list_tests "distributed"  | grep "upgrade" | _split_tests 
"${split_chunk}")
       if [[ -z "${testlist}" ]]; then
+          [[ "${split_chunk}" =~ ^[0-9]+/[0-9]+$ ]] || { echo "No tests match 
${split_chunk}"; exit 1; }
           # something has to run in the split to generate a junit xml result
-          echo Hacking jvm-dtest-upgrade to run only first test found as no 
tests in split ${split_chunk} were found
+          echo "Hacking jvm-dtest-upgrade to run only first test found as no 
tests in split ${split_chunk} were found"
           testlist="$( _list_tests "distributed"  | grep "upgrade" | head -n1)"
       fi
       ant testclasslist -Dtest.classlistprefix=distributed -Dtest.timeout=$(_timeout_for "test.distributed.timeout") -Dtest.classlistfile=<(echo "${testlist}") ${ANT_TEST_OPTS} || echo "failed ${target} ${split_chunk}"
@@ -218,7 +230,7 @@ _main() {
       ./pylib/cassandra-cqlsh-tests.sh $(pwd)
       ;;
     *)
-      echo "unregconized \"${target}\""
+      echo "unrecognized test type \"${target}\""
       exit 1
       ;;
   esac
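
The new dtest-upgrade-large test type is invoked like the existing test types shown in the .build/README.md changes above. For example (the split chunk "1/8", chunk "1/64" and JDK "11" below are illustrative values only; any valid chunk, test regexp or supported JDK can be used):

    .build/docker/run-tests.sh dtest-upgrade-large 1/8 11
    .build/run-python-dtests.sh dtest-upgrade-large 1/64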

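On the git-worktree fix: in a worktree the checkout's .git is a plain file pointing back at the main repository, which is why _docker_run.sh and docker/run-tests.sh above mount that original path into the container. A minimal illustration of the parsing used there, with a hypothetical worktree whose main checkout lives at /home/user/cassandra:

    $ cat /path/to/worktree/.git
    gitdir: /home/user/cassandra/.git/worktrees/cassandra-18567

    $ cat /path/to/worktree/.git | awk -F".git" '{print $1}' | awk '{print $2}'
    /home/user/cassandra/

The awk pipeline keeps everything before ".git" and prints its second whitespace-separated field, i.e. the main checkout's path, which is then bind-mounted at the same location (-v${git_location}:${git_location}) so git commands inside the container can resolve the worktree's gitdir.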

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
