This is an automated email from the ASF dual-hosted git repository.
kou pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow.git
The following commit(s) were added to refs/heads/main by this push:
new db2d83129e GH-46656: [CI][Dev] Fix shellcheck SC2034 and SC2086 errors
in ci/scripts directory (#46657)
db2d83129e is described below
commit db2d83129e09dff5d9856ab07dc8bee6783a7350
Author: Hiroyuki Sato <[email protected]>
AuthorDate: Mon Jun 2 09:58:57 2025 +0900
GH-46656: [CI][Dev] Fix shellcheck SC2034 and SC2086 errors in ci/scripts
directory (#46657)
### Rationale for this change
We are trying to implement shellcheck on all sh files in #44748.
### What changes are included in this PR?
* SC2034 unused variable error. Use variable properly like `${1}` ->
`${arrow_dir}`.
* SC2086 check require quoting like `${download_url}` ->
`"${download_url}"`.
```
In ci/scripts/install_conda.sh line 30:
version=$2
^-----^ SC2034 (warning): version appears unused. Verify use (or export if
used externally).
In ci/scripts/install_conda.sh line 37:
wget -nv ${download_url} -O /tmp/installer.sh
^-------------^ SC2086 (info): Double quote to prevent globbing
and word splitting.
```
### Are these changes tested?
Yes.
### Are there any user-facing changes?
No.
* GitHub Issue: #46656
Lead-authored-by: Hiroyuki Sato <[email protected]>
Co-authored-by: Sutou Kouhei <[email protected]>
Signed-off-by: Sutou Kouhei <[email protected]>
---
.pre-commit-config.yaml | 3 +++
ci/scripts/install_conda.sh | 8 ++++----
ci/scripts/integration_spark.sh | 17 ++++++++++-------
ci/scripts/nanoarrow_build.sh | 8 ++++----
4 files changed, 21 insertions(+), 15 deletions(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index da6d5b2b65..38a8972071 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -276,6 +276,7 @@ repos:
?^ci/scripts/install_ceph\.sh$|
?^ci/scripts/install_chromedriver\.sh$|
?^ci/scripts/install_cmake\.sh$|
+ ?^ci/scripts/install_conda\.sh$|
?^ci/scripts/install_emscripten\.sh$|
?^ci/scripts/install_iwyu\.sh$|
?^ci/scripts/install_ninja\.sh$|
@@ -286,9 +287,11 @@ repos:
?^ci/scripts/install_vcpkg\.sh$|
?^ci/scripts/integration_arrow_build\.sh$|
?^ci/scripts/integration_dask\.sh$|
+ ?^ci/scripts/integration_spark\.sh$|
?^ci/scripts/matlab_build\.sh$|
?^ci/scripts/msys2_system_clean\.sh$|
?^ci/scripts/msys2_system_upgrade\.sh$|
+ ?^ci/scripts/nanoarrow_build\.sh$|
?^ci/scripts/python_sdist_build\.sh$|
?^ci/scripts/python_wheel_unix_test\.sh$|
?^ci/scripts/r_build\.sh$|
diff --git a/ci/scripts/install_conda.sh b/ci/scripts/install_conda.sh
index 8539a0b2bb..c74b318cfa 100755
--- a/ci/scripts/install_conda.sh
+++ b/ci/scripts/install_conda.sh
@@ -30,16 +30,16 @@ installer=$1
version=$2
prefix=$3
-download_url=https://github.com/conda-forge/miniforge/releases/latest/download/${installer^}-${platform}-${arch}.sh
+download_url=https://github.com/conda-forge/miniforge/releases/${version}/download/${installer^}-${platform}-${arch}.sh
echo "Downloading Miniconda installer from ${download_url} ..."
-wget -nv ${download_url} -O /tmp/installer.sh
-bash /tmp/installer.sh -b -p ${prefix}
+wget -nv "${download_url}" -O /tmp/installer.sh
+bash /tmp/installer.sh -b -p "${prefix}"
rm /tmp/installer.sh
# Like "conda init", but for POSIX sh rather than bash
-ln -s ${prefix}/etc/profile.d/conda.sh /etc/profile.d/conda.sh
+ln -s "${prefix}/etc/profile.d/conda.sh" /etc/profile.d/conda.sh
export PATH=/opt/conda/bin:$PATH
diff --git a/ci/scripts/integration_spark.sh b/ci/scripts/integration_spark.sh
index f7ef87a8b8..b6b45a796a 100755
--- a/ci/scripts/integration_spark.sh
+++ b/ci/scripts/integration_spark.sh
@@ -18,16 +18,19 @@
# exit on any error
set -eu
-source_dir=${1}
-spark_dir=${2}
+if [ "$#" -lt 2 ]; then
+ echo "Usage: $0 <spark_version> <spark_dir>"
+ exit 1
+fi
# Spark branch to checkout
-spark_version=${SPARK_VERSION:-master}
+spark_version=${1}
+spark_dir=${2}
# Use old behavior that always dropped timezones.
export PYARROW_IGNORE_TIMEZONE=1
-if [ "${SPARK_VERSION:1:2}" == "2." ]; then
+if [ "${spark_version:1:2}" == "2." ]; then
# https://github.com/apache/spark/blob/master/docs/sql-pyspark-pandas-with-arrow.md#compatibility-setting-for-pyarrow--0150-and-spark-23x-24x
export ARROW_PRE_0_15_IPC_FORMAT=1
fi
@@ -35,8 +38,8 @@ fi
export MAVEN_OPTS="-Xss256m -Xmx2g -XX:ReservedCodeCacheSize=1g
-Dorg.slf4j.simpleLogger.defaultLogLevel=warn"
export MAVEN_OPTS="${MAVEN_OPTS}
-Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
-pushd ${spark_dir}
- echo "Building Spark ${SPARK_VERSION}"
+pushd "${spark_dir}"
+ echo "Building Spark ${spark_version}"
# Build Spark only
build/mvn -B -DskipTests package
@@ -50,7 +53,7 @@ pushd ${spark_dir}
"pyspark.sql.tests.arrow.test_arrow_map"
"pyspark.sql.tests.arrow.test_arrow_python_udf")
- case "${SPARK_VERSION}" in
+ case "${spark_version}" in
v1.*|v2.*|v3.0.*|v3.1.*|v3.2.*|v3.3.*)
old_test_modules=true
;;
diff --git a/ci/scripts/nanoarrow_build.sh b/ci/scripts/nanoarrow_build.sh
index 6f7c82b099..8627c0984f 100755
--- a/ci/scripts/nanoarrow_build.sh
+++ b/ci/scripts/nanoarrow_build.sh
@@ -20,7 +20,7 @@
set -e
arrow_dir=${1}
-source_dir=${1}/nanoarrow
+source_dir=${arrow_dir}/nanoarrow
build_dir=${2}/nanoarrow
# This file is used to build the nanoarrow binaries needed for the archery
@@ -43,10 +43,10 @@ fi
set -x
-mkdir -p ${build_dir}
-pushd ${build_dir}
+mkdir -p "${build_dir}"
+pushd "${build_dir}"
-cmake ${source_dir} \
+cmake "${source_dir}" \
-DNANOARROW_IPC=ON \
-DNANOARROW_IPC_WITH_ZSTD=ON \
-DNANOARROW_BUILD_INTEGRATION_TESTS=ON