Skip to content

Commit 8ff354d

Browse files
committed
Use git rev-parse --show-toplevel to determine the repository root directory.
This allows the test scripts to be run from any directory inside the repository, not only from its top level.
1 parent dbd5643 commit 8ff354d

File tree

1 file changed

+20
-26
lines changed

1 file changed

+20
-26
lines changed

e2e/runner.sh

Lines changed: 20 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -24,12 +24,7 @@ usage () {
2424
echo " The deployment mode can be specified using the 'd' flag."
2525
}
2626

27-
### Basic Validation ###
28-
if [ ! -d "integration-test" ]; then
29-
echo "This script must be invoked from the top-level directory of the integration-tests repository"
30-
usage
31-
exit 1
32-
fi
27+
cd "$(dirname "$0")"
3328

3429
### Set sensible defaults ###
3530
REPO="https://github.com/apache/spark"
@@ -79,44 +74,43 @@ echo "Running tests on cluster $MASTER against $REPO."
7974
echo "Spark images will be created in $IMAGE_REPO"
8075

8176
set -ex
82-
root=$(pwd)
83-
77+
TEST_ROOT=$(git rev-parse --show-toplevel)
78+
SPARK_REPO_ROOT="$TEST_ROOT/spark"
8479
# clone spark distribution if needed.
85-
if [ -d "spark" ];
80+
if [ -d "$SPARK_REPO_ROOT" ];
8681
then
87-
(cd spark && git pull origin $BRANCH);
82+
(cd $SPARK_REPO_ROOT && git pull origin $BRANCH);
8883
else
89-
git clone $REPO;
84+
git clone $REPO $SPARK_REPO_ROOT
9085
fi
9186

92-
cd spark
87+
cd $SPARK_REPO_ROOT
9388
git checkout -B $BRANCH origin/$BRANCH
9489
./dev/make-distribution.sh --tgz -Phadoop-2.7 -Pkubernetes -DskipTests
95-
tag=$(git rev-parse HEAD | cut -c -6)
96-
echo "Spark distribution built at SHA $tag"
90+
TAG=$(git rev-parse HEAD | cut -c -6)
91+
echo "Spark distribution built at SHA $TAG"
9792

9893
if [[ $DEPLOY_MODE == cloud ]] ;
9994
then
100-
cd dist && ./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $tag build
95+
cd dist && ./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $TAG build
10196
if [[ $IMAGE_REPO == gcr.io* ]] ;
10297
then
103-
gcloud docker -- push $IMAGE_REPO/spark-driver:$tag && \
104-
gcloud docker -- push $IMAGE_REPO/spark-executor:$tag && \
105-
gcloud docker -- push $IMAGE_REPO/spark-init:$tag
98+
gcloud docker -- push $IMAGE_REPO/spark-driver:$TAG && \
99+
gcloud docker -- push $IMAGE_REPO/spark-executor:$TAG && \
100+
gcloud docker -- push $IMAGE_REPO/spark-init:$TAG
106101
else
107-
./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $tag push
102+
./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $TAG push
108103
fi
109104
else
110105
# -m option for minikube.
111-
cd dist && ./sbin/build-push-docker-images.sh -m -r $IMAGE_REPO -t $tag build
106+
cd dist && ./sbin/build-push-docker-images.sh -m -r $IMAGE_REPO -t $TAG build
112107
fi
113108

114-
cd $root/integration-test
115-
$root/spark/build/mvn clean -Ddownload.plugin.skip=true integration-test \
116-
-Dspark-distro-tgz=$root/spark/*.tgz \
109+
$TEST_ROOT/integration-test/build/mvn clean -Ddownload.plugin.skip=true integration-test \
110+
-Dspark-distro-tgz=$SPARK_REPO_ROOT/*.tgz \
117111
-DextraScalaTestArgs="-Dspark.kubernetes.test.master=k8s://$MASTER \
118-
-Dspark.docker.test.driverImage=$IMAGE_REPO/spark-driver:$tag \
119-
-Dspark.docker.test.executorImage=$IMAGE_REPO/spark-executor:$tag \
120-
-Dspark.docker.test.initContainerImage=$IMAGE_REPO/spark-init:$tag" || :
112+
-Dspark.docker.test.driverImage=$IMAGE_REPO/spark-driver:$TAG \
113+
-Dspark.docker.test.executorImage=$IMAGE_REPO/spark-executor:$TAG \
114+
-Dspark.docker.test.initContainerImage=$IMAGE_REPO/spark-init:$TAG" || :
121115

122116
echo "TEST SUITE FINISHED"

0 commit comments

Comments
 (0)