@@ -24,12 +24,7 @@ usage () {
   echo " The deployment mode can be specified using the 'd' flag."
 }
 
-### Basic Validation ###
-if [ ! -d "integration-test" ]; then
-  echo "This script must be invoked from the top-level directory of the integration-tests repository"
-  usage
-  exit 1
-fi
+cd "$(dirname "$0")"
 
 ### Set sensible defaults ###
 REPO="https://github.com/apache/spark"
@@ -79,44 +74,43 @@ echo "Running tests on cluster $MASTER against $REPO."
 echo "Spark images will be created in $IMAGE_REPO"
 
 set -ex
-root=$(pwd)
-
+TEST_ROOT=$(git rev-parse --show-toplevel)
+SPARK_REPO_ROOT="$TEST_ROOT/spark"
 # clone spark distribution if needed.
-if [ -d "spark" ];
+if [ -d "$SPARK_REPO_ROOT" ];
 then
-  (cd spark && git pull origin $BRANCH);
+  (cd $SPARK_REPO_ROOT && git pull origin $BRANCH);
 else
-  git clone $REPO;
+  git clone $REPO $SPARK_REPO_ROOT
 fi
 
-cd spark
+cd $SPARK_REPO_ROOT
 git checkout -B $BRANCH origin/$BRANCH
 ./dev/make-distribution.sh --tgz -Phadoop-2.7 -Pkubernetes -DskipTests
-tag=$(git rev-parse HEAD | cut -c -6)
-echo "Spark distribution built at SHA $tag"
+TAG=$(git rev-parse HEAD | cut -c -6)
+echo "Spark distribution built at SHA $TAG"
 
 if [[ $DEPLOY_MODE == cloud ]] ;
 then
-  cd dist && ./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $tag build
+  cd dist && ./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $TAG build
   if [[ $IMAGE_REPO == gcr.io* ]] ;
   then
-    gcloud docker -- push $IMAGE_REPO/spark-driver:$tag && \
-    gcloud docker -- push $IMAGE_REPO/spark-executor:$tag && \
-    gcloud docker -- push $IMAGE_REPO/spark-init:$tag
+    gcloud docker -- push $IMAGE_REPO/spark-driver:$TAG && \
+    gcloud docker -- push $IMAGE_REPO/spark-executor:$TAG && \
+    gcloud docker -- push $IMAGE_REPO/spark-init:$TAG
   else
-    ./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $tag push
+    ./sbin/build-push-docker-images.sh -r $IMAGE_REPO -t $TAG push
   fi
 else
   # -m option for minikube.
-  cd dist && ./sbin/build-push-docker-images.sh -m -r $IMAGE_REPO -t $tag build
+  cd dist && ./sbin/build-push-docker-images.sh -m -r $IMAGE_REPO -t $TAG build
 fi
 
-cd $root/integration-test
-$root/spark/build/mvn clean -Ddownload.plugin.skip=true integration-test \
-  -Dspark-distro-tgz=$root/spark/*.tgz \
+$TEST_ROOT/integration-test/build/mvn clean -Ddownload.plugin.skip=true integration-test \
+  -Dspark-distro-tgz=$SPARK_REPO_ROOT/*.tgz \
   -DextraScalaTestArgs="-Dspark.kubernetes.test.master=k8s://$MASTER \
-  -Dspark.docker.test.driverImage=$IMAGE_REPO/spark-driver:$tag \
-  -Dspark.docker.test.executorImage=$IMAGE_REPO/spark-executor:$tag \
-  -Dspark.docker.test.initContainerImage=$IMAGE_REPO/spark-init:$tag" || :
+  -Dspark.docker.test.driverImage=$IMAGE_REPO/spark-driver:$TAG \
+  -Dspark.docker.test.executorImage=$IMAGE_REPO/spark-executor:$TAG \
+  -Dspark.docker.test.initContainerImage=$IMAGE_REPO/spark-init:$TAG" || :
 
 echo "TEST SUITE FINISHED"
0 commit comments