16 | 16 | # See the License for the specific language governing permissions and |
17 | 17 | # limitations under the License. |
18 | 18 | # |
| 19 | +set -ex |
19 | 20 | TEST_ROOT_DIR=$(git rev-parse --show-toplevel) |
20 | 21 | UNPACKED_SPARK_TGZ="$TEST_ROOT_DIR/target/spark-dist-unpacked" |
21 | 22 | IMAGE_TAG_OUTPUT_FILE="$TEST_ROOT_DIR/target/image-tag.txt" |
@@ -58,50 +59,59 @@ while (( "$#" )); do |
58 | 59 | shift |
59 | 60 | done |
60 | 61 |
61 | | -rm -rf $UNPACKED_SPARK_TGZ |
| 62 | +rm -rf "$UNPACKED_SPARK_TGZ" |
62 | 63 | if [[ $SPARK_TGZ == "N/A" && $IMAGE_TAG == "N/A" ]]; |
63 | 64 | then |
64 | | - echo "Must specify a Spark tarball to build Docker images against with --spark-tgz OR image with --image-tag." && exit 1; |
65 | | -else |
| 65 | + # If there is no spark image tag to test with and no src dir, build from current |
| 66 | + SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" |
| 67 | + SPARK_INPUT_DIR="$(cd "$SCRIPT_DIR/"../../../../ >/dev/null 2>&1 && pwd )" |
| 68 | + DOCKER_FILE_BASE_PATH="$SPARK_INPUT_DIR/resource-managers/kubernetes/docker/src/main/dockerfiles/spark" |
| 69 | +elif [[ $IMAGE_TAG == "N/A" ]]; |
| 70 | +then |
| 71 | + # If there is a test src tarball and no image tag we will want to build from that |
66 | 72 | mkdir -p $UNPACKED_SPARK_TGZ |
67 | 73 | tar -xzvf $SPARK_TGZ --strip-components=1 -C $UNPACKED_SPARK_TGZ; |
| 74 | + SPARK_INPUT_DIR="$UNPACKED_SPARK_TGZ" |
| 75 | + DOCKER_FILE_BASE_PATH="$SPARK_INPUT_DIR/kubernetes/dockerfiles/spark" |
68 | 76 | fi |
69 | 77 |
| 78 | + |
| 79 | +# If there is a specific Spark image skip building and extraction/copy |
70 | 80 | if [[ $IMAGE_TAG == "N/A" ]]; |
71 | 81 | then |
72 | 82 | IMAGE_TAG=$(uuidgen); |
73 | | - cd $UNPACKED_SPARK_TGZ |
| 83 | + cd $SPARK_INPUT_DIR |
74 | 84 |
75 | 85 | # Build PySpark image |
76 | | - LANGUAGE_BINDING_BUILD_ARGS="-p $UNPACKED_SPARK_TGZ/kubernetes/dockerfiles/spark/bindings/python/Dockerfile" |
| 86 | + LANGUAGE_BINDING_BUILD_ARGS="-p $DOCKER_FILE_BASE_PATH/bindings/python/Dockerfile" |
77 | 87 |
78 | 88 | # Build SparkR image |
79 | | - LANGUAGE_BINDING_BUILD_ARGS="$LANGUAGE_BINDING_BUILD_ARGS -R $UNPACKED_SPARK_TGZ/kubernetes/dockerfiles/spark/bindings/R/Dockerfile" |
| 89 | + LANGUAGE_BINDING_BUILD_ARGS="$LANGUAGE_BINDING_BUILD_ARGS -R $DOCKER_FILE_BASE_PATH/bindings/R/Dockerfile" |
80 | 90 |
81 | 91 | case $DEPLOY_MODE in |
82 | 92 | cloud) |
83 | 93 | # Build images |
84 | | - $UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build |
| 94 | + $SPARK_INPUT_DIR/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build |
85 | 95 |
86 | 96 | # Push images appropriately |
87 | 97 | if [[ $IMAGE_REPO == gcr.io* ]] ; |
88 | 98 | then |
89 | 99 | gcloud docker -- push $IMAGE_REPO/spark:$IMAGE_TAG |
90 | 100 | else |
91 | | - $UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG push |
| 101 | + $SPARK_INPUT_DIR/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG push |
92 | 102 | fi |
93 | 103 | ;; |
94 | 104 |
95 | 105 | docker-for-desktop) |
96 | 106 | # Only need to build as this will place it in our local Docker repo which is all |
97 | 107 | # we need for Docker for Desktop to work so no need to also push |
98 | | - $UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build |
| 108 | + $SPARK_INPUT_DIR/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build |
99 | 109 | ;; |
100 | 110 |
101 | 111 | minikube) |
102 | 112 | # Only need to build and if we do this with the -m option for minikube we will |
103 | 113 | # build the images directly using the minikube Docker daemon so no need to push |
104 | | - $UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -m -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build |
| 114 | + $SPARK_INPUT_DIR/bin/docker-image-tool.sh -m -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build |
105 | 115 | ;; |
106 | 116 | *) |
107 | 117 | echo "Unrecognized deploy mode $DEPLOY_MODE" && exit 1 |
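The net effect of the new branches is only to choose where docker-image-tool.sh and the binding Dockerfiles are taken from (current checkout vs. unpacked tarball); the build step itself is unchanged. As a rough illustration, not a command taken from the script, the minikube path with a current checkout boils down to something like the following (the repo name and tag are made up; the Dockerfile paths are the in-tree ones used by the first branch):

# Run from the root of a Spark checkout (what the script calls SPARK_INPUT_DIR).
# -m builds against the minikube Docker daemon, so no push step is needed;
# -p and -R point at the PySpark and SparkR binding Dockerfiles.
./bin/docker-image-tool.sh \
  -m \
  -r docker.io/some-repo \
  -t "$(uuidgen)" \
  -p resource-managers/kubernetes/docker/src/main/dockerfiles/spark/bindings/python/Dockerfile \
  -R resource-managers/kubernetes/docker/src/main/dockerfiles/spark/bindings/R/Dockerfile \
  build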