|
1 | 1 | #!/usr/bin/env bash |
2 | 2 |
|
3 | | -# This file contains environment variables required to run Spark. Copy it as |
4 | | -# spark-env.sh and edit that to configure Spark for your site. |
5 | | -# |
6 | | -# The following variables can be set in this file: |
7 | | -# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node |
8 | | -# - MESOS_NATIVE_LIBRARY, to point to your libmesos.so if you use Mesos |
9 | | -# - SPARK_JAVA_OPTS, to set node-specific JVM options for Spark. Note that |
10 | | -# we recommend setting app-wide options in the application's driver program. |
11 | | -# Examples of node-specific options : -Dspark.local.dir, GC options |
12 | | -# Examples of app-wide options : -Dspark.serializer |
13 | | -# |
14 | | -# If using the standalone deploy mode, you can also set variables for it here: |
15 | | -# - SPARK_MASTER_IP, to bind the master to a different IP address or hostname |
16 | | -# - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports |
17 | | -# - SPARK_WORKER_CORES, to set the number of cores to use on this machine |
18 | | -# - SPARK_WORKER_MEMORY, to set how much memory to use (e.g. 1000m, 2g) |
19 | | -# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT |
20 | | -# - SPARK_WORKER_INSTANCES, to set the number of worker processes per node |
21 | | -# - SPARK_WORKER_DIR, to set the working directory of worker processes |
22 | 3 | echoerr() { echo "$@" 1>&2; } |
23 | 4 | FWDIR="$(cd `dirname $0`/..; pwd)" |
24 | 5 |
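For orientation: FWDIR above resolves to the parent of the directory containing $0, i.e. the root of the Spark checkout when this file is sourced by a launcher under bin/ or run from conf/. A rough sketch of the idiom (quoting added; illustrative only, not part of the diff):

    # If $0 is <spark-home>/bin/pyspark:
    #   dirname "$0"            -> <spark-home>/bin
    #   cd <spark-home>/bin/..  -> <spark-home>
    FWDIR="$(cd "$(dirname "$0")/.."; pwd)"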
|
25 | | -if [[ -z "$MASTER" ]]; then |
26 | | - if [ "$(uname)" == "Darwin" ]; then |
27 | | - echoerr "Sparkify: Connecting to chicago spark cluster ..." |
28 | | - export MASTER=spark://dn05.chi.shopify.com:7077 |
29 | 6 |
|
30 | | - # Figure out the local IP to bind spark to for shell <-> master communication |
31 | | - vpn_interface=tap0; |
32 | | - get_ip_command="ifconfig $vpn_interface 2>&1 | grep 'inet' | awk '{print \$2}'" |
33 | | - if ifconfig $vpn_interface > /dev/null 2>&1; then |
34 | | - export SPARK_LOCAL_IP=`bash -c "$get_ip_command"` |
35 | | - else |
36 | | - echoerr "ERROR: could not find an VPN interface to connect to the Shopify Spark Cluster! Please connect your VPN client! See https://vault-unicorn.shopify.com/VPN---Servers ." |
37 | | - exit 1 |
38 | | - fi |
39 | | - fi |
| 7 | +if [ "$(uname)" == "Darwin" ]; then |
| 8 | + case "$PYTHON_ENV" in |
| 9 | + 'remote_development') |
| 10 | +      echoerr "Sparkify: Connecting to Chicago Spark cluster ..."
| 11 | + # Figure out the local IP to bind spark to for shell <-> master communication |
| 12 | +      vpn_interface=tap0
| 13 | + get_ip_command="ifconfig $vpn_interface 2>&1 | grep 'inet' | awk '{print \$2}'" |
| 14 | + if ifconfig $vpn_interface > /dev/null 2>&1; then |
| 15 | + export SPARK_LOCAL_IP=`bash -c "$get_ip_command"` |
| 16 | + else |
| 17 | +        echoerr "ERROR: could not find a VPN interface to connect to the Shopify Spark Cluster! Please connect your VPN client! See https://vault-unicorn.shopify.com/VPN---Servers ."
| 18 | + exit 1 |
| 19 | + fi |
| 20 | + |
| 21 | + export HADOOP_CONF_DIR=$FWDIR/conf/conf.cloudera.yarn |
| 22 | + ;; |
| 23 | + 'test'|'development') |
| 24 | + export SPARK_LOCAL_IP=127.0.0.1 |
| 25 | + ;; |
| 26 | +  esac
40 | 27 | fi |
41 | 28 |
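An aside on the new IP lookup: grep 'inet' also matches inet6 lines in BSD/macOS ifconfig output, so the command can emit more than one address on an interface that has both. A hedged, slightly tighter variant of the same extraction (illustrative only, not part of this change):

    # Assumes BSD/macOS ifconfig output such as "inet 10.0.0.5 netmask ...".
    # The trailing space in the pattern skips inet6 lines; exit stops after the first match.
    SPARK_LOCAL_IP=$(ifconfig tap0 2>/dev/null | awk '/inet /{print $2; exit}')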
|
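Because spark-env.sh is sourced by Spark's launch scripts, the branch taken in the case block above is driven entirely by the PYTHON_ENV value inherited from the invoking shell; an unset or unrecognized value falls through without exporting anything. A hedged usage sketch (bin/pyspark is the stock launcher path and an assumption here, not something shown in the diff):

    # Hypothetical invocations from a Mac checkout:
    PYTHON_ENV=remote_development bin/pyspark   # VPN check, cluster HADOOP_CONF_DIR
    PYTHON_ENV=test bin/pyspark                 # SPARK_LOCAL_IP pinned to 127.0.0.1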
42 | 29 | if which ipython > /dev/null; then |
43 | 30 | export IPYTHON=1 |
44 | 31 | fi |
45 | | - |
46 | | -if [[ $MASTER == 'local' ]]; then |
47 | | - export SPARK_LOCAL_IP=127.0.0.1 |
48 | | -fi |
49 | | - |
50 | | -export HADOOP_CONF_DIR=$FWDIR/conf/conf.cloudera.yarn |
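A final aside on the which ipython check kept above: which is not guaranteed to be quiet (or even present) on every platform, and the redirect there only silences stdout. A sketch of the more portable POSIX idiom for the same check (illustrative only, not part of this change):

    # command -v is a shell builtin and exits non-zero when ipython is absent.
    if command -v ipython > /dev/null 2>&1; then
      export IPYTHON=1
    fi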