
Commit 3aafc76

Don't use JAR_CMD in the archive check unless it is present. Always add the Datanucleus jars if present, to avoid needing a check involving JAR_CMD
1 parent 58e7198 commit 3aafc76


bin/compute-classpath.sh

Lines changed: 12 additions & 13 deletions
@@ -93,14 +93,17 @@ if [ "$num_jars" -gt "1" ]; then
   exit 1
 fi
 
-# Verify that versions of java used to build the jars and run Spark are compatible
-jar_error_check=$("$JAR_CMD" -tf "$ASSEMBLY_JAR" nonexistent/class/path 2>&1)
-if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
-  echo "Loading Spark jar with '$JAR_CMD' failed. " 1>&2
-  echo "This is likely because Spark was compiled with Java 7 and run " 1>&2
-  echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark " 1>&2
-  echo "or build Spark with Java 6." 1>&2
-  exit 1
+# Only able to make this check if 'jar' command is available
+if [ $(command -v "$JAR_CMD") ] ; then
+  # Verify that versions of java used to build the jars and run Spark are compatible
+  jar_error_check=$("$JAR_CMD" -tf "$ASSEMBLY_JAR" nonexistent/class/path 2>&1)
+  if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
+    echo "Loading Spark jar with '$JAR_CMD' failed. " 1>&2
+    echo "This is likely because Spark was compiled with Java 7 and run " 1>&2
+    echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark " 1>&2
+    echo "or build Spark with Java 6." 1>&2
+    exit 1
+  fi
 fi
 
 CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
@@ -121,11 +124,7 @@ datanucleus_jars="$(find "$datanucleus_dir" 2>/dev/null | grep "datanucleus-.*\\
 datanucleus_jars="$(echo "$datanucleus_jars" | tr "\n" : | sed s/:$//g)"
 
 if [ -n "$datanucleus_jars" ]; then
-  hive_files=$("$JAR_CMD" -tf "$ASSEMBLY_JAR" org/apache/hadoop/hive/ql/exec 2>/dev/null)
-  if [ -n "$hive_files" ]; then
-    echo "Spark assembly has been built with Hive, including Datanucleus jars on classpath" 1>&2
-    CLASSPATH="$CLASSPATH:$datanucleus_jars"
-  fi
+  CLASSPATH="$CLASSPATH:$datanucleus_jars"
 fi
 
 # Add test classes if we're running from SBT or Maven with SPARK_TESTING set to 1
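
For context on the first hunk: the compatibility check itself is unchanged; it is only wrapped in a command -v guard so the script no longer assumes a 'jar' executable exists. Below is a minimal standalone sketch of that pattern, using hypothetical placeholder values for JAR_CMD and ASSEMBLY_JAR (the real script derives these earlier from JAVA_HOME and the assembly directory):

# Minimal sketch of the guard pattern (hypothetical placeholder values; the real
# script computes JAR_CMD from JAVA_HOME and locates the assembly jar itself).
JAR_CMD="jar"
ASSEMBLY_JAR="/path/to/spark-assembly.jar"

# Only attempt the compatibility check when a 'jar' command is actually available.
if [ $(command -v "$JAR_CMD") ]; then
  # Listing even a nonexistent entry forces 'jar' to read the archive index, so a
  # Java 6 'jar' opening an assembly built with Java 7 fails here with
  # "invalid CEN header" instead of failing later at runtime (see SPARK-1703).
  jar_error_check=$("$JAR_CMD" -tf "$ASSEMBLY_JAR" nonexistent/class/path 2>&1)
  if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
    echo "Spark assembly jar appears incompatible with this JVM (see SPARK-1703)" 1>&2
    exit 1
  fi
fi

A more defensive spelling of the guard is: if command -v "$JAR_CMD" > /dev/null; then ... which tests the exit status directly rather than an unquoted command substitution; the commit keeps the bracket form shown in the diff. The second hunk is simpler still: it drops the Hive probe (which also relied on JAR_CMD) and appends the Datanucleus jars to CLASSPATH whenever they are found.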
