Skip to content

Commit b6b88be

Browse files
committed
Add python dependencies to .travis.yml
1 parent e8cea41 commit b6b88be

File tree

10 files changed

+404
-10
lines changed

10 files changed

+404
-10
lines changed

.travis.yml

Lines changed: 10 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -66,16 +66,22 @@ matrix:
6666
- jdk: "oraclejdk7"
6767
env: TEST_SELENIUM="true" SCALA_VER="2.10" SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Phadoop-2.3 -Ppyspark -Pexamples" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark -Dtest=org.apache.zeppelin.AbstractFunctionalSuite -DfailIfNoTests=false"
6868

69+
# Test python/pyspark with python2
70+
- jdk: "oraclejdk7"
71+
env: PYTHON="2.7" SCALA_VER="2.10" SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -pl spark,python -am -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python -Dtest=org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.* -Dpyspark.test.exclude='' -DfailIfNoTests=false"
72+
73+
# Test python/pyspark with python3
74+
- jdk: "oraclejdk7"
75+
env: PYTHON="3.5" SCALA_VER="2.10" SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -pl spark,python -am -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python -Dtest=org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.* -Dpyspark.test.exclude='' -DfailIfNoTests=false"
76+
6977
before_install:
7078
- echo "MAVEN_OPTS='-Xms1024M -Xmx2048M -XX:MaxPermSize=1024m -XX:-UseGCOverheadLimit'" >> ~/.mavenrc
79+
- ./testing/install_external_dependencies.sh
7180
- ls -la .spark-dist ${HOME}/.m2/repository/.cache/maven-download-plugin || true
7281
- ls .node_modules && cp -r .node_modules zeppelin-web/node_modules || echo "node_modules are not cached"
73-
- mkdir -p ~/R
74-
- echo 'R_LIBS=~/R' > ~/.Renviron
75-
- R -e "install.packages('knitr', repos = 'http://cran.us.r-project.org', lib='~/R')"
76-
- export R_LIBS='~/R'
7782
- "/sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -ac -screen 0 1600x1024x16"
7883
- ./dev/change_scala_version.sh $SCALA_VER
84+
- source ~/.environ
7985

8086
install:
8187
- mvn -Dorg.slf4j.simpleLogger.defaultLogLevel=warn $BUILD_FLAG $PROFILE -B
@@ -99,4 +105,3 @@ after_failure:
99105
- cat zeppelin-distribution/target/zeppelin-*-SNAPSHOT/zeppelin-*-SNAPSHOT/logs/zeppelin*.out
100106
- cat zeppelin-web/npm-debug.log
101107
- cat spark-*/logs/*
102-

interpreter/lib/python/backend_zinline.py

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -93,6 +93,7 @@ def get_bytes(self, **kwargs):
9393
# Express the image as bytes
9494
buf = BytesIO()
9595
self.print_figure(buf, **kwargs)
96+
fmt = fmt.encode()
9697
byte_str = b"data:image/%s;base64," %fmt
9798
byte_str += base64.b64encode(buf.getvalue())
9899

python/src/main/java/org/apache/zeppelin/python/PythonInterpreter.java

Lines changed: 8 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -57,6 +57,7 @@ public class PythonInterpreter extends Interpreter {
5757
private Boolean py4JisInstalled = false;
5858
private InterpreterContext context;
5959
private Pattern errorInLastLine = Pattern.compile(".*(Error|Exception): .*$");
60+
private String pythonPath;
6061
private int maxResult;
6162

6263
PythonProcess process = null;
@@ -74,6 +75,8 @@ public void open() {
7475
registerHook(HookType.POST_EXEC_DEV, "z._displayhook()");
7576
}
7677

78+
// Add zeppelin-bundled libs to PYTHONPATH
79+
setPythonPath("../interpreter/lib/python:$PYTHONPATH");
7780
LOG.info("Starting Python interpreter ---->");
7881
LOG.info("Python path is set to:" + property.getProperty(ZEPPELIN_PYTHON));
7982

@@ -198,13 +201,17 @@ public List<InterpreterCompletion> completion(String buf, int cursor) {
198201
return null;
199202
}
200203

204+
public void setPythonPath(String pythonPath) {
205+
this.pythonPath = pythonPath;
206+
}
207+
201208
public PythonProcess getPythonProcess() {
202209
if (process == null) {
203210
String binPath = getProperty(ZEPPELIN_PYTHON);
204211
if (pythonCommand != null) {
205212
binPath = pythonCommand;
206213
}
207-
return new PythonProcess(binPath);
214+
return new PythonProcess(binPath, pythonPath);
208215
} else {
209216
return process;
210217
}

python/src/main/java/org/apache/zeppelin/python/PythonProcess.java

Lines changed: 6 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -42,10 +42,12 @@ public class PythonProcess {
4242
Process process;
4343

4444
private String binPath;
45+
private String pythonPath;
4546
private long pid;
4647

47-
public PythonProcess(String binPath) {
48+
public PythonProcess(String binPath, String pythonPath) {
4849
this.binPath = binPath;
50+
this.pythonPath = pythonPath;
4951
}
5052

5153
public void open() throws IOException {
@@ -65,6 +67,9 @@ public void open() throws IOException {
6567
cmd = binPath + " -iu";
6668
}
6769
builder = new ProcessBuilder("bash", "-c", cmd);
70+
if (pythonPath != null) {
71+
builder.environment().put("PYTHONPATH", pythonPath);
72+
}
6873
}
6974

7075
builder.redirectErrorStream(true);

python/src/test/java/org/apache/zeppelin/python/PythonInterpreterWithPythonInstalledTest.java

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -102,7 +102,7 @@ public void testZeppelin1555() {
102102
realPython.open();
103103

104104
//when
105-
InterpreterResult ret1 = realPython.interpret("print \"...\"", null);
105+
InterpreterResult ret1 = realPython.interpret("print(\"...\")", null);
106106

107107
//then
108108
//System.out.println("\nInterpreter response: \n" + ret.message());

spark/pom.xml

Lines changed: 35 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -38,6 +38,12 @@
3838
<mockito.version>1.10.19</mockito.version>
3939
<powermock.version>1.6.4</powermock.version>
4040
<spark.version>2.0.1</spark.version>
41+
<pyspark.test.exclude>
42+
**/PySparkInterpreterMatplotlibTest.java
43+
</pyspark.test.exclude>
44+
<pyspark.test.include>
45+
**/*Test.*
46+
</pyspark.test.include>
4147
</properties>
4248

4349
<dependencies>
@@ -322,6 +328,7 @@
322328
<argLine>-Xmx1024m -XX:MaxPermSize=256m</argLine>
323329
<excludes>
324330
<exclude>**/SparkRInterpreterTest.java</exclude>
331+
<exclude>${pyspark.test.exclude}</exclude>
325332
</excludes>
326333
</configuration>
327334
</plugin>
@@ -431,11 +438,35 @@
431438
<exclude>**/SparkRInterpreter.java</exclude>
432439
</excludes>
433440
<testExcludes>
441+
<testExclude>${pyspark.test.exclude}</testExclude>
434442
<testExclude>**/SparkRInterpreterTest.java</testExclude>
435443
<testExclude>**/ZeppelinRTest.java</testExclude>
436444
</testExcludes>
437445
</configuration>
438446
</plugin>
447+
<plugin>
448+
<groupId>org.scala-tools</groupId>
449+
<artifactId>maven-scala-plugin</artifactId>
450+
<configuration>
451+
<excludes>
452+
<exclude>**/ZeppelinR.scala</exclude>
453+
<exclude>**/SparkRBackend.scala</exclude>
454+
</excludes>
455+
</configuration>
456+
</plugin>
457+
<plugin>
458+
<groupId>org.apache.maven.plugins</groupId>
459+
<artifactId>maven-surefire-plugin</artifactId>
460+
<configuration>
461+
<includes>
462+
<include>${pyspark.test.include}</include>
463+
</includes>
464+
<excludes>
465+
<exclude>${pyspark.test.exclude}</exclude>
466+
<exclude>**/SparkRInterpreterTest.java</exclude>
467+
</excludes>
468+
</configuration>
469+
</plugin>
439470
</plugins>
440471
</build>
441472

@@ -580,7 +611,9 @@
580611
<artifactId>maven-compiler-plugin</artifactId>
581612
<configuration>
582613
<excludes combine.self="override"></excludes>
583-
<testExcludes combine.self="override"></testExcludes>
614+
<testExcludes combine.self="override">
615+
<testExclude>${pyspark.test.exclude}</testExclude>
616+
</testExcludes>
584617
</configuration>
585618
</plugin>
586619
<plugin>
@@ -596,6 +629,7 @@
596629
<artifactId>maven-surefire-plugin</artifactId>
597630
<configuration>
598631
<excludes combine.self="override">
632+
<exclude>${pyspark.test.exclude}</exclude>
599633
</excludes>
600634
</configuration>
601635
</plugin>

spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -177,7 +177,8 @@ private Map setupPySparkEnv() throws IOException{
177177
Map env = EnvironmentUtils.getProcEnvironment();
178178
if (!env.containsKey("PYTHONPATH")) {
179179
SparkConf conf = getSparkConf();
180-
env.put("PYTHONPATH", conf.get("spark.submit.pyFiles").replaceAll(",", ":"));
180+
env.put("PYTHONPATH", conf.get("spark.submit.pyFiles").replaceAll(",", ":") +
181+
":../interpreter/lib/python");
181182
}
182183
return env;
183184
}

spark/src/main/resources/python/zeppelin_pyspark.py

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -29,6 +29,7 @@
2929
from pyspark.serializers import MarshalSerializer, PickleSerializer
3030
import ast
3131
import traceback
32+
import warnings
3233

3334
# for back compatibility
3435
from pyspark.sql import SQLContext, HiveContext, Row

0 commit comments

Comments (0)