Skip to content

Commit 40489c8

Browse files
Merge remote-tracking branch 'origin/master' into ZEPPELIN-2367
2 parents ee8a6b5 + f9830a7 commit 40489c8

File tree

28 files changed

+722
-432
lines changed

28 files changed

+722
-432
lines changed

.travis.yml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -59,19 +59,19 @@ matrix:
5959

6060
# Test spark module for 2.1.0 with scala 2.11, livy
6161
- jdk: "oraclejdk7"
62-
env: SCALA_VER="2.11" SPARK_VER="2.1.0" HADOOP_VER="2.6" PROFILE="-Pspark-2.1 -Phadoop-2.6 -Psparkr -Pscala-2.11" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark,livy" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.livy.* -DfailIfNoTests=false"
62+
env: SCALA_VER="2.11" SPARK_VER="2.1.0" HADOOP_VER="2.6" PROFILE="-Pspark-2.1 -Phadoop-2.6 -Pscala-2.11" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark,livy" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.livy.* -DfailIfNoTests=false"
6363

6464
# Test spark module for 2.0.2 with scala 2.11
6565
- jdk: "oraclejdk7"
66-
env: SCALA_VER="2.11" SPARK_VER="2.0.2" HADOOP_VER="2.6" PROFILE="-Pspark-2.0 -Phadoop-2.6 -Psparkr -Pscala-2.11" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
66+
env: SCALA_VER="2.11" SPARK_VER="2.0.2" HADOOP_VER="2.6" PROFILE="-Pspark-2.0 -Phadoop-2.6 -Pscala-2.11" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
6767

6868
# Test spark module for 1.6.3 with scala 2.10
6969
- jdk: "oraclejdk7"
70-
env: SCALA_VER="2.10" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6 -Psparkr -Pscala-2.10" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
70+
env: SCALA_VER="2.10" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6 -Pscala-2.10" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.*,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
7171

7272
# Test spark module for 1.6.3 with scala 2.11
7373
- jdk: "oraclejdk7"
74-
env: SCALA_VER="2.11" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6 -Psparkr -Pscala-2.11" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
74+
env: SCALA_VER="2.11" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6 -Pscala-2.11" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
7575

7676
# Test python/pyspark with python 2
7777
- jdk: "oraclejdk7"

dev/create_release.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -106,8 +106,8 @@ function make_binary_release() {
106106

107107
git_clone
108108
make_source_package
109-
make_binary_release all "-Pspark-2.1 -Phadoop-2.6 -Pyarn -Psparkr -Pscala-${SCALA_VERSION}"
110-
make_binary_release netinst "-Pspark-2.1 -Phadoop-2.6 -Pyarn -Psparkr -Pscala-${SCALA_VERSION} -pl zeppelin-interpreter,zeppelin-zengine,:zeppelin-display_${SCALA_VERSION},:zeppelin-spark-dependencies_${SCALA_VERSION},:zeppelin-spark_${SCALA_VERSION},zeppelin-web,zeppelin-server,zeppelin-distribution -am"
109+
make_binary_release all "-Pspark-2.1 -Phadoop-2.6 -Pyarn -Pscala-${SCALA_VERSION}"
110+
make_binary_release netinst "-Pspark-2.1 -Phadoop-2.6 -Pyarn -Pscala-${SCALA_VERSION} -pl zeppelin-interpreter,zeppelin-zengine,:zeppelin-display_${SCALA_VERSION},:zeppelin-spark-dependencies_${SCALA_VERSION},:zeppelin-spark_${SCALA_VERSION},zeppelin-web,zeppelin-server,zeppelin-distribution -am"
111111

112112
# remove non release files and dirs
113113
rm -rf "${WORKING_DIR}/zeppelin"

dev/publish_release.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ if [[ $RELEASE_VERSION == *"SNAPSHOT"* ]]; then
4646
DO_SNAPSHOT="yes"
4747
fi
4848

49-
PUBLISH_PROFILES="-Ppublish-distr -Pspark-2.1 -Phadoop-2.6 -Pyarn -Psparkr -Pr"
49+
PUBLISH_PROFILES="-Ppublish-distr -Pspark-2.1 -Phadoop-2.6 -Pyarn -Pr"
5050
PROJECT_OPTIONS="-pl !zeppelin-distribution"
5151
NEXUS_STAGING="https://repository.apache.org/service/local/staging"
5252
NEXUS_PROFILE="153446d1ac37c4"

docs/install/build.md

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ If you're unsure about the options, use the same commands that creates official
6969
# update all pom.xml to use scala 2.11
7070
./dev/change_scala_version.sh 2.11
7171
# build zeppelin with all interpreters and include latest version of Apache spark support for local mode.
72-
mvn clean package -DskipTests -Pspark-2.0 -Phadoop-2.4 -Pyarn -Psparkr -Pr -Pscala-2.11
72+
mvn clean package -DskipTests -Pspark-2.0 -Phadoop-2.4 -Pyarn -Pr -Pscala-2.11
7373
```
7474

7575
#### 3. Done
@@ -149,10 +149,6 @@ enable YARN support for local mode
149149

150150
enable [R](https://www.r-project.org/) support with [SparkR](https://spark.apache.org/docs/latest/sparkr.html) integration.
151151

152-
##### `-Psparkr` (optional)
153-
154-
another [R](https://www.r-project.org/) support with [SparkR](https://spark.apache.org/docs/latest/sparkr.html) integration as well as local mode support.
155-
156152
##### `-Pvendor-repo` (optional)
157153

158154
enable 3rd party vendor repository (cloudera)
@@ -184,14 +180,14 @@ Here are some examples with several options:
184180
```bash
185181
# build with spark-2.1, scala-2.11
186182
./dev/change_scala_version.sh 2.11
187-
mvn clean package -Pspark-2.1 -Phadoop-2.4 -Pyarn -Psparkr -Pscala-2.11 -DskipTests
183+
mvn clean package -Pspark-2.1 -Phadoop-2.4 -Pyarn -Pscala-2.11 -DskipTests
188184

189185
# build with spark-2.0, scala-2.11
190186
./dev/change_scala_version.sh 2.11
191-
mvn clean package -Pspark-2.0 -Phadoop-2.4 -Pyarn -Psparkr -Pscala-2.11 -DskipTests
187+
mvn clean package -Pspark-2.0 -Phadoop-2.4 -Pyarn -Pscala-2.11 -DskipTests
192188

193189
# build with spark-1.6, scala-2.10
194-
mvn clean package -Pspark-1.6 -Phadoop-2.4 -Pyarn -Psparkr -DskipTests
190+
mvn clean package -Pspark-1.6 -Phadoop-2.4 -Pyarn -DskipTests
195191

196192
# spark-cassandra integration
197193
mvn clean package -Pcassandra-spark-1.5 -Dhadoop.version=2.6.0 -Phadoop-2.6 -DskipTests -DskipTests

docs/install/virtual_machine.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ This assumes you've already cloned the project either on the host machine in the
110110
111111
```
112112
cd /zeppelin
113-
mvn clean package -Pspark-1.6 -Phadoop-2.4 -Psparkr -DskipTests
113+
mvn clean package -Pspark-1.6 -Phadoop-2.4 -DskipTests
114114
./bin/zeppelin-daemon.sh start
115115
```
116116

docs/interpreter/jdbc.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -164,6 +164,10 @@ There are more JDBC interpreter properties you can specify like below.
164164
<td>zeppelin.jdbc.keytab.location</td>
165165
<td>The path to the keytab file</td>
166166
</tr>
167+
<tr>
168+
<td>zeppelin.jdbc.auth.kerberos.proxy.enable</td>
169+
<td>When auth type is Kerberos, enable/disable Kerberos proxy with the login user to get the connection. Default value is true.</td>
170+
</tr>
167171
<tr>
168172
<td>default.jceks.file</td>
169173
<td>jceks store path (e.g: jceks://file/tmp/zeppelin.jceks)</td>

geode/pom.xml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@
3434

3535
<properties>
3636
<!--library versions-->
37-
<geode.version>1.0.0-incubating-SNAPSHOT</geode.version>
37+
<geode.version>1.1.0</geode.version>
3838
<commons.exec.version>1.3</commons.exec.version>
3939
</properties>
4040

@@ -48,7 +48,7 @@
4848

4949
<dependency>
5050
<groupId>org.apache.geode</groupId>
51-
<artifactId>gemfire-core</artifactId>
51+
<artifactId>geode-core</artifactId>
5252
<version>${geode.version}</version>
5353
</dependency>
5454

geode/src/main/java/org/apache/zeppelin/geode/GeodeOqlInterpreter.java

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -29,12 +29,12 @@
2929
import org.slf4j.Logger;
3030
import org.slf4j.LoggerFactory;
3131

32-
import com.gemstone.gemfire.cache.client.ClientCache;
33-
import com.gemstone.gemfire.cache.client.ClientCacheFactory;
34-
import com.gemstone.gemfire.cache.query.QueryService;
35-
import com.gemstone.gemfire.cache.query.SelectResults;
36-
import com.gemstone.gemfire.cache.query.Struct;
37-
import com.gemstone.gemfire.pdx.PdxInstance;
32+
import org.apache.geode.cache.client.ClientCache;
33+
import org.apache.geode.cache.client.ClientCacheFactory;
34+
import org.apache.geode.cache.query.QueryService;
35+
import org.apache.geode.cache.query.SelectResults;
36+
import org.apache.geode.cache.query.Struct;
37+
import org.apache.geode.pdx.PdxInstance;
3838

3939
/**
4040
* Apache Geode OQL Interpreter (http://geode.apache.org)

geode/src/test/java/org/apache/zeppelin/geode/GeodeOqlInterpreterTest.java

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -37,14 +37,14 @@
3737
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
3838
import org.junit.Test;
3939

40-
import com.gemstone.gemfire.cache.query.QueryService;
41-
import com.gemstone.gemfire.cache.query.SelectResults;
42-
import com.gemstone.gemfire.cache.query.Struct;
43-
import com.gemstone.gemfire.cache.query.internal.StructImpl;
44-
import com.gemstone.gemfire.cache.query.internal.types.StructTypeImpl;
45-
import com.gemstone.gemfire.pdx.PdxInstance;
46-
import com.gemstone.gemfire.pdx.internal.PdxInstanceImpl;
47-
import com.gemstone.gemfire.pdx.internal.PdxType;
40+
import org.apache.geode.cache.query.QueryService;
41+
import org.apache.geode.cache.query.SelectResults;
42+
import org.apache.geode.cache.query.Struct;
43+
import org.apache.geode.cache.query.internal.StructImpl;
44+
import org.apache.geode.cache.query.internal.types.StructTypeImpl;
45+
import org.apache.geode.pdx.PdxInstance;
46+
import org.apache.geode.pdx.internal.PdxInstanceImpl;
47+
import org.apache.geode.pdx.internal.PdxType;
4848

4949
public class GeodeOqlInterpreterTest {
5050

jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java

Lines changed: 41 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -370,7 +370,8 @@ public Connection getConnection(String propertyKey, InterpreterContext interpret
370370
JDBCSecurityImpl.createSecureConfiguration(property, authType);
371371
switch (authType) {
372372
case KERBEROS:
373-
if (user == null) {
373+
if (user == null || "false".equalsIgnoreCase(
374+
property.getProperty("zeppelin.jdbc.auth.kerberos.proxy.enable"))) {
374375
connection = getConnectionFromPool(url, user, propertyKey, properties);
375376
} else {
376377
if (url.trim().startsWith("jdbc:hive")) {
@@ -515,19 +516,36 @@ private boolean isDDLCommand(int updatedCount, int columnCount) throws SQLExcept
515516
protected ArrayList<String> splitSqlQueries(String sql) {
516517
ArrayList<String> queries = new ArrayList<>();
517518
StringBuilder query = new StringBuilder();
518-
Character character;
519+
char character;
519520

520521
Boolean antiSlash = false;
522+
Boolean multiLineComment = false;
523+
Boolean singleLineComment = false;
521524
Boolean quoteString = false;
522525
Boolean doubleQuoteString = false;
523526

524527
for (int item = 0; item < sql.length(); item++) {
525528
character = sql.charAt(item);
526529

527-
if (character.equals('\\')) {
530+
if ((singleLineComment && (character == '\n' || item == sql.length() - 1))
531+
|| (multiLineComment && character == '/' && sql.charAt(item - 1) == '*')) {
532+
singleLineComment = false;
533+
multiLineComment = false;
534+
if (item == sql.length() - 1 && query.length() > 0) {
535+
queries.add(StringUtils.trim(query.toString()));
536+
}
537+
continue;
538+
}
539+
540+
if (singleLineComment || multiLineComment) {
541+
continue;
542+
}
543+
544+
if (character == '\\') {
528545
antiSlash = true;
529546
}
530-
if (character.equals('\'')) {
547+
548+
if (character == '\'') {
531549
if (antiSlash) {
532550
antiSlash = false;
533551
} else if (quoteString) {
@@ -536,7 +554,8 @@ protected ArrayList<String> splitSqlQueries(String sql) {
536554
quoteString = true;
537555
}
538556
}
539-
if (character.equals('"')) {
557+
558+
if (character == '"') {
540559
if (antiSlash) {
541560
antiSlash = false;
542561
} else if (doubleQuoteString) {
@@ -546,16 +565,30 @@ protected ArrayList<String> splitSqlQueries(String sql) {
546565
}
547566
}
548567

549-
if (character.equals(';') && !antiSlash && !quoteString && !doubleQuoteString) {
550-
queries.add(query.toString());
568+
if (!quoteString && !doubleQuoteString && !multiLineComment && !singleLineComment
569+
&& sql.length() > item + 1) {
570+
if (character == '-' && sql.charAt(item + 1) == '-') {
571+
singleLineComment = true;
572+
continue;
573+
}
574+
575+
if (character == '/' && sql.charAt(item + 1) == '*') {
576+
multiLineComment = true;
577+
continue;
578+
}
579+
}
580+
581+
if (character == ';' && !antiSlash && !quoteString && !doubleQuoteString) {
582+
queries.add(StringUtils.trim(query.toString()));
551583
query = new StringBuilder();
552584
} else if (item == sql.length() - 1) {
553585
query.append(character);
554-
queries.add(query.toString());
586+
queries.add(StringUtils.trim(query.toString()));
555587
} else {
556588
query.append(character);
557589
}
558590
}
591+
559592
return queries;
560593
}
561594

0 commit comments

Comments (0)