Skip to content

Commit e80cfb7

Browse files
mmladenovski and tswast
authored and committed
---
yaml --- r: 30495 b: refs/heads/autosynth-automl c: 88f539a h: refs/heads/master i: 30493: 0baeedb 30491: 87d7d0a 30487: a910703 30479: 4b139bb 30463: 61d5885
1 parent 62ccda6 commit e80cfb7

7 files changed

Lines changed: 415 additions & 1 deletion

File tree

[refs]

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -121,7 +121,7 @@ refs/heads/spanner: b01127f885b4611bf1852abb0ce481eeb7fcc131
121121
refs/tags/v0.68.0: 9cc799fcf68c82ab431d425fefa58ef615ce8e5b
122122
refs/tags/v0.69.0: 78f67a29e8b9c46ba01de566a2eae0fd1c03edea
123123
refs/heads/autosynth-asset: bdb45634a0fe8f7a510692b56b31f5312e25f453
124-
refs/heads/autosynth-automl: 7ef8b85b7d1bd06fbf0f145f73ca9825b01589bb
124+
refs/heads/autosynth-automl: 88f539a363feff0b44b7b5318e8cd1c425702c89
125125
refs/heads/autosynth-bigquerydatatransfer: d88aa5aae5fd9d3c6d75bbab1a05162c6d4d948f
126126
refs/heads/autosynth-bigquerystorage: d2c53da3b012e38c662e4df0738042435f19365f
127127
refs/heads/autosynth-bigtable: 9e5429f45cf9face9fed585d0233534993e36b58
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
# Format: //devtools/kokoro/config/proto/build.proto

# Configure the docker image for kokoro-trampoline.
env_vars: {
  key: "TRAMPOLINE_IMAGE"
  value: "gcr.io/cloud-devrel-kokoro-resources/java8"
}

env_vars: {
  key: "INTEGRATION_TEST_ARGS"
  value: "google-cloud-clients/google-cloud-bigquerystorage"
}

env_vars: {
  key: "JOB_TYPE"
  value: "integration"
}

env_vars: {
  key: "GCLOUD_PROJECT"
  value: "gcloud-devel"
}

env_vars: {
  key: "GOOGLE_APPLICATION_CREDENTIALS"
  value: "keystore/73713_java_it_service_account"
}
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
# Format: //devtools/kokoro/config/proto/build.proto

# Configure the docker image for kokoro-trampoline.
env_vars: {
  key: "TRAMPOLINE_IMAGE"
  value: "gcr.io/cloud-devrel-kokoro-resources/java8"
}

env_vars: {
  key: "INTEGRATION_TEST_ARGS"
  value: "google-cloud-clients/google-cloud-bigquerystorage"
}

env_vars: {
  key: "JOB_TYPE"
  value: "integration"
}

env_vars: {
  key: "GCLOUD_PROJECT"
  value: "gcloud-devel"
}

env_vars: {
  key: "GOOGLE_APPLICATION_CREDENTIALS"
  value: "keystore/73713_java_it_service_account"
}
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
# Format: //devtools/kokoro/config/proto/build.proto

# Configure the docker image for kokoro-trampoline.
env_vars: {
  key: "TRAMPOLINE_IMAGE"
  value: "gcr.io/cloud-devrel-kokoro-resources/java8"
}

env_vars: {
  key: "INTEGRATION_TEST_ARGS"
  value: "google-cloud-clients/google-cloud-bigquerystorage"
}

env_vars: {
  key: "JOB_TYPE"
  value: "integration"
}

env_vars: {
  key: "GCLOUD_PROJECT"
  value: "gcloud-devel"
}

env_vars: {
  key: "GOOGLE_APPLICATION_CREDENTIALS"
  value: "keystore/73713_java_it_service_account"
}

branches/autosynth-automl/google-cloud-clients/google-cloud-bigquerystorage/pom.xml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,12 @@
4141
<artifactId>gax-grpc</artifactId>
4242
<scope>test</scope>
4343
</dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro</artifactId>
      <version>1.9.0</version>
      <scope>test</scope>
    </dependency>
4450
<!-- Need testing utility classes for generated gRPC clients tests -->
4551
<dependency>
4652
<groupId>com.google.api</groupId>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,257 @@
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.storage.v1beta1.it;

import static com.google.common.truth.Truth.assertWithMessage;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import com.google.api.gax.rpc.ServerStream;
import com.google.cloud.ServiceOptions;
import com.google.cloud.bigquery.storage.v1beta1.BigQueryStorageClient;
import com.google.cloud.bigquery.storage.v1beta1.ReadOptions.TableReadOptions;
import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest;
import com.google.cloud.bigquery.storage.v1beta1.Storage.DataFormat;
import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest;
import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse;
import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession;
import com.google.cloud.bigquery.storage.v1beta1.Storage.StreamPosition;
import com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference;
import com.google.protobuf.TextFormat;
import java.io.IOException;
import java.util.logging.Logger;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

/** Integration tests for BigQuery Storage API. */
public class ITBigQueryStorageTest {

  private static final Logger LOG = Logger.getLogger(ITBigQueryStorageTest.class.getName());

  // Shared across all tests; created once in beforeClass() and closed in afterClass().
  private static BigQueryStorageClient client;
  private static String parentProjectId;

  @BeforeClass
  public static void beforeClass() throws IOException {
    client = BigQueryStorageClient.create();
    parentProjectId = String.format("projects/%s", ServiceOptions.getDefaultProjectId());

    LOG.info(
        String.format(
            "%s tests running with parent project: %s",
            ITBigQueryStorageTest.class.getSimpleName(), parentProjectId));
  }

  @AfterClass
  public static void afterClass() {
    if (client != null) {
      client.close();
    }
  }

  /**
   * Reads the public 'bigquery-public-data.samples.shakespeare' table over a single stream and
   * verifies the total number of Avro rows received matches the table's known row count.
   */
  @Test
  public void testSimpleRead() {
    TableReference tableReference =
        TableReference.newBuilder()
            .setProjectId("bigquery-public-data")
            .setDatasetId("samples")
            .setTableId("shakespeare")
            .build();

    ReadSession session = client.createReadSession(tableReference, parentProjectId, 1);
    assertSingleStream(tableReference, session);

    StreamPosition readPosition =
        StreamPosition.newBuilder().setStream(session.getStreams(0)).build();

    ReadRowsRequest readRowsRequest =
        ReadRowsRequest.newBuilder().setReadPosition(readPosition).build();

    long avroRowCount = 0;
    ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
    for (ReadRowsResponse response : stream) {
      assertResponseHasAvroRows(response, avroRowCount);
      avroRowCount += response.getAvroRows().getRowCount();
    }

    // Known total row count of the 'shakespeare' sample table.
    assertEquals(164_656, avroRowCount);
  }

  /**
   * Creates a read session with a row restriction ("word_count > 100") and verifies every row
   * returned satisfies the filter, as well as the expected filtered row count.
   *
   * @throws IOException if decoding the Avro row blocks fails
   */
  @Test
  public void testFilter() throws IOException {
    TableReference tableReference =
        TableReference.newBuilder()
            .setProjectId("bigquery-public-data")
            .setDatasetId("samples")
            .setTableId("shakespeare")
            .build();

    TableReadOptions options =
        TableReadOptions.newBuilder().setRowRestriction("word_count > 100").build();

    CreateReadSessionRequest request =
        CreateReadSessionRequest.newBuilder()
            .setParent(parentProjectId)
            .setRequestedStreams(1)
            .setTableReference(tableReference)
            .setReadOptions(options)
            .setFormat(DataFormat.AVRO)
            .build();

    ReadSession session = client.createReadSession(request);
    assertSingleStream(tableReference, session);

    StreamPosition readPosition =
        StreamPosition.newBuilder().setStream(session.getStreams(0)).build();

    ReadRowsRequest readRowsRequest =
        ReadRowsRequest.newBuilder().setReadPosition(readPosition).build();

    SimpleRowReader reader =
        new SimpleRowReader(new Schema.Parser().parse(session.getAvroSchema().getSchema()));

    long avroRowCount = 0;

    ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
    for (ReadRowsResponse response : stream) {
      assertResponseHasAvroRows(response, avroRowCount);
      avroRowCount += response.getAvroRows().getRowCount();

      reader.processRows(
          response.getAvroRows(),
          new SimpleRowReader.AvroRowConsumer() {
            @Override
            public void accept(GenericRecord record) {
              // Every row must satisfy the server-side row restriction above.
              Long wordCount = (Long) record.get("word_count");
              assertWithMessage("Row not matching expectations: %s", record.toString())
                  .that(wordCount)
                  .isGreaterThan(100L);
            }
          });
    }

    // Known number of 'shakespeare' rows with word_count > 100.
    assertEquals(1_333, avroRowCount);
  }

  /**
   * Creates a read session selecting only the 'word' and 'word_count' columns (with the same
   * "word_count > 100" restriction) and verifies both the projected Avro schema and the row
   * contents.
   *
   * @throws IOException if decoding the Avro row blocks fails
   */
  @Test
  public void testColumnSelection() throws IOException {
    TableReference tableReference =
        TableReference.newBuilder()
            .setProjectId("bigquery-public-data")
            .setDatasetId("samples")
            .setTableId("shakespeare")
            .build();

    TableReadOptions options =
        TableReadOptions.newBuilder()
            .addSelectedFields("word")
            .addSelectedFields("word_count")
            .setRowRestriction("word_count > 100")
            .build();

    CreateReadSessionRequest request =
        CreateReadSessionRequest.newBuilder()
            .setParent(parentProjectId)
            .setRequestedStreams(1)
            .setTableReference(tableReference)
            .setReadOptions(options)
            .setFormat(DataFormat.AVRO)
            .build();

    ReadSession session = client.createReadSession(request);
    assertSingleStream(tableReference, session);

    StreamPosition readPosition =
        StreamPosition.newBuilder().setStream(session.getStreams(0)).build();

    ReadRowsRequest readRowsRequest =
        ReadRowsRequest.newBuilder().setReadPosition(readPosition).build();

    Schema avroSchema = new Schema.Parser().parse(session.getAvroSchema().getSchema());

    String actualSchemaMessage =
        String.format(
            "Unexpected schema. Actual schema:%n%s", avroSchema.toString(/* pretty = */ true));
    assertEquals(actualSchemaMessage, Schema.Type.RECORD, avroSchema.getType());
    assertEquals(actualSchemaMessage, "__root__", avroSchema.getName());

    // Only the two selected columns should appear in the projected schema.
    assertEquals(actualSchemaMessage, 2, avroSchema.getFields().size());
    assertEquals(
        actualSchemaMessage, Schema.Type.STRING, avroSchema.getField("word").schema().getType());
    assertEquals(
        actualSchemaMessage,
        Schema.Type.LONG,
        avroSchema.getField("word_count").schema().getType());

    SimpleRowReader reader = new SimpleRowReader(avroSchema);

    long avroRowCount = 0;
    ServerStream<ReadRowsResponse> stream = client.readRowsCallable().call(readRowsRequest);
    for (ReadRowsResponse response : stream) {
      assertResponseHasAvroRows(response, avroRowCount);
      avroRowCount += response.getAvroRows().getRowCount();
      reader.processRows(
          response.getAvroRows(),
          new SimpleRowReader.AvroRowConsumer() {
            @Override
            public void accept(GenericRecord record) {
              String rowAssertMessage =
                  String.format("Row not matching expectations: %s", record.toString());

              Long wordCount = (Long) record.get("word_count");
              assertWithMessage(rowAssertMessage).that(wordCount).isGreaterThan(100L);

              // Avro decodes STRING columns as org.apache.avro.util.Utf8.
              Utf8 word = (Utf8) record.get("word");
              assertWithMessage(rowAssertMessage).that(word.length()).isGreaterThan(0);
            }
          });
    }

    // Known number of 'shakespeare' rows with word_count > 100.
    assertEquals(1_333, avroRowCount);
  }

  /** Asserts that exactly one stream was allocated for the session, with diagnostic context. */
  private static void assertSingleStream(TableReference tableReference, ReadSession session) {
    assertEquals(
        String.format(
            "Did not receive expected number of streams for table reference '%s' CreateReadSession response:%n%s",
            TextFormat.shortDebugString(tableReference), session.toString()),
        1,
        session.getStreamsCount());
  }

  /** Asserts that a ReadRows response carries Avro-encoded rows, with diagnostic context. */
  private static void assertResponseHasAvroRows(ReadRowsResponse response, long rowsReadSoFar) {
    assertTrue(
        String.format(
            "Response is missing 'avro_rows'. Read %d rows so far. ReadRows response:%n%s",
            rowsReadSoFar, response.toString()),
        response.hasAvroRows());
  }
}

0 commit comments

Comments
 (0)