@@ -560,22 +560,24 @@ public int hashCode() {
560560 * String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json";
561561 * TableId tableId = TableId.of(datasetName, tableName);
562562 * // Table field definition
563- * Field[] fields = new Field[] {
564- * Field.of("name", LegacySQLTypeName.STRING),
565- * Field.of("post_abbr", LegacySQLTypeName.STRING)
566- * };
563+ * Field[] fields =
564+ * new Field[] {
565+ * Field.of("name", LegacySQLTypeName.STRING),
566+ * Field.of("post_abbr", LegacySQLTypeName.STRING)
567+ * };
567568 * // Table schema definition
568569 * Schema schema = Schema.of(fields);
569- * LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri)
570- * .setFormatOptions(FormatOptions.json())
571- * .setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
572- * .setSchema(schema)
573- * .build();
570+ * LoadJobConfiguration configuration =
571+ * LoadJobConfiguration.builder(tableId, sourceUri)
572+ * .setFormatOptions(FormatOptions.json())
573+ * .setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
574+ * .setSchema(schema)
575+ * .build();
574576 * // Load the table
575- * Job remoteLoadJob = bigquery.create(JobInfo.of(configuration));
576- * remoteLoadJob = remoteLoadJob .waitFor();
577+ * Job loadJob = bigquery.create(JobInfo.of(configuration));
578+ * loadJob = loadJob.waitFor();
577579 * // Check the table
578- * System.out.println("State: " + remoteLoadJob .getStatus().getState());
580+ * System.out.println("State: " + loadJob.getStatus().getState());
579581 * return ((StandardTableDefinition) bigquery.getTable(tableId).getDefinition()).getNumRows();
580582 * }</pre>
581583 *
@@ -771,9 +773,25 @@ public int hashCode() {
771773 * String datasetName = "my_dataset_name";
772774 * String tableName = "my_table_name";
773775 * String newDescription = "new_description";
774- * Table oldTable = bigquery.getTable(datasetName, tableName);
775- * TableInfo tableInfo = oldTable.toBuilder().setDescription(newDescription).build();
776- * Table newTable = bigquery.update(tableInfo);
776+ * Table beforeTable = bigquery.getTable(datasetName, tableName);
777+ * TableInfo tableInfo = beforeTable.toBuilder()
778+ * .setDescription(newDescription)
779+ * .build();
780+ * Table afterTable = bigquery.update(tableInfo);
781+ * }</pre>
782+ *
783+ * <p>Example of updating a table by changing its expiration.
784+ * <pre> {@code
785+ * String datasetName = "my_dataset_name";
786+ * String tableName = "my_table_name";
787+ * Table beforeTable = bigquery.getTable(datasetName, tableName);
788+ *
789+ * // Set table to expire 5 days from now.
790+ * long expirationMillis = DateTime.now().plusDays(5).getMillis();
791+ * TableInfo tableInfo = beforeTable.toBuilder()
792+ * .setExpirationTime(expirationMillis)
793+ * .build();
794+ * Table afterTable = bigquery.update(tableInfo);
777795 * }</pre>
778796 *
779797 * @throws BigQueryException upon failure
@@ -869,10 +887,12 @@ public int hashCode() {
869887 * Map<String, Object> recordsContent = new HashMap<>();
870888 * recordsContent.put("stringField", "Hello, World!");
871889 * rowContent.put("recordField", recordsContent);
872- * InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId)
873- * .addRow("rowId", rowContent)
874- * // More rows can be added in the same RPC by invoking .addRow() on the builder
875- * .build());
890+ * InsertAllResponse response =
891+ * bigquery.insertAll(
892+ * InsertAllRequest.newBuilder(tableId)
893+ * .addRow("rowId", rowContent)
894+ * // More rows can be added in the same RPC by invoking .addRow() on the builder
895+ * .build());
876896 * if (response.hasErrors()) {
877897 * // If any of the insertions failed, this lets you inspect the errors
878898 * for (Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) {
@@ -936,8 +956,7 @@ public int hashCode() {
936956 * String tableName = "my_table_name";
937957 * Schema schema = ...;
938958 * String field = "field";
939- * TableResult tableData =
940- * bigquery.listTableData(datasetName, tableName, schema);
959+ * TableResult tableData = bigquery.listTableData(datasetName, tableName, schema);
941960 * for (FieldValueList row : tableData.iterateAll()) {
942961 * row.get(field);
943962 * }
@@ -1083,10 +1102,8 @@ TableResult listTableData(
10831102 * <p>Example of running a query.
10841103 * <pre> {@code
10851104 * // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
1086- * String query =
1087- * "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
1088- * QueryJobConfiguration queryConfig =
1089- * QueryJobConfiguration.newBuilder(query).build();
1105+ * String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
1106+ * QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
10901107 *
10911108 * // Print the results.
10921109 * for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
@@ -1145,9 +1162,7 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption...
11451162 * String csvData = "StringValue1\nStringValue2\n";
11461163 * TableId tableId = TableId.of(datasetName, tableName);
11471164 * WriteChannelConfiguration writeChannelConfiguration =
1148- * WriteChannelConfiguration.newBuilder(tableId)
1149- * .setFormatOptions(FormatOptions.csv())
1150- * .build();
1165+ * WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
11511166 * TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
11521167 * // Write data to writer
11531168 * try {
@@ -1170,9 +1185,7 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption...
11701185 * String location = "us";
11711186 * TableId tableId = TableId.of(datasetName, tableName);
11721187 * WriteChannelConfiguration writeChannelConfiguration =
1173- * WriteChannelConfiguration.newBuilder(tableId)
1174- * .setFormatOptions(FormatOptions.csv())
1175- * .build();
1188+ * WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
11761189 * // The location must be specified; other fields can be auto-detected.
11771190 * JobId jobId = JobId.newBuilder().setLocation(location).build();
11781191 * TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
0 commit comments