Skip to content
This repository was archived by the owner on Feb 7, 2026. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 13 additions & 39 deletions samples/datasets.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

'use strict';

/**
 * Creates a new BigQuery dataset.
 *
 * @param {string} datasetId ID of the dataset to create.
 * @param {string} projectId Google Cloud project that will own the dataset.
 * @returns {Promise<void>} Resolves once the dataset has been created.
 */
async function createDataset(datasetId, projectId) {
  // [START bigquery_create_dataset]
  // Imports the Google Cloud client library
  const BigQuery = require('@google-cloud/bigquery');

  // TODO(developer): Uncomment these lines before running the sample standalone.
  // const projectId = "your-project-id";
  // const datasetId = "my_new_dataset";

  // Creates a client
  const bigquery = new BigQuery({projectId});

  // Creates a new dataset. The client resolves to [dataset, apiResponse];
  // destructuring keeps only the Dataset object.
  const [dataset] = await bigquery.createDataset(datasetId);
  console.log(`Dataset ${dataset.id} created.`);
  // [END bigquery_create_dataset]
}

/**
 * Deletes an existing BigQuery dataset.
 *
 * @param {string} datasetId ID of the dataset to delete.
 * @param {string} projectId Google Cloud project that owns the dataset.
 * @returns {Promise<void>} Resolves once the dataset has been deleted.
 */
async function deleteDataset(datasetId, projectId) {
  // [START bigquery_delete_dataset]
  // Imports the Google Cloud client library
  const BigQuery = require('@google-cloud/bigquery');

  // TODO(developer): Uncomment these lines before running the sample standalone.
  // const projectId = "your-project-id";
  // const datasetId = "my_dataset";

  // Creates a client
  const bigquery = new BigQuery({projectId});

  // Creates a reference to the existing dataset
  const dataset = bigquery.dataset(datasetId);

  // Deletes the dataset
  await dataset.delete();
  console.log(`Dataset ${dataset.id} deleted.`);
  // [END bigquery_delete_dataset]
}

/**
 * Lists all datasets in a project, printing each dataset ID.
 *
 * @param {string} projectId Google Cloud project to list datasets from.
 * @returns {Promise<void>} Resolves once all dataset IDs have been printed.
 */
async function listDatasets(projectId) {
  // [START bigquery_list_datasets]
  // Imports the Google Cloud client library
  const BigQuery = require('@google-cloud/bigquery');

  // TODO(developer): Uncomment this line before running the sample standalone.
  // const projectId = "your-project-id";

  // Creates a client
  const bigquery = new BigQuery({projectId});

  // Lists all datasets in the specified project. The client resolves to
  // [datasets, apiResponse]; destructuring keeps only the Dataset array.
  const [datasets] = await bigquery.getDatasets();
  console.log('Datasets:');
  datasets.forEach(dataset => console.log(dataset.id));
  // [END bigquery_list_datasets]
}

Expand Down
92 changes: 28 additions & 64 deletions samples/queries.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,15 @@
/**
 * Prints query result rows to the console, one `url: …, N views` line per row.
 *
 * @param {Array<{url: string, view_count: number}>} rows Result rows to print;
 *   each row is read via its `url` and `view_count` properties.
 */
function printResult(rows) {
  // [START bigquery_simple_app_print]
  console.log('Query Results:');
  rows.forEach(row => {
    const url = row['url'];
    const viewCount = row['view_count'];
    console.log(`url: ${url}, ${viewCount} views`);
  });
  // [END bigquery_simple_app_print]
}

/**
 * Runs a sample query against the public Stack Overflow dataset and prints
 * the ten most-viewed BigQuery-tagged questions via printResult().
 *
 * @param {string} projectId Google Cloud project to bill the query to.
 * @returns {Promise<void>} Resolves once results have been printed.
 */
async function queryStackOverflow(projectId) {
  // [START bigquery_simple_app_deps]
  // Imports the Google Cloud client library
  const BigQuery = require('@google-cloud/bigquery');
  // [END bigquery_simple_app_deps]

  // [START bigquery_simple_app_client]
  // TODO(developer): Uncomment this line before running the sample standalone.
  // const projectId = "your-project-id";

  // Creates a client
  const bigquery = new BigQuery({projectId});
  // [END bigquery_simple_app_client]

  // [START bigquery_simple_app_query]
  // NOTE(review): the query text was collapsed in the diff view; this is the
  // canonical BigQuery simple-app sample query — confirm against the original.
  const sqlQuery = `SELECT
    CONCAT(
      'https://stackoverflow.com/questions/',
      CAST(id as STRING)) as url,
    view_count
    FROM \`bigquery-public-data.stackoverflow.posts_questions\`
    WHERE tags like '%google-bigquery%'
    ORDER BY view_count DESC
    LIMIT 10`;

  const options = {
    query: sqlQuery,
    useLegacySql: false, // Use standard SQL syntax for queries.
  };

  // Runs the query; resolves to [rows, apiResponse].
  const [rows] = await bigquery.query(options);
  printResult(rows);
  // [END bigquery_simple_app_query]
}
// [END bigquery_simple_app_all]

/**
 * Runs a SQL query synchronously (waits for completion) and prints each
 * result row.
 *
 * @param {string} sqlQuery Standard-SQL query text to run.
 * @param {string} projectId Google Cloud project to bill the query to.
 * @returns {Promise<void>} Resolves once all rows have been printed.
 */
async function syncQuery(sqlQuery, projectId) {
  // Imports the Google Cloud client library
  const BigQuery = require('@google-cloud/bigquery');

  // TODO(developer): Uncomment these lines before running the sample standalone.
  // const projectId = "your-project-id";
  // const sqlQuery = "SELECT * FROM publicdata.samples.natality LIMIT 5;";

  // Creates a client
  const bigquery = new BigQuery({projectId});

  // Query options list: https://cloud.google.com/bigquery/docs/reference/v2/jobs/query
  // NOTE(review): the option fields were collapsed in the diff view; they are
  // reconstructed to match asyncQuery's visible options — confirm.
  const options = {
    query: sqlQuery,
    useLegacySql: false, // Use standard SQL syntax for queries.
  };

  // Runs the query; resolves to [rows, apiResponse].
  const [rows] = await bigquery.query(options);
  console.log('Rows:');
  rows.forEach(row => console.log(row));
}

/**
 * Runs a SQL query as an asynchronous BigQuery job, checks the job's status
 * for errors, then fetches and prints the result rows.
 *
 * @param {string} sqlQuery Standard-SQL query text to run.
 * @param {string} projectId Google Cloud project to bill the query to.
 * @returns {Promise<void>} Resolves once all rows have been printed.
 * @throws {Array<Object>} The job's `status.errors` array if the job
 *   metadata reports errors (kept as-is from the sample; note it throws a
 *   plain array rather than an Error).
 */
async function asyncQuery(sqlQuery, projectId) {
  // [START bigquery_query]
  // Imports the Google Cloud client library
  const BigQuery = require('@google-cloud/bigquery');

  // TODO(developer): Uncomment these lines before running the sample standalone.
  // const projectId = "your-project-id";
  // const sqlQuery = "SELECT * FROM publicdata.samples.natality LIMIT 5;";

  // Creates a client
  const bigquery = new BigQuery({projectId});

  // Query options list: https://cloud.google.com/bigquery/docs/reference/v2/jobs/query
  const options = {
    query: sqlQuery,
    useLegacySql: false, // Use standard SQL syntax for queries.
  };

  // Runs the query as a job; resolves to [job, apiResponse].
  const [job] = await bigquery.createQueryJob(options);
  console.log(`Job ${job.id} started.`);

  // Get the job's status.
  // NOTE(review): this fetches metadata right after job creation, so the job
  // may still be running here; getQueryResults() below is what waits for
  // completion — confirm this matches the intended sample flow.
  const metadata = await job.getMetadata();

  // Check the job's status for errors
  const errors = metadata[0].status.errors;
  if (errors && errors.length > 0) {
    throw errors;
  }
  console.log(`Job ${job.id} completed.`);

  // Fetches the rows once the job finishes; resolves to [rows, apiResponse].
  const [rows] = await job.getQueryResults();
  console.log('Rows:');
  rows.forEach(row => console.log(row));
  // [END bigquery_query]
}

Expand Down
19 changes: 7 additions & 12 deletions samples/quickstart.js
Original file line number Diff line number Diff line change
// Imports the Google Cloud client library
const BigQuery = require('@google-cloud/bigquery');

// TODO(developer): Replace with your own project ID before running.
const projectId = 'YOUR_PROJECT_ID';

// Creates a client
const bigquery = new BigQuery({projectId});

// The name for the new dataset
const datasetName = 'my_new_dataset';

// Creates the new dataset and reports the result.
async function createDataset() {
  const [dataset] = await bigquery.createDataset(datasetName);
  console.log(`Dataset ${dataset.id} created.`);
}
// [END bigquery_quickstart]

createDataset().catch(console.error);
Loading