|
| 1 | +// Copyright 2016, Google, Inc. |
| 2 | +// Licensed under the Apache License, Version 2.0 (the "License"); |
| 3 | +// you may not use this file except in compliance with the License. |
| 4 | +// You may obtain a copy of the License at |
| 5 | +// |
| 6 | +// http://www.apache.org/licenses/LICENSE-2.0 |
| 7 | +// |
| 8 | +// Unless required by applicable law or agreed to in writing, software |
| 9 | +// distributed under the License is distributed on an "AS IS" BASIS, |
| 10 | +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 11 | +// See the License for the specific language governing permissions and |
| 12 | +// limitations under the License. |
| 13 | + |
'use strict';

// "async" provides the parallel-task helper used by getSizeExample below.
var async = require('async');

// [START auth]
// You must set the GOOGLE_APPLICATION_CREDENTIALS and GCLOUD_PROJECT
// environment variables to run this sample
var projectId = process.env.GCLOUD_PROJECT;

// Initialize gcloud
var gcloud = require('gcloud')({
  projectId: projectId
});

// Get a reference to the bigquery component
var bigquery = gcloud.bigquery();
// [END auth]

// not going to use this bigquery instance
// (getSizeExample constructs its own client for the projectId it receives;
// this one exists only so the [START auth]/[END auth] doc snippet is complete)
bigquery = undefined;
// [START list_tables]
/**
 * Retrieve all tables for the specified dataset, following pagination until
 * every page has been collected.
 *
 * @param {Object} bigquery gcloud-node bigquery client.
 * @param {string} datasetId Dataset of the tables to retrieve.
 * @param {string} [pageToken] Page to retrieve.
 * @param {Function} callback Callback function; invoked with (err) on failure
 *     or (null, tables) with the concatenated list of every table.
 */
function getAllTablesExample(bigquery, datasetId, pageToken, callback) {
  // pageToken is optional — shift the arguments when it was omitted.
  if (typeof pageToken === 'function') {
    callback = pageToken;
    pageToken = undefined;
  }
  var dataset = bigquery.dataset(datasetId);
  var options = {};
  if (pageToken) {
    options.pageToken = pageToken;
  }

  // Grab paginated tables
  dataset.getTables(options, function (err, tables, nextQuery) {
    // Quit on error
    if (err) {
      return callback(err);
    }

    // There is another page of tables
    if (nextQuery) {
      // Grab the remaining pages of tables recursively.
      // BUG FIX: the original recursive call omitted the "bigquery" and
      // "datasetId" arguments, shifting every parameter one place left and
      // crashing on any dataset with more than one page of tables.
      return getAllTablesExample(
        bigquery,
        datasetId,
        nextQuery.token,
        function (err, _tables) {
          if (err) {
            return callback(err);
          }
          callback(null, tables.concat(_tables));
        }
      );
    }
    // Last page of tables
    return callback(null, tables);
  });
}
// [END list_tables]
| 81 | + |
// [START get_size]
/**
 * Retrieve the total size, in megabytes, of the specified dataset by summing
 * the size of each of its tables.
 *
 * @param {string} projectId The project, e.g. "bigquery-public-data".
 * @param {string} datasetId The dataset, e.g. "hacker_news".
 * @param {Function} callback Callback function; invoked with (err) on failure
 *     or (null, sizeInMegabytes).
 */
function getSizeExample(projectId, datasetId, callback) {
  if (!projectId) {
    return callback(new Error('projectId is required!'));
  }
  if (!datasetId) {
    // BUG FIX: message previously read "datasetId is require!"
    return callback(new Error('datasetId is required!'));
  }

  var gcloud = require('gcloud')({
    projectId: projectId || process.env.GCLOUD_PROJECT
  });
  var bigquery = gcloud.bigquery();

  // Fetch all tables in the dataset
  getAllTablesExample(bigquery, datasetId, function (err, tables) {
    // BUG FIX: the original ignored "err" here; on failure "tables" is
    // undefined and tables.map() would throw instead of reporting the error.
    if (err) {
      return callback(err);
    }
    return async.parallel(tables.map(function (table) {
      return function (cb) {
        // Fetch more detailed info for each table
        table.get(function (err, tableInfo) {
          if (err) {
            return cb(err);
          }
          // Return numBytes converted to Megabytes
          var numBytes = tableInfo.metadata.numBytes;
          return cb(null, (parseInt(numBytes, 10) / 1000) / 1000);
        });
      };
    }), function (err, sizes) {
      if (err) {
        return callback(err);
      }
      // Sum the per-table sizes into the dataset total.
      var sum = sizes.reduce(function (cur, prev) {
        return cur + prev;
      }, 0);
      return callback(null, sum);
    });
  });
}
// [END get_size]
| 129 | + |
| 130 | +// Run the examples |
| 131 | +exports.main = function (projectId, datasetId, cb) { |
| 132 | + getSizeExample(projectId, datasetId, function (err, sum) { |
| 133 | + if (err) { |
| 134 | + return cb(err); |
| 135 | + } |
| 136 | + var size = 'MB'; |
| 137 | + if (sum > 1000) { |
| 138 | + sum = sum / 1000; |
| 139 | + size = 'GB'; |
| 140 | + } |
| 141 | + if (sum > 1000) { |
| 142 | + sum = sum / 1000; |
| 143 | + size = 'TB'; |
| 144 | + } |
| 145 | + cb(null, '' + sum.toPrecision(5) + ' ' + size); |
| 146 | + }); |
| 147 | +}; |
| 148 | + |
// When executed directly (not require()d), parse CLI arguments and print
// the result via console.log.
if (module === require.main) {
  var cliArgs = process.argv.slice(2);
  if (cliArgs.length !== 2) {
    throw new Error('Usage: node dataset_size.js <projectId> <datasetId>');
  }
  exports.main(cliArgs[0], cliArgs[1], console.log);
}
0 commit comments