diff --git a/lib/bigquery/dataset.js b/lib/bigquery/dataset.js
index 68d165c2a12..924456574f7 100644
--- a/lib/bigquery/dataset.js
+++ b/lib/bigquery/dataset.js
@@ -20,6 +20,8 @@
 
 'use strict';
 
+var extend = require('extend');
+
 /**
  * @type {module:bigquery/table}
  * @private
@@ -27,30 +29,40 @@
 var Table = require('./table.js');
 
 /**
- * Create a Dataset object.
+ * @type {module:common/util}
+ * @private
+ */
+var util = require('../common/util.js');
+
+/*! Developer Documentation
+ *
+ * @param {module:bigquery} bigQuery - The parent BigQuery instance.
+ * @param {string} datasetId - The id of the Dataset.
+ */
+/**
+ * Interact with your BigQuery dataset. Create a Dataset instance with
+ * {@link module:bigquery#createDataset} or {@link module:bigquery#dataset}.
  *
  * @alias module:bigquery/dataset
  * @constructor
- *
- * @param {string} datasetId - The id of the Dataset.
  */
-function Dataset(datasetId) {
-  if (!(this instanceof Dataset)) {
-    return new Dataset(datasetId);
-  }
+function Dataset(bigQuery, datasetId) {
+  this.bigQuery = bigQuery;
   this.id = datasetId;
 }
 
 /**
  * Create a table given a tableId or configuration object.
  *
- * @param {string|object} options - Table id or configuration object.
+ * @param {object} options - Table configuration object.
  * @param {string} options.id - The id of the table.
  * @param {string|object} options.schema - A comma-separated list of name:type
  *     pairs. Valid types are "string", "integer", "float", "boolean", and
  *     "timestamp". If the type is omitted, it is assumed to be "string".
  *     Example: "name:string, age:integer". Schemas can also be specified as a
- *     JSON array of fields, which allows for nested and repeated fields.
+ *     JSON array of fields, which allows for nested and repeated fields. See
+ *     https://cloud.google.com/bigquery/docs/reference/v2/tables#resource for
+ *     more detailed information.
  * @param {function} callback - The callback function.
  *
  * @example
@@ -69,7 +81,36 @@ function Dataset(datasetId) {
  * });
  */
 Dataset.prototype.createTable = function(options, callback) {
-  throw new Error('Not implemented.');
+  if (util.is(options.schema, 'string')) {
+    options.schema =
+      options.schema.split(/\s*,\s*/).reduce(function(acc, pair) {
+        acc.fields.push({
+          name: pair.split(':')[0],
+          type: pair.split(':')[1] || 'string'
+        });
+        return acc;
+      }, { fields: [] });
+  }
+  var body = {
+    schema: options.schema,
+    tableReference: {
+      datasetId: this.id,
+      projectId: this.bigQuery.projectId,
+      tableId: options.id
+    }
+  };
+  delete options.id;
+  delete options.schema;
+  extend(true, body, options);
+  this.makeReq_('POST', '/tables', null, body, function(err, resp) {
+    if (err) {
+      callback(err);
+      return;
+    }
+    var table = this.table(resp.tableReference.tableId);
+    table.metadata = resp;
+    callback(null, table);
+  }.bind(this));
 };
 
 /**
@@ -77,6 +118,7 @@ Dataset.prototype.createTable = function(options, callback) {
  *
  * @param {object=} options - The configuration object.
  * @param {boolean} options.force - Force delete dataset and all tables.
+ *     (default: false)
  * @param {function} callback - The callback function.
  *
  * @example
@@ -89,7 +131,12 @@ Dataset.prototype.createTable = function(options, callback) {
  * });
  */
 Dataset.prototype.delete = function(options, callback) {
-  throw new Error('Not implemented.');
+  if (!callback) {
+    callback = options;
+    options = {};
+  }
+  var query = { deleteContents: !!options.force };
+  this.makeReq_('DELETE', '', query, null, callback);
 };
 
 /**
@@ -105,24 +152,57 @@ Dataset.prototype.delete = function(options, callback) {
  * });
  */
 Dataset.prototype.getMetadata = function(callback) {
-  throw new Error('Not implemented.');
+  this.makeReq_('GET', '', null, null, function(err, resp) {
+    if (err) {
+      callback(err);
+      return;
+    }
+    this.metadata = resp;
+    callback(null, this.metadata);
+  }.bind(this));
 };
 
 /**
  * Get a list of tables.
  *
- * @param {object=} options - The configuration object.
+ * @param {object=} query - Configuration object.
+ * @param {number} query.maxResults - Maximum number of results to return.
+ * @param {string} query.pageToken - Token returned from a previous call, to
+ *     request the next page of results.
  * @param {function} callback - The callback function.
  *
  * @example
  * var myDataset = bigquery.dataset(datasetId);
  *
- * myDataset.getTables(function(err, tables) {
- *   // Use the tables.
+ * myDataset.getTables(function(err, tables, nextQuery) {
+ *   // If `nextQuery` is non-null, there are more results to fetch.
  * });
  */
-Dataset.prototype.getTables = function(options, callback) {
-  throw new Error('Not implemented.');
+Dataset.prototype.getTables = function(query, callback) {
+  var that = this;
+  if (!callback) {
+    callback = query;
+    query = {};
+  }
+  query = query || {};
+  this.makeReq_('GET', '/tables', query, null, function(err, resp) {
+    if (err) {
+      callback(err);
+      return;
+    }
+    var nextQuery = null;
+    if (resp.nextPageToken) {
+      nextQuery = extend({}, query, {
+        pageToken: resp.nextPageToken
+      });
+    }
+    var tables = (resp.tables || []).map(function(tableObject) {
+      var table = that.table(tableObject.id);
+      table.metadata = tableObject;
+      return table;
+    });
+    callback(null, tables, nextQuery);
+  });
 };
 
 /**
@@ -143,23 +223,44 @@ Dataset.prototype.getTables = function(options, callback) {
  * });
  */
 Dataset.prototype.setMetadata = function(metadata, callback) {
-  throw new Error('Not implemented.');
+  this.makeReq_('PUT', '', null, metadata, function(err, resp) {
+    if (err) {
+      callback(err);
+      return;
+    }
+    this.metadata = resp;
+    callback(null, this.metadata);
+  }.bind(this));
 };
 
 /**
  * Return a new instance of reference to an existing Table object.
  *
  * @param {string} tableId - The ID of the table.
- * @return {module:bigquery/table} Reference to existing Table object.
+ * @return {module:bigquery/table}
  *
  * @example
  * var kittens = myDataset.table('my-kittens');
  */
 Dataset.prototype.table = function(tableId) {
-  return new Table({
-    dataset: this,
-    id: tableId
-  });
+  return new Table(this, tableId);
+};
+
+/**
+ * Pass through this request to BigQuery's request handler, first prepending the
+ * path with the dataset.
+ *
+ * @private
+ *
+ * @param {string} method - Action.
+ * @param {string} path - Request path.
+ * @param {*} query - Request query object.
+ * @param {*} body - Request body contents.
+ * @param {function} callback - The callback function.
+ */
+Dataset.prototype.makeReq_ = function(method, path, query, body, callback) {
+  path = '/datasets/' + this.id + path;
+  this.bigQuery.makeReq_(method, path, query, body, callback);
 };
 
 module.exports = Dataset;
diff --git a/lib/bigquery/index.js b/lib/bigquery/index.js
index 87589b62cc4..5918db252a9 100644
--- a/lib/bigquery/index.js
+++ b/lib/bigquery/index.js
@@ -101,31 +101,12 @@ BigQuery.prototype.createDataset = function(datasetId, callback) {
       callback(err);
       return;
     }
     var dataset = this.dataset(datasetId);
     dataset.metadata = resp;
     callback(null, dataset);
   }.bind(this));
 };
 
-/**
- * Create a new job.
- *
- * @param {object} config - The configuration object.
- * @param {function} callback - The callback function.
- *
- * @example
- *
- * var config = {
- *
- * };
- * bigquery.createJob(config, function(err, job) {
- *   // Use your newly created job.
- * });
- */
-BigQuery.prototype.createJob = function(options, callback) {
-  throw new Error('Not implemented.');
-};
-
 /**
  * Create a reference to an existing dataset.
  *
@@ -133,7 +114,7 @@ BigQuery.prototype.createJob = function(options, callback) {
  * @return {module:bigquery/dataset}
  */
 BigQuery.prototype.dataset = function(datasetId) {
-  return new Dataset(datasetId);
+  return new Dataset(this, datasetId);
 };
 
 /**
@@ -173,7 +154,7 @@ BigQuery.prototype.getDatasets = function(query, callback) {
       });
     }
     var datasets = (resp.datasets || []).map(function(dataset) {
       var ds = that.dataset(dataset.datasetReference.datasetId);
       ds.metadata = dataset;
       return ds;
     });
@@ -249,38 +230,6 @@ BigQuery.prototype.job = function(jobId) {
   return new Job(jobId);
 };
 
-/**
- * Run a query in BigQuery.
- *
- * var myQuery = {
- *   query: 'SELECT * FROM users',
- *   dryRun: true,
- *   maxResults: 123,
- *   useQueryCache: false,
- *
- *   // Automatically concat pages together up to maxResults limit.
- *   auto: true
- * };
- *
- * bigquery.query(myQuery, function(err, results) {
- *   if (err) {
- *     // An error occurred.
- *   }
- *   if (!results.jobCompleted) {
- *     // No results because job still going...
- *   } else {
- *     // Results is just what was returned from BigQuery for now.
- *     // TODO: Is there a better format that we can return results in?
- *   }
- * });
- *
- * @param {object} options - The configuration object.
- * @param {function} callback - The callback function.
- */
-BigQuery.prototype.query = function(options, callback) {
-  throw new Error('Not implemented.');
-};
-
 /**
  * Make a new request object from the provided arguments and wrap the callback
  * to intercept non-successful responses.
diff --git a/lib/bigquery/table.js b/lib/bigquery/table.js
index 8d76fc233c6..47030a74a8f 100644
--- a/lib/bigquery/table.js
+++ b/lib/bigquery/table.js
@@ -20,6 +20,8 @@
 
 'use strict';
 
+var through = require('through2');
+
 /**
  * Create a Table object.
  *
@@ -28,100 +30,110 @@
  * @alias module:bigquery/table
  *
  * @param {string} tableId - The id of the table.
  */
-function Table(tableId) {
-  if (!(this instanceof Table)) {
-    return new Table(tableId);
-  }
+function Table(dataset, tableId) {
+  this.dataset = dataset;
   this.id = tableId;
 }
 
 /**
- * Set the metadata on the table.
+ * Copy data from one table to another, optionally creating that table.
  *
- * @todo Figure out what metadata can *actually* be set.
- * @todo Can columns be added? Removed?
+ * @param {object|string} options - The destination table or configuration
+ *     object.
+ * @param {function} callback - The callback function.
  *
- * @param {object} metadata - The metadata key/value object to set.
- * @param {Function} callback - The callback function.
+ * @example
+ * myTable.copy(destTable, function(err, job) {
+ *   // Job created to copy data.
+ * });
+ *
+ * var options = {
+ *   dest: destTable,
+ *   allowCreate: false // default: true
+ * };
+ *
+ * myTable.copy(options, function(err, job) {
+ *   // Job created to copy data.
+ * });
  */
-Table.prototype.setMetadata = function(metadata, callback) {
+Table.prototype.copy = function(options, callback) {
   throw new Error('Not implemented.');
 };
 
 /**
- * Return the metadata associated with the Table.
- *
- * @param {Function} callback - The callback function.
+ * Query the data from this table.
  *
- * @example
- *
- * myTable.getMetadata(function(err, metadata) {
- *   // Use Table metadata here.
- * });
+ * @param {string} query - SQL query.
+ * @return {ReadableStream}
  */
-Table.prototype.getMetadata = function(callback) {
-  throw new Error('Not implemented.');
+Table.prototype.createReadStream = function(query) {
+  var self = this;
+  var stream = through.obj();
+  runQuery(query);
+  return stream;
+
+  function runQuery(query) {
+    self.makeReq_('', '', '', '', function(err, resp) {
+      if (err) {
+        stream.emit('error', err);
+        stream.end();
+        return;
+      }
+      stream.push(resp.results);
+      if (resp.moreResults) {
+        // Pull the next page of results.
+        runQuery(query);
+      } else {
+        stream.end();
+      }
+    });
+  }
 };
 
 /**
- * Load data from a filename, gs:// url, readable stream, or raw string.
- * By loading data this way, you create a load job that will run your
- * data load asynchronously. If you would like instantaneous access to
- * your data in BigQuery, insert it using Table#insert().
- *
- * @param {object} options - The configuration object.
- * @param {Function} callback - The callback function.
+ * Load data into your table via a resumable upload stream. See
+ * https://cloud.google.com/bigquery/loading-data-post-request#resumable
  *
- * TODO: Decide on param key names here for different types of data input.
- * var options = {
- *   url: 'gs://my-bucket/my-data.csv',
- *   filename: '/Users/ryanseys/my-data.csv',
- *   data: 'hello,world,123',
+ * @param {object=} metadata - Metadata to send with the upload.
+ * @return {WritableStream}
  *
- *   format: 'csv', // or json
- *   delimiter: ';',
- *   skipHeaderRows: 1,
- *   numErrorsAllowed: 0,
- *   allowQuotedNewlines: false,
- *   allowJaggedRows: false,
- *   ignoreUnknowns: false
- *   // these options will change as necessary
- * };
+ * @example
+ * var kittens = bq.dataset('kittens').table('kittens');
 *
+ * fs.createReadStream('/kittens.csv')
+ *   .pipe(kittens.createWriteStream());
  */
-Table.prototype.load = function(options, callback) {
+Table.prototype.createWriteStream = function(metadata) {
   throw new Error('Not implemented.');
 };
 
 /**
- * Create a write stream out of the table to allow data to be loaded
- * via a piped stream.
+ * Delete a table and all its data.
  *
- * @return {WritableStream} The writable stream object.
+ * @param {object=} options - Configuration object.
+ * @param {function} callback - The callback function.
  *
  * @example
- *
- * var bigDataTable = myTable.createWriteStream({ type: 'csv' });
- * fs.createReadStream('big-data.csv').pipe(bigDataTable);
+ * myTable.delete(function(err) {
+ *   // Deletes table and all its data.
+ * });
  */
-Table.prototype.createWriteStream = function(options) {
+Table.prototype.delete = function(options, callback) {
   throw new Error('Not implemented.');
 };
 
 /**
  * Export table to Google Cloud Storage.
  *
- * @param {object} options - The configuration object.
- * @param {Function} callback - The callback function.
+ * @param {object} options - The configuration object.
+ * @param {function} callback - The callback function.
  *
  * @example
- *
  * var exportedFile = storage.bucket('my-bucket').file('export.csv');
+ *
  * var options = {
  *   format: 'json',
- *   gzip: true, // or false (default)
+ *   gzip: true, // default: false
  *   dest: exportedFile // or 'gs://my-bucket/export.csv' (accepts wildcards)
  * };
+ *
  * myTable.export(options, function(err) {
  *   // Exported!
  * });
@@ -131,35 +143,34 @@ Table.prototype.export = function(options, callback) {
 };
 
 /**
- * Delete a table and all its data.
+ * Return the metadata associated with the Table.
  *
- * @param {object=} options - Configuration object.
- * @param {Function} callback - The callback function.
+ * @param {function} callback - The callback function.
  *
  * @example
- *
- * myTable.delete(function(err) {
- *   // Deletes table and all its data.
+ * myTable.getMetadata(function(err, metadata) {
+ *   // Use Table metadata here.
  * });
  */
-Table.prototype.delete = function(options, callback) {
+Table.prototype.getMetadata = function(callback) {
   throw new Error('Not implemented.');
 };
 
 /**
  * Retrieves table data from a specified set of rows.
  *
- * @param {object=} options - The configuration object.
- * @param {Function} callback - The callback function.
+ * @todo We should automatically handle pagination.
  *
- * @example
+ * @param {object=} options - The configuration object.
+ * @param {function} callback - The callback function.
  *
+ * @example
  * var options = {
 *   maxResults: 123,
 *   startIndex: 0,
 *   pageToken: 'token'
- *   // TODO: We should automatically handle pagination.
 * };
+ *
 * myTable.getRows(options, function(err, rows) {
 *   // Use rows here.
 * });
@@ -176,11 +187,10 @@ Table.prototype.getRows = function(options, callback) {
  * The one advantage to using this method is that data is immediately
  * available in BigQuery, where as Table#load()-ing may take time to process.
  *
- * @param {object} options - The configuration object.
- * @param {Function} callback - The callback function.
+ * @param {object} options - The configuration object.
+ * @param {function} callback - The callback function.
  *
  * @example
- *
  * myTable.insert({ dept: 'FILM', code: '1001', capacity: 42 }, function(err) {
  *   // Inserted the row.
  * });
@@ -199,26 +209,45 @@ Table.prototype.insert = function(options, callback) {
 };
 
 /**
- * Copy data from one table to another, optionally creating that table.
+ * Load data from a filename, gs:// url, readable stream, or raw string. By
+ * loading data this way, you create a load job that will run your data load
+ * asynchronously. If you would like instantaneous access to your data in
+ * BigQuery, insert it using Table#insert().
  *
- * @param {object|string} options - The dest table or configuration object.
- * @param {Function} callback - The callback function.
+ * @todo Decide on param key names here for different types of data input.
  *
- * @example
- *
- * myTable.copy(destTable, function(err, job) {
- *   // Job created to copy data.
- * });
+ * @param {object} options - The configuration object.
+ * @param {function} callback - The callback function.
  *
  * var options = {
- *   dest: destTable,
- *   allowCreate: false // default is true
+ *   url: 'gs://my-bucket/my-data.csv',
+ *   filename: '/Users/ryanseys/my-data.csv',
+ *   data: 'hello,world,123',
+ *
+ *   format: 'csv', // or json
+ *   delimiter: ';',
+ *   skipHeaderRows: 1,
+ *   numErrorsAllowed: 0,
+ *   allowQuotedNewlines: false,
+ *   allowJaggedRows: false,
+ *   ignoreUnknowns: false
+ *   // these options will change as necessary
  * };
- * myTable.copy(options, function(err, job) {
- *   // Job created to copy data.
- * });
  */
-Table.prototype.copy = function(options, callback) {
+Table.prototype.load = function(options, callback) {
+  throw new Error('Not implemented.');
+};
+
+/**
+ * Set the metadata on the table.
+ *
+ * @todo Figure out what metadata can *actually* be set.
+ * @todo Can columns be added? Removed?
+ *
+ * @param {object} metadata - The metadata key/value object to set.
+ * @param {Function} callback - The callback function.
+ */
+Table.prototype.setMetadata = function(metadata, callback) {
   throw new Error('Not implemented.');
 };
diff --git a/package.json b/package.json
index 1b9b57161d3..d3a7f263002 100644
--- a/package.json
+++ b/package.json
@@ -49,7 +49,8 @@
     "gapitoken": "^0.1.3",
     "node-uuid": "^1.4.1",
     "protobufjs": "^3.4.0",
-    "request": "^2.39.0"
+    "request": "^2.39.0",
+    "through2": "^0.6.3"
   },
   "devDependencies": {
     "async": "^0.9.0",
diff --git a/test/bigquery/dataset.js b/test/bigquery/dataset.js
new file mode 100644
index 00000000000..26a7a71b91b
--- /dev/null
+++ b/test/bigquery/dataset.js
@@ -0,0 +1,337 @@
+/**
+ * Copyright 2014 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +/*global describe, it, beforeEach */ + +'use strict'; + +var assert = require('assert'); +var BigQuery = require('../../lib/bigquery'); +var Dataset = require('../../lib/bigquery/dataset'); +var Table = require('../../lib/bigquery/table'); + +describe('BigQuery', function() { + var DATASET_ID = 'kittens'; + var ds; + + beforeEach(function() { + ds = new BigQuery({ + keyFilename: '/Users/stephen/dev/keyfile.json', + projectId: 'nth-circlet-705' + }).dataset(DATASET_ID); + }); + + beforeEach(function() { + return; + ds = new Dataset(DATASET_ID); + ds.makeReq_ = function(method, path, query, body, callback) { + callback(); + }; + }); + + describe('createTable', function() { + var SCHEMA_OBJECT = { + fields: [ + { name: 'id', type: 'integer' }, + { name: 'breed', type: 'string' }, + { name: 'name', type: 'string' }, + { name: 'dob', type: 'timestamp' } + ] + }; + var SCHEMA_STRING = 'id:integer,breed,name,dob:timestamp'; + var TABLE = 'kittens'; + + it('should create a table', function(done) { + ds.makeReq_ = function(method, path, query, body) { + assert.equal(method, 'POST'); + assert.equal(path, '/tables'); + assert.strictEqual(query, null); + assert.deepEqual(body.schema, SCHEMA_OBJECT); + assert.equal(body.tableReference.datasetId, DATASET_ID); + assert.equal(body.tableReference.projectId, ds.bigQuery.projectId); + assert.equal(body.tableReference.tableId, TABLE); + done(); + }; + ds.createTable({ id: TABLE, schema: SCHEMA_OBJECT }, assert.ifError); + }); + + it('should create a schema object from a string', function(done) { + ds.makeReq_ = function(method, path, query, body) { + assert.deepEqual(body.schema, SCHEMA_OBJECT); + done(); + }; + ds.createTable({ id: TABLE, schema: SCHEMA_STRING }, assert.ifError); + }); + + it('should return an error to the callback', function(done) { + var error = new Error('Error.'); + ds.makeReq_ = function(method, path, query, body, callback) { + callback(error); + }; + ds.createTable({ id: TABLE, schema: SCHEMA_OBJECT }, function(err) { + assert.equal(err, error); + done(); + }); + }); + + it('should return a Table object', function(done) { + ds.makeReq_ = function(method, path, query, body, callback) { + callback(null, { tableReference: { tableId: TABLE } }); + }; + ds.createTable({ id: TABLE, schema: SCHEMA_OBJECT }, function(e, table) { + assert.ifError(e); + assert(table instanceof Table); + done(); + }); + }); + + it('should assign metadata to the Table object', function(done) { + var metadata = { + a: 'b', + c: 'd', + tableReference: { tableId: TABLE } + }; + ds.makeReq_ = function(method, path, query, body, callback) { + callback(null, metadata); + }; + ds.createTable({ id: TABLE, schema: SCHEMA_OBJECT }, function(e, table) { + assert.ifError(e); + assert.deepEqual(table.metadata, metadata); + done(); + }); + }); + }); + + describe('delete', function() { + it('should delete the dataset via the api', function(done) { + ds.makeReq_ = function(method, path, query, body) { + assert.equal(method, 'DELETE'); + assert.equal(path, ''); + assert.deepEqual(query, { deleteContents: false }); + assert.strictEqual(body, null); + done(); + }; + ds.delete(assert.ifError); + }); + + it('should allow a force delete', function(done) { + ds.makeReq_ = function(method, path, query) { + assert.deepEqual(query, { deleteContents: true }); + done(); + }; + ds.delete({ force: true }, assert.ifError); + }); + + it('should execute callback when done', function(done) { + ds.makeReq_ = function(method, path, query, body, callback) { + callback(); + }; + 
+      ds.delete(done);
+    });
+
+    it('should pass error to callback', function(done) {
+      var error = new Error('Error.');
+      ds.makeReq_ = function(method, path, query, body, callback) {
+        callback(error);
+      };
+      ds.delete(function(err) {
+        assert.equal(err, error);
+        done();
+      });
+    });
+  });
+
+  describe('getMetadata', function() {
+    it('should get metadata from api', function(done) {
+      ds.makeReq_ = function(method, path, query, body) {
+        assert.equal(method, 'GET');
+        assert.equal(path, '');
+        assert.strictEqual(query, null);
+        assert.strictEqual(body, null);
+        done();
+      };
+      ds.getMetadata(assert.ifError);
+    });
+
+    it('should execute callback with error', function(done) {
+      var error = new Error('Error.');
+      ds.makeReq_ = function(method, path, query, body, callback) {
+        callback(error);
+      };
+      ds.getMetadata(function(err) {
+        assert.equal(err, error);
+        done();
+      });
+    });
+
+    describe('metadata', function() {
+      var METADATA = { a: 'b', c: 'd' };
+
+      beforeEach(function() {
+        ds.makeReq_ = function(method, path, query, body, callback) {
+          callback(null, METADATA);
+        };
+      });
+
+      it('should update metadata on Dataset object', function(done) {
+        ds.getMetadata(function(err) {
+          assert.ifError(err);
+          assert.deepEqual(ds.metadata, METADATA);
+          done();
+        });
+      });
+
+      it('should execute callback with metadata', function(done) {
+        ds.getMetadata(function(err, metadata) {
+          assert.ifError(err);
+          assert.deepEqual(metadata, METADATA);
+          done();
+        });
+      });
+    });
+  });
+
+  describe('getTables', function() {
+    it('should get tables from the api', function(done) {
+      ds.makeReq_ = function(method, path, query, body) {
+        assert.equal(method, 'GET');
+        assert.equal(path, '/tables');
+        assert.deepEqual(query, {});
+        assert.strictEqual(body, null);
+        done();
+      };
+      ds.getTables(assert.ifError);
+    });
+
+    it('should accept query', function(done) {
+      var queryObject = { maxResults: 8, pageToken: 'token' };
+      ds.makeReq_ = function(method, path, query) {
+        assert.deepEqual(query, queryObject);
+        done();
+      };
+      ds.getTables(queryObject, assert.ifError);
+    });
+
+    it('should return error to callback', function(done) {
+      var error = new Error('Error.');
+      ds.makeReq_ = function(method, path, query, body, callback) {
+        callback(error);
+      };
+      ds.getTables(function(err) {
+        assert.equal(err, error);
+        done();
+      });
+    });
+
+    it('should return Table objects', function(done) {
+      ds.makeReq_ = function(method, path, query, body, callback) {
+        callback(null, { tables: [{ id: 'tableName' }] });
+      };
+      ds.getTables(function(err, tables) {
+        assert.ifError(err);
+        assert(tables[0] instanceof Table);
+        done();
+      });
+    });
+
+    it('should assign metadata to the Table objects', function(done) {
+      var tableObjects = [{ a: 'b', c: 'd', id: 'tableName' }];
+      ds.makeReq_ = function(method, path, query, body, callback) {
+        callback(null, { tables: tableObjects });
+      };
+      ds.getTables(function(err, tables) {
+        assert.ifError(err);
+        assert.deepEqual(tables[0].metadata, tableObjects[0]);
+        done();
+      });
+    });
+
+    it('should return token if more results exist', function(done) {
+      var token = 'token';
+      ds.makeReq_ = function(method, path, query, body, callback) {
+        callback(null, { nextPageToken: token });
+      };
+      ds.getTables(function(err, tables, nextQuery) {
+        assert.deepEqual(nextQuery, {
+          pageToken: token
+        });
+        done();
+      });
+    });
+  });
+
+  describe('setMetadata', function() {
+    var METADATA = { a: 'b', c: 'd' };
+
+    it('should send request to the api', function(done) {
+      ds.makeReq_ = function(method, path, query, body) {
+        assert.equal(method, 'PUT');
+        assert.equal(path, '');
+        assert.strictEqual(query, null);
+        assert.deepEqual(body, METADATA);
+        done();
+      };
+      ds.setMetadata(METADATA, assert.ifError);
+    });
+
+    it('should execute callback with error', function(done) {
+      var error = new Error('Error.');
+      ds.makeReq_ = function(method, path, query, body, callback) {
+        callback(error);
+      };
+      ds.setMetadata(METADATA, function(err) {
+        assert.equal(err, error);
+        done();
+      });
+    });
+
+    describe('metadata', function() {
+      beforeEach(function() {
+        ds.makeReq_ = function(method, path, query, body, callback) {
+          callback(null, METADATA);
+        };
+      });
+
+      it('should update metadata on Dataset object', function(done) {
+        ds.setMetadata(METADATA, function(err) {
+          assert.ifError(err);
+          assert.deepEqual(ds.metadata, METADATA);
+          done();
+        });
+      });
+
+      it('should execute callback with metadata', function(done) {
+        ds.setMetadata(METADATA, function(err, metadata) {
+          assert.ifError(err);
+          assert.deepEqual(metadata, METADATA);
+          done();
+        });
+      });
+    });
+  });
+
+  describe('table', function() {
+    it('should return a Table object', function() {
+      var tableId = 'tableId';
+      var table = ds.table(tableId);
+      assert(table instanceof Table);
+      assert.equal(table.id, tableId);
+    });
+  });
+
+  describe('makeReq_', function() {
+  });
+});
diff --git a/test/bigquery/index.js b/test/bigquery/index.js
index 315b0b344ab..4595c546d55 100644
--- a/test/bigquery/index.js
+++ b/test/bigquery/index.js
@@ -180,7 +180,7 @@ describe('BigQuery', function() {
 
   describe('getJobs', function() {
     it('should get jobs from the api', function(done) {
-      bq.makeReq_ = function(method, path, query, body, callback) {
+      bq.makeReq_ = function(method, path, query, body) {
         assert.equal(method, 'GET');
         assert.equal(path, '/jobs');
         assert.deepEqual(query, {});
@@ -262,7 +262,5 @@ describe('BigQuery', function() {
     });
   });
 
-  describe('query', function() {});
-
   describe('makeReq_', function() {});
 });
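Usage sketch (not part of the patch): a minimal example of the Dataset API added above, pieced together from the JSDoc examples and the constructor options used in the new tests (`projectId`, `keyFilename`). The project id, key path, and dataset/table names are placeholders, and the module is required straight from `lib/bigquery` rather than assuming any public entry point.

```js
var BigQuery = require('./lib/bigquery');

var bigquery = new BigQuery({
  projectId: 'my-project',             // placeholder project id
  keyFilename: '/path/to/keyfile.json' // placeholder credentials
});

var dataset = bigquery.dataset('kittens');

// Schemas can be passed as a comma-separated "name:type" string; omitted
// types default to "string" (see Dataset#createTable above).
dataset.createTable({
  id: 'puppies',
  schema: 'id:integer,breed,name,dob:timestamp'
}, function(err, table) {
  if (err) {
    console.error(err);
    return;
  }
  console.log('Created table:', table.id);

  // Page through the dataset's tables until nextQuery comes back null.
  (function fetchTables(query) {
    dataset.getTables(query, function(err, tables, nextQuery) {
      if (err) {
        console.error(err);
        return;
      }
      tables.forEach(function(t) {
        console.log('Found table:', t.id);
      });
      if (nextQuery) {
        fetchTables(nextQuery);
      }
    });
  })({ maxResults: 50 });
});
```

Because `getTables` only returns a `nextQuery` object when the response carries a `nextPageToken`, the recursion above terminates once the final page has been consumed.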