Skip to content

Commit

Permalink
First draft of copyTable sample
Browse files — browse the repository at this point in the history
  • Loading branch information
Ace Nassri committed Sep 1, 2016
1 parent 4d63879 commit 6e6cf48
Show file tree
Hide file tree
Showing 3 changed files with 159 additions and 22 deletions.
70 changes: 53 additions & 17 deletions bigquery/system-test/tables.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -36,37 +36,49 @@ var options = {
schema: 'Name:string, Age:integer, Weight:float, IsMagic:boolean',
rows: rows
};
// Options for the copyTable system test: copy the shared test table
// (options.dataset/options.table) into a freshly generated dataset/table
// pair. IDs come from generateUuid() so concurrent test runs don't collide.
var copyOptions = {
  srcDataset: options.dataset,
  srcTable: options.table,
  destDataset: generateUuid(),
  destTable: generateUuid()
};

describe('bigquery:tables', function () {
before(function (done) {
// Create bucket
storage.createBucket(options.bucket, function (err, bucket) {
assert.ifError(err, 'bucket creation succeeded');

// Upload data.csv
bucket.upload(options.localFilePath, function (err) {
assert.ifError(err, 'file upload succeeded');

// Create dataset
bigquery.createDataset(options.dataset, function (err, dataset) {
assert.ifError(err, 'dataset creation succeeded');
done();
// Create srcDataset
bigquery.createDataset(copyOptions.srcDataset, function (err) {
assert.ifError(err, 'srcDataset creation succeeded');
// Create destDataset
bigquery.createDataset(copyOptions.destDataset, function (err) {
assert.ifError(err, 'destDataset creation succeeded');
done();
});
});
});
});
});

after(function (done) {
// Delete testing dataset/table
bigquery.dataset(options.dataset).delete({ force: true }, function () {
// Delete files
storage.bucket(options.bucket).deleteFiles({ force: true }, function (err) {
if (err) {
return done(err);
}
// Delete bucket
setTimeout(function () {
storage.bucket(options.bucket).delete(done);
}, 2000);
// Delete srcDataset
bigquery.dataset(copyOptions.srcDataset).delete({ force: true }, function () {
// Delete destDataset
bigquery.dataset(copyOptions.destDataset).delete({ force: true }, function () {
// Delete files
storage.bucket(options.bucket).deleteFiles({ force: true }, function (err) {
if (err) {
return done(err);
}
// Delete bucket
setTimeout(function () {
storage.bucket(options.bucket).delete(done);
}, 2000);
});
});
});
});
Expand Down Expand Up @@ -157,6 +169,30 @@ describe('bigquery:tables', function () {
});
});

// Integration test for the copyTable sample. Runs against the real BigQuery
// API using the datasets created in the before() hook (see copyOptions).
describe('copyTable', function () {
  it('should copy a table between datasets', function (done) {
    program.copyTable(copyOptions, function (err, metadata) {
      assert.equal(err, null);
      // The sample waits for the copy job to finish, so the reported job
      // state must already be DONE when the callback fires.
      assert.deepEqual(metadata.status, { state: 'DONE' });

      // A copy must leave the source table in place...
      bigquery.dataset(copyOptions.srcDataset).table(copyOptions.srcTable).exists(
        function (err, exists) {
          assert.equal(err, null);
          assert.equal(exists, true, 'srcTable exists');

          // ...and must have created the destination table.
          bigquery.dataset(copyOptions.destDataset).table(copyOptions.destTable).exists(
            function (err, exists) {
              assert.equal(err, null);
              assert.equal(exists, true, 'destTable exists');
              done();
            }
          );
        }
      );
    });
  });
});

describe('deleteTable', function () {
it('should delete table', function (done) {
program.deleteTable(options, function (err) {
Expand Down
45 changes: 45 additions & 0 deletions bigquery/tables.js
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,37 @@ function deleteTable (options, callback) {
}
// [END delete_table]

// [START copy_table]
/**
 * Copy an existing BigQuery table into another dataset.
 *
 * Starts an asynchronous copy job and waits for it to finish before
 * invoking the callback with the job's final metadata.
 *
 * @param {object} options Configuration options.
 * @param {string} options.srcDataset The source dataset ID.
 * @param {string} options.srcTable The source table ID.
 * @param {string} options.destDataset The destination dataset ID.
 * @param {string} options.destTable The destination table ID. Will be created if it doesn't exist.
 * @param {function} callback The callback function.
 */
function copyTable (options, callback) {
  var source = bigquery.dataset(options.srcDataset).table(options.srcTable);
  var destination = bigquery.dataset(options.destDataset).table(options.destTable);

  source.copy(destination, function (err, job) {
    if (err) {
      callback(err);
      return;
    }

    console.log('Started job: %s', job.id);

    // Report the job's metadata to the caller once the copy has finished.
    function onJobComplete (metadata) {
      console.log('Completed job: %s', job.id);
      return callback(null, metadata);
    }

    // Job-level failures are propagated straight to the caller.
    job.on('error', callback).on('complete', onJobComplete);
  });
}
// [END copy_table]

// [START import_file]
/**
* Load a csv file into a BigQuery table.
Expand Down Expand Up @@ -219,6 +250,7 @@ var program = module.exports = {
importFile: importFile,
exportTableToGCS: exportTableToGCS,
insertRowsAsStream: insertRowsAsStream,
copyTable: copyTable,
main: function (args) {
// Run the command-line program
cli.help().strict().parse(args).argv;
Expand All @@ -236,6 +268,15 @@ cli
.command('delete <dataset> <table>', 'Delete a table in the specified dataset.', {}, function (options) {
program.deleteTable(utils.pick(options, ['dataset', 'table']), utils.makeHandler());
})
.command('copy <srcDataset> <srcTable> <destDataset> <destTable>',
'Make a copy of an existing table.', {},
function (options) {
program.copyTable(
utils.pick(options, ['srcDataset', 'srcTable', 'destDataset', 'destTable']),
utils.makeHandler()
);
}
)
.command('import <dataset> <table> <file>', 'Import data from a local file or a Google Cloud Storage file into BigQuery.', {
bucket: {
alias: 'b',
Expand Down Expand Up @@ -325,6 +366,10 @@ cli
'node $0 insert my_dataset my_table json_file',
'Insert the JSON objects contained in json_file (one per line) into my_dataset:my_table.'
)
.example(
'node $0 copy src_dataset src_table dest_dataset dest_table',
'Copy src_dataset:src_table to dest_dataset:dest_table.'
)
.wrap(100)
.recommendCommands()
.epilogue('For more information, see https://cloud.google.com/bigquery/docs');
Expand Down
66 changes: 61 additions & 5 deletions bigquery/test/tables.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,13 @@
var proxyquire = require('proxyquire').noCallThru();
var bucket = 'bucket';
var file = 'file';
var job = 'job';
var jobId = 'job';
var dataset = 'dataset';
var table = 'table';
var srcDataset = dataset;
var srcTable = table;
var destDataset = dataset + '_dest';
var destTable = table + '_dest';
var format = 'JSON';
var schema = 'schema';
var jsonArray = [
Expand Down Expand Up @@ -46,12 +50,14 @@ function getSample () {
var fileMock = {};
var metadataMock = { status: { state: 'DONE' } };
var jobMock = {
id: job,
id: jobId,
getMetadata: sinon.stub().yields(null, metadataMock),
on: sinon.stub().returnsThis()
};
jobMock.on.withArgs('complete').yields(metadataMock);
var tableMock = {
export: sinon.stub().yields(null, jobMock),
copy: sinon.stub().yields(null, jobMock),
delete: sinon.stub().yields(null),
import: sinon.stub().yields(null, jobMock),
insert: sinon.stub().yields(null, errorList)
Expand Down Expand Up @@ -220,7 +226,6 @@ describe('bigquery:tables', function () {
table: table,
file: file
};
sample.mocks.job.on.withArgs('complete').yields(sample.mocks.metadata);

sample.program.importFile(options, callback);

Expand All @@ -243,7 +248,6 @@ describe('bigquery:tables', function () {
bucket: bucket,
format: format
};
sample.mocks.job.on.withArgs('complete').yields(sample.mocks.metadata);

sample.program.importFile(options, callback);

Expand All @@ -269,6 +273,45 @@ describe('bigquery:tables', function () {
});
});

// Unit tests for copyTable using the sinon/proxyquire mocks from getSample().
describe('copyTable', function () {
  var options = {
    srcDataset: srcDataset,
    srcTable: srcTable,
    destDataset: destDataset,
    destTable: destTable
  };

  it('should copy a table', function () {
    var sample = getSample();
    var callback = sinon.stub();

    sample.program.copyTable(options, callback);

    // copy() must be called exactly once, with the destination table as its
    // only non-callback argument. NOTE(review): the mock returns the same
    // table object for every dataset/table lookup, so this cannot
    // distinguish source from destination.
    assert.equal(sample.mocks.table.copy.calledOnce, true);
    assert.deepEqual(
      sample.mocks.table.copy.firstCall.args.slice(0, -1),
      [sample.mocks.table]
    );
    // The user callback receives the completed job's metadata.
    assert.equal(callback.calledOnce, true);
    assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata]);
    // Two log lines: one when the job starts and one when it completes.
    assert.equal(console.log.calledTwice, true);
    assert.equal(console.log.calledWith('Started job: %s', sample.mocks.job.id), true);
    assert.equal(console.log.calledWith('Completed job: %s', sample.mocks.job.id), true);
  });

  it('should handle error', function () {
    var error = new Error('error');
    var sample = getSample();
    var callback = sinon.stub();
    // Make the mocked copy() report a failure.
    sample.mocks.table.copy.yields(error);

    sample.program.copyTable(options, callback);

    // The error is forwarded to the callback unchanged.
    assert.equal(callback.calledOnce, true);
    assert.deepEqual(callback.firstCall.args, [error]);
  });
});

describe('exportTableToGCS', function () {
it('should export to a table', function () {
var sample = getSample();
Expand All @@ -281,7 +324,6 @@ describe('bigquery:tables', function () {
gzip: true
};
var callback = sinon.stub();
sample.mocks.job.on.withArgs('complete').yields(sample.mocks.metadata);

sample.program.exportTableToGCS(options, callback);

Expand Down Expand Up @@ -389,6 +431,20 @@ describe('bigquery:tables', function () {
}]);
});

it('should call copyTable', function () {
  // Stub out copyTable itself so this test only verifies that the CLI
  // parses the "copy" command and dispatches the four positional args.
  var sample = getSample();
  sample.program.copyTable = sinon.stub();

  sample.program.main(['copy', srcDataset, srcTable, destDataset, destTable]);

  // Exactly one dispatch, carrying the parsed options object (the trailing
  // handler callback is dropped before comparison).
  assert.equal(sample.program.copyTable.calledOnce, true);
  assert.deepEqual(sample.program.copyTable.firstCall.args.slice(0, -1), [{
    srcDataset: srcDataset,
    srcTable: srcTable,
    destDataset: destDataset,
    destTable: destTable
  }]);
});

it('should call exportTableToGCS', function () {
var program = getSample().program;
program.exportTableToGCS = sinon.stub();
Expand Down

0 comments on commit 6e6cf48

Please sign in to comment.