diff --git a/lib/index.js b/lib/index.js
index 8986cb5..d27611f 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -15,6 +15,19 @@
 var mime = require('mime');
 var StreamSink = require('streamsink');
 var PassThrough = require('stream').PassThrough;
+
+const log4js = require('log4js');
+log4js.configure({
+  appenders: {
+    app: { type: 'file', filename: 'application.log', flags: 'w', maxLogSize: '15M' }
+  },
+  categories: {
+    default: { appenders: ['app'], level: 'warn' }
+  }
+});
+
+var logger = log4js.getLogger();
+
 var MAX_PUTOBJECT_SIZE = 5 * 1024 * 1024 * 1024;
 var MAX_DELETE_COUNT = 1000;
 var MAX_MULTIPART_COUNT = 10000;
@@ -145,11 +158,14 @@ Client.prototype.uploadFile = function(params) {
   var localFileSlicer = null;
   var parts = [];
 
+  logger.info(`Uploading file - ${localFile}`);
   openFile();
   return uploader;
 
   function handleError(err) {
+    logger.error(`Issue while uploading local file - ${err}`);
+    logger.error(`${err.stack}`);
     if (localFileSlicer) {
       localFileSlicer.unref();
       localFileSlicer = null;
@@ -200,6 +216,7 @@ Client.prototype.uploadFile = function(params) {
         handleError(err);
         return;
       }
+      logger.info('Uploading using multipart upload');
       startMultipartUpload(multipartUploadSize);
     } else {
       doWithRetry(tryPuttingObject, self.s3RetryCount, self.s3RetryDelay, onPutObjectDone);
@@ -261,7 +278,10 @@ Client.prototype.uploadFile = function(params) {
     return function(cb) {
       doWithRetry(tryUploadPart, self.s3RetryCount, self.s3RetryDelay, function(err, data) {
         if (fatalError) return;
-        if (err) return handleError(err);
+        if (err) {
+          logger.error('Failed to upload part');
+          return handleError(err);
+        }
         uploader.emit('part', data);
         cb();
       });
@@ -308,7 +328,13 @@ Client.prototype.uploadFile = function(params) {
       inStream.pipe(multipartETag);
       s3Params.Body = multipartETag;
 
+      let gotCallback = false;
       self.s3.uploadPart(extend({}, s3Params), function(err, data) {
+        if (gotCallback) {
+          logger.warn('AWS JS SDK called callback twice while uploading part');
+          return;
+        }
+        gotCallback = true;
         pendCb();
         if (fatalError || errorOccurred) return;
         if (err) {
@@ -406,7 +432,13 @@ Client.prototype.uploadFile = function(params) {
       inStream.pipe(multipartETag);
       s3Params.Body = multipartETag;
 
+      let gotCallback = false;
       self.s3.putObject(s3Params, function(err, data) {
+        if (gotCallback) {
+          logger.warn('AWS JS SDK called callback twice while uploading object');
+          return;
+        }
+        gotCallback = true;
         pendCb();
         if (fatalError) return;
         if (err) {
@@ -571,6 +603,7 @@ Client.prototype.listObjects = function(params) {
     ee.emit('end');
   });
 
+
   ee.abort = function() {
     abort = true;
   };
@@ -578,7 +611,10 @@ Client.prototype.listObjects = function(params) {
   return ee;
 
   function findAllS3Objects(marker, prefix, cb) {
-    if (abort) return;
+    if (abort) {
+      logger.warn('Aborting find all s3 objects operation');
+      return;
+    }
     doWithRetry(listObjects, self.s3RetryCount, self.s3RetryDelay, function(err, data) {
       if (abort) return;
       if (err) return cb(err);
@@ -1057,9 +1093,15 @@ function syncDir(self, params, directionIsToS3) {
         s3ObjectCursor += 1;
         uploadLocalFile();
       } else {
+        logger.info(`Skipping this file - ${localFileStat.s3Path}`);
+        if (localFileStat.s3Path === s3Object.key) {
+          logger.info(`File ${localFileStat.s3Path} already present on S3`);
+        }
+        ee.emit('fileSkipped', localFileStat.size, localFileStat.path);
         skipThisOne();
       }
-    } else {
+    }
+    else {
       if (!localFileStat) {
         downloadS3Object();
       } else if (!s3Object) {
@@ -1232,19 +1274,40 @@ function syncDir(self, params, directionIsToS3) {
   }
 
   function handleError(err) {
-    if (fatalError) return;
+    if (fatalError) {
+      logger.error('Ignoring error reported after a fatal error');
+      if (err) {
+        logger.error(`${err.stack}`);
+      }
+      return;
+    }
+    if (err) {
+      logger.error(`${err}`);
+      logger.error(`${err.stack}`);
+    }
+    logger.warn('Setting fatal error to true');
     fatalError = true;
     ee.emit('error', err);
   }
 
+  /*
+    Looks for existing objects on S3; objects that are already
+    present there are skipped during the sync.
+  */
   function findAllS3Objects() {
+
+    logger.info('Starting to find objects on S3');
     var finder = self.listObjects(listObjectsParams);
-    finder.on('error', handleError);
+    finder.on('error', function(err) {
+      logger.error(`Failed while finding objects on S3 - ${err}`);
+      handleError(err);
+    });
     finder.on('data', function(data) {
       if (fatalError) return;
       ee.objectsFound += data.Contents.length;
       ee.emit('progress');
       data.Contents.forEach(function(object) {
+        logger.info(`Found S3 object - ${object.Key}`);
         if(!object.Key.endsWith('/')) {
           object.key = object.Key.substring(prefix.length);
           allS3Objects.push(object);
@@ -1390,6 +1453,7 @@ function doWithRetry(fn, tryCount, delay, cb) {
       if (tryIndex >= tryCount) {
         cb(err);
       } else {
+        logger.info(`Retrying after ${delay}ms`);
         setTimeout(tryOnce, delay);
       }
     }
diff --git a/package.json b/package.json
index c4cf925..f763206 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@elucidatainc/s3-node-client",
-  "version": "4.5.0",
+  "version": "4.5.1",
   "description": "high level amazon s3 client. upload and download files and directories",
   "main": "lib/index.js",
   "scripts": {
@@ -41,7 +41,8 @@
     "mkdirp": "~0.5.0",
     "pend": "~1.2.0",
     "rimraf": "~2.2.8",
-    "streamsink": "~1.2.0"
+    "streamsink": "~1.2.0",
+    "log4js": "^6.5.2"
   },
   "bugs": {
     "url": "https://github.com/ElucidataInc/node-s3-client/issues"
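Note on the logging setup: below is a minimal standalone sketch of the log4js configuration this patch wires into lib/index.js, useful for verifying the behavior in isolation. The appender settings mirror the diff; the filtering described in the comments is standard log4js semantics, where calls below the category level never reach the appender.

const log4js = require('log4js');

log4js.configure({
  appenders: {
    // flags: 'w' truncates application.log on every startup;
    // maxLogSize: '15M' rolls the file once it reaches 15 MB
    app: { type: 'file', filename: 'application.log', flags: 'w', maxLogSize: '15M' }
  },
  categories: {
    // at level 'warn', only warn/error/fatal calls are written; lower this
    // to 'info' if the info-level messages added by the patch should appear
    default: { appenders: ['app'], level: 'warn' }
  }
});

const logger = log4js.getLogger();

logger.info('filtered out at level warn');
logger.warn('written to application.log');
logger.error('also written');

// flush buffered log lines before the process exits
log4js.shutdown(function(err) {
  if (err) console.error(err);
});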
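The gotCallback flag guarding the uploadPart and putObject callbacks is a hand-rolled once-guard. The same protection can be factored into a small reusable helper, sketched here; onceOnly is a hypothetical name, not part of the patch or the AWS SDK.

// hypothetical helper: wraps a node-style callback so that a second
// invocation is logged and ignored instead of re-running completion
// logic with stale upload state
function onceOnly(logger, label, cb) {
  var called = false;
  return function(err, data) {
    if (called) {
      logger.warn('callback invoked twice while ' + label);
      return;
    }
    called = true;
    cb(err, data);
  };
}

// usage, mirroring the uploadPart call site in the diff:
// self.s3.uploadPart(extend({}, s3Params),
//     onceOnly(logger, 'uploading part', function(err, data) { /* ... */ }));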
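Since syncDir now emits 'fileSkipped' with the skipped file's size and local path, callers can observe skips alongside the existing 'progress', 'error', and 'end' events. A consumer sketch follows; the directory, bucket, and prefix are placeholders, and credentials are assumed to be resolved by the AWS SDK (e.g. from the environment).

var s3 = require('@elucidatainc/s3-node-client');

var client = s3.createClient();
var sync = client.uploadDir({
  localDir: './build',                                 // placeholder local directory
  s3Params: { Bucket: 'my-bucket', Prefix: 'build/' }  // placeholder bucket/prefix
});

sync.on('fileSkipped', function(size, path) {
  // fired for local files whose S3 copy is already up to date
  console.log('skipped ' + path + ' (' + size + ' bytes)');
});
sync.on('progress', function() {
  console.log('found ' + sync.objectsFound + ' objects so far');
});
sync.on('error', function(err) {
  console.error('sync failed:', err.stack);
});
sync.on('end', function() {
  console.log('sync complete');
});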