
Commit

Merge pull request #2 from ElucidataInc/logging/add_logs
[Fix]: Minor fixes
- Fix callback issue
- Add logs
- Emit a signal whenever a file is skipped during an upload operation (see the usage sketch below)
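
A minimal sketch of listening for the new signal on the emitter returned by client.uploadDir (bucket, paths, and handler body are illustrative, not part of this commit):

    var s3 = require('@elucidatainc/s3-node-client');
    var client = s3.createClient({ maxAsyncS3: 20 });
    var uploader = client.uploadDir({
      localDir: './data',
      s3Params: { Bucket: 'example-bucket', Prefix: 'data/' }
    });
    // emitted by syncDir when a local file is already present on S3
    uploader.on('fileSkipped', function (size, path) {
      console.log('skipped ' + path + ' (' + size + ' bytes)');
    });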
rish9511 committed Jun 29, 2022
2 parents b03cca0 + 2aee052 commit 545ec2b
Showing 2 changed files with 72 additions and 7 deletions.
74 changes: 69 additions & 5 deletions lib/index.js
@@ -15,6 +15,19 @@ var mime = require('mime');
var StreamSink = require('streamsink');
var PassThrough = require('stream').PassThrough;


const log4js = require('log4js');
log4js.configure({
appenders: {
app: { type: 'file', filename: 'application.log', flags: 'w', maxLogSize: '15M'}
},
categories: {
default: { appenders: ['app'], level: 'warn' }
}
});

var logger = log4js.getLogger();
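// Note: the default category above filters at level 'warn', so the
// logger.info(...) calls added throughout this commit are not written to
// application.log. A hypothetical one-line change (not in this commit) to
// capture them as well:
//   default: { appenders: ['app'], level: 'info' }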

var MAX_PUTOBJECT_SIZE = 5 * 1024 * 1024 * 1024;
var MAX_DELETE_COUNT = 1000;
var MAX_MULTIPART_COUNT = 10000;
@@ -145,11 +158,14 @@ Client.prototype.uploadFile = function(params) {
var localFileSlicer = null;
var parts = [];

logger.info(`Uploading file - ${localFile}`);
openFile();

return uploader;

function handleError(err) {
logger.error(`Issue while uploading local file - ${err}`);
logger.error(`${err.stack}`);
if (localFileSlicer) {
localFileSlicer.unref();
localFileSlicer = null;
@@ -200,6 +216,7 @@ Client.prototype.uploadFile = function(params) {
handleError(err);
return;
}
logger.info('Uploading using multipart upload');
startMultipartUpload(multipartUploadSize);
} else {
doWithRetry(tryPuttingObject, self.s3RetryCount, self.s3RetryDelay, onPutObjectDone);
@@ -261,7 +278,10 @@ Client.prototype.uploadFile = function(params) {
return function(cb) {
doWithRetry(tryUploadPart, self.s3RetryCount, self.s3RetryDelay, function(err, data) {
if (fatalError) return;
if (err) return handleError(err);
if (err) {
logger.error('Failed to upload part');
return handleError(err);
}
uploader.emit('part', data);
cb();
});
@@ -308,7 +328,13 @@ Client.prototype.uploadFile = function(params) {
inStream.pipe(multipartETag);
s3Params.Body = multipartETag;

let gotCallback = false;
self.s3.uploadPart(extend({}, s3Params), function(err, data) {
if (gotCallback) {
logger.warn('AWS JS SDK called callback twice while uploading part');
return;
}
gotCallback = true;
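// gotCallback guards against the AWS SDK invoking this completion callback
// more than once (the "callback issue" from the commit message): a second
// invocation is logged and dropped so that pendCb() and the upload state
// machine run exactly once. The same guard as a standalone sketch
// (illustrative, not part of this commit):
//   function once(fn) {
//     var called = false;
//     return function () {
//       if (called) return logger.warn('callback called twice');
//       called = true;
//       return fn.apply(null, arguments);
//     };
//   }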
pendCb();
if (fatalError || errorOccurred) return;
if (err) {
@@ -406,7 +432,13 @@ Client.prototype.uploadFile = function(params) {
inStream.pipe(multipartETag);
s3Params.Body = multipartETag;

let gotCallback = false;
self.s3.putObject(s3Params, function(err, data) {
if (gotCallback) {
logger.warn('AWS JS SDK called callback twice while uploading object');
return;
}
gotCallback = true;
pendCb();
if (fatalError) return;
if (err) {
@@ -571,14 +603,18 @@ Client.prototype.listObjects = function(params) {
ee.emit('end');
});


ee.abort = function() {
abort = true;
};

return ee;

function findAllS3Objects(marker, prefix, cb) {
if (abort) return;
if (abort) {
logger.warn('Aborting find all s3 objects operation');
return;
}
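// abort is flipped by ee.abort() above; a caller can cancel a long listing
// part-way through (bucket name illustrative, not from this commit):
//   var finder = client.listObjects({ s3Params: { Bucket: 'example-bucket' } });
//   finder.abort(); // later iterations of findAllS3Objects return early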
doWithRetry(listObjects, self.s3RetryCount, self.s3RetryDelay, function(err, data) {
if (abort) return;
if (err) return cb(err);
@@ -1057,9 +1093,15 @@ function syncDir(self, params, directionIsToS3) {
s3ObjectCursor += 1;
uploadLocalFile();
} else {
logger.info(`Skipping this file - ${localFileStat.s3Path}`);
if (localFileStat.s3Path == s3Object.key) {
logger.info(`File ${localFileStat.s3Path} already present on S3`);
}
ee.emit('fileSkipped', localFileStat.size, localFileStat.path);
skipThisOne();
}
} else {
}
else {
if (!localFileStat) {
downloadS3Object();
} else if (!s3Object) {
@@ -1232,19 +1274,40 @@ function syncDir(self, params, directionIsToS3) {
}

function handleError(err) {
if (fatalError) return;
if (fatalError) {
logger.error('handleError called after a fatal error was already recorded');
if (err) {
logger.error(`${err.stack}`);
}
return;
}
if (err) {
logger.error(`${err}`);
logger.error(`${err.stack}`);
}
logger.warn('Setting fatal error to true');
fatalError = true;
ee.emit('error', err);
}

/*
Looks for existing files on S3. Files already present on
S3 are skipped
*/
function findAllS3Objects() {

logger.info('Starting to find objects on S3');
var finder = self.listObjects(listObjectsParams);
finder.on('error', handleError);
finder.on('error', function(err) {
logger.error(`Failed while finding objects on S3 - ${err}`);
handleError(err);
});
finder.on('data', function(data) {
if (fatalError) return;
ee.objectsFound += data.Contents.length;
ee.emit('progress');
data.Contents.forEach(function(object) {
logger.info(`Found S3 object - ${object.Key}`);
if (!object.Key.endsWith('/')) {
object.key = object.Key.substring(prefix.length);
allS3Objects.push(object);
@@ -1390,6 +1453,7 @@ function doWithRetry(fn, tryCount, delay, cb) {
if (tryIndex >= tryCount) {
cb(err);
} else {
logger.info(`Retrying after ${delay} ms`);
setTimeout(tryOnce, delay);
}
}
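// doWithRetry(fn, tryCount, delay, cb) calls fn up to tryCount times, waiting
// `delay` ms (self.s3RetryDelay) between attempts, and passes only the final
// error to cb. S3 calls in this file are wrapped in it, e.g.:
//   doWithRetry(tryPuttingObject, self.s3RetryCount, self.s3RetryDelay, onPutObjectDone);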
5 changes: 3 additions & 2 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "@elucidatainc/s3-node-client",
"version": "4.5.0",
"version": "4.5.1",
"description": "high level amazon s3 client. upload and download files and directories",
"main": "lib/index.js",
"scripts": {
@@ -41,7 +41,8 @@
"mkdirp": "~0.5.0",
"pend": "~1.2.0",
"rimraf": "~2.2.8",
"streamsink": "~1.2.0"
"streamsink": "~1.2.0",
"log4js": "^6.5.2"
},
"bugs": {
"url": "https://github.com/ElucidataInc/node-s3-client/issues"
