Cool AWS Scripts
Most AWS S3 calls return an AWS.Request object:
s3.listObjects( {Bucket: bucket_name}, func_cb); // Operation kicks off.
// If no callback passed, operation is not kicked off.
var request = s3.listObjects({Bucket: bucket_name});
If no callback is supplied, you must call request.send() to start the operation.
For better readability, you can register event handlers first and send explicitly:
request.on('success', okay_cb);
request.on('error', error_cb);
request.send(); // send only when you are ready.
// Or chain it all in one statement:
request.on('success', okay_cb).on('error', error_cb).send();
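For example (a sketch: 'my-bucket' is a placeholder and the handlers are written inline), the 'success' event hands you an AWS.Response whose .data field holds the de-serialized operation output:

var request = s3.listObjects({Bucket: 'my-bucket'});
request.on('success', function(response) {
  // response.data is the same object a plain callback would receive as its second argument.
  response.data.Contents.forEach(function(obj) {
    console.log(obj.Key, obj.Size);
  });
});
request.on('error', function(err) {
  console.error('listObjects failed:', err.message);
});
request.send();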
You can call .promise() on the request object to kick off the operation and get back a promise; see https://aws.amazon.com/blogs/developer/support-for-promises-in-the-sdk/. This is useful for collecting a list of promises and waiting for all of them:
var single_promise = Promise.all(some_collection.map(fn_returns_promise));
// Promise.all turns many promises into a single promise that resolves once they all do.
single_promise.then(cb);
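A compact sketch of that pattern (the bucket names are placeholders, and the s3 client is assumed to be configured as below):

var bucket_names = ['bucket-a', 'bucket-b'];
var listings = bucket_names.map(function(name) {
  // Each .promise() call starts its request immediately.
  return s3.listObjects({Bucket: name}).promise();
});
Promise.all(listings)
  .then(function(results) {
    results.forEach(function(result, i) {
      console.log(bucket_names[i] + ': ' + result.Contents.length + ' objects');
    });
  })
  .catch(function(err) { console.error(err); });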
var AWS = require('aws-sdk');
var fs = require('fs');

// Fall back to bluebird if the runtime has no native Promise implementation.
if (typeof Promise === 'undefined') {
  AWS.config.setPromisesDependency(require('bluebird'));
}

var s3 = new AWS.S3({apiVersion: '2006-03-01', region: 'us-west-2'});
var ses = new AWS.SES({apiVersion: '2010-12-01', region: 'us-west-2'});
// Take a list of objects containing file data and send an email
var sendEmail = function sendEmail(files) {
  var keys = files.map(function(file) {
    return file.key;
  });
  var body = keys.join('\n') + '\n\nobjects were successfully uploaded.';
  var params = {
    Source: 'from@email.com',
    Destination: {
      ToAddresses: ['to@email.com']
    },
    Message: {
      Subject: {
        Data: 'Batch PutObject job completed'
      },
      Body: {
        Text: {
          Data: body
        }
      }
    }
  };
  return ses.sendEmail(params).promise();
};
// Upload a list of files to an S3 bucket
var putBatch = function putBatch(bucket, files) {
  // Make all the putObject calls immediately
  // Will return rejected promise if any requests fail
  return Promise.all(files.map(function(file) {
    var params = {
      Bucket: bucket,
      Key: file.key,
      Body: file.stream
    };
    return s3.putObject(params).promise();
  }));
};
// Create streams for files
var fileNames = fs.readdirSync('/path/to/dir/');
var files = fileNames.map(function(fileName) {
  return {
    key: fileName,
    stream: fs.createReadStream('/path/to/dir/' + fileName)
  };
});
// Upload directory of files to S3 bucket and send an email on success
putBatch('myBucket', files)
  .then(sendEmail.bind(null, files))
  .catch(console.error.bind(console));
See https://docs.aws.amazon.com/lambda/latest/dg/with-s3-example.html and https://stackoverflow.com/questions/25705067/using-async-waterfall-in-node-js
::
// Download the image from S3, transform it, and upload to a different S3 bucket.
async.waterfall([
  function download(next) {
    // Download the image from S3 into a buffer.
    s3.getObject({ Bucket: srcBucket, Key: srcKey }, next);
  },
  function transform(response, next) { /* .... */ },
  function transform2(response, next) { /* .... */ },
  function transform3(response, next) { /* .... */ }
]);
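For completeness, here is a hedged sketch of how such a waterfall typically finishes, with an upload step and a final callback; async, dstBucket, and dstKey are assumptions here, not part of the snippet above:

var async = require('async');

async.waterfall([
  function download(next) {
    s3.getObject({ Bucket: srcBucket, Key: srcKey }, next);
  },
  function transform(response, next) {
    // Each step hands its results to the next one via next(err, ...args).
    next(null, response.ContentType, response.Body);
  },
  function upload(contentType, data, next) {
    // dstBucket/dstKey are hypothetical destination placeholders.
    s3.putObject({ Bucket: dstBucket, Key: dstKey, Body: data, ContentType: contentType }, next);
  }
], function(err) {
  // The final callback runs once, with the first error or after the last step succeeds.
  if (err) { console.error('waterfall failed:', err); }
  else { console.log('waterfall finished'); }
});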
var myProm = new Promise(function(resolve, reject) {
  // Kicked off during the Promise constructor itself ...
  doSomethingAsync(function(err, result) {
    if (err) { reject(err); }
    else { resolve(result); }
  });
})
.then(function(result) { return result; })
.catch(function(err) { return err; });
myProm.then() can be called multiple times later.
If the promise is already *settled*, the then() callback still runs, asynchronously on the next tick.
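A quick sketch of that behaviour (no AWS involved):

var settled = Promise.resolve(42); // already fulfilled
settled.then(function(value) { console.log('first handler:', value); });
settled.then(function(value) { console.log('second handler:', value); });
// Both handlers fire, but always asynchronously on a later tick, never synchronously.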