Node.js to AWS S3, my introduction guide to the SDK

Dynamic languages may seem to be error prone in some ways because they often don’t have something like a compile time check and so on.

Take the case of Node.js, it has a high level of abstraction (you don’t have to face the bare metal of your machine) and its simplicity and conciseness of syntax allow us to write a lot less code.

The JavaScript programming language

Verbose code like that written in Java leaves no doubt about what the program intends to do, but it can be tiring to write or even to read, although many IDEs can help with their autocomplete.

With Node.js instead, like many other dynamic languages, you can write less code and do more magic. And as expert programmers say, the less code you write, the fewer bugs it is likely to contain.

Node.js can also be powerful, and in some ways lets you decide on lower-level details. It also has a huge number of libraries for any need, and it performs very well thanks to its asynchronous, non-blocking model.

This tutorial is focused on AWS S3. As a prerequisite, please install and configure the AWS CLI, entering your credentials and default region.

Let’s begin by installing node and then installing our SDK through means of npm, the node package manager:

$ npm install aws-sdk

As a starting point we have to create an empty Bucket, let’s do it by running create_buckets.js:

// create_buckets.js — prompt for a name on stdin and create a public-read
// S3 bucket with a UUID suffix to make the name globally unique.
var AWS = require('aws-sdk');
var uuid = require('uuid');
var standard_input = process.stdin;


standard_input.setEncoding('utf-8');

AWS.config.update({region: 'us-west-2'});
// Declared with `var` — the original bare assignment created an implicit global.
var s3 = new AWS.S3({apiVersion: '2006-03-01'});


console.log("Enter a name for your Bucket:");


// `once` instead of `on`: we only need the first line of input, and it
// avoids leaving a listener attached for the remainder of the process.
standard_input.once('data', function (name) {

	// S3 bucket names must be globally unique, hence the v4 UUID suffix.
	var bucketName = name.trim() + '-' + uuid.v4();


	var bucketParams = {
	  Bucket : bucketName,
	  ACL : 'public-read'
	};


	s3.createBucket(bucketParams, function(err, data) {

	  if (err) {
		console.log("Error:", err);
	  } else {
		console.log("Success:", data.Location);
	  }

	  // Exit explicitly: an open stdin stream would otherwise keep the
	  // event loop (and the process) alive.
	  process.exit();

	});

});

The second thing we should do is to show our newly created Buckets, we can do it by means of this list_buckets.js:

// list_buckets.js — list every bucket in the account and the objects
// each one contains.
var AWS = require('aws-sdk');


AWS.config.update({region: 'us-west-2'});
// Declared with `var` — the original bare assignment created an implicit global.
var s3 = new AWS.S3({apiVersion: '2006-03-01'});


s3.listBuckets(function(err, buck) {

  if (err) {

    console.log("Error:", err);

  } else if (buck.Buckets.length > 0) {

    buck.Buckets.forEach(function(bucket) {

      console.log("Here are your Buckets:", bucket);

      var bucketParams = {
        Bucket : bucket.Name,
      };

      // NOTE: listObjects returns at most 1000 keys per call; buckets with
      // more objects would need pagination via `Marker`/`IsTruncated`.
      s3.listObjects(bucketParams, function(listErr, objects) {
        // Distinct name so we don't shadow the outer callback's `err`.
        if (listErr) {
          console.log("Error:", listErr);
        } else {
          console.log("Objects:", objects);
        }
      });
    });

  } else {
    console.log("You have no Buckets yet.");
  }

});

We now want the capability of putting some files on our Buckets, first let’s create one:

$ touch testfile.txt

and then we will run this put_files.js node application:

// put_files.js — prompt for a bucket name and a local file path, then
// stream the file up to S3.
var AWS = require('aws-sdk');

var fs = require('fs');
var path = require('path');

var standard_input = process.stdin;


standard_input.setEncoding('utf-8');

AWS.config.update({region: 'us-west-2'});
// Declared with `var` — the original bare assignment created an implicit global.
var s3 = new AWS.S3({apiVersion: '2006-03-01'});


// Wrap the next line of stdin input in a Promise so the two prompts can be
// sequenced with .then() instead of nesting callbacks.
let askForBucketName = function () {
    return new Promise(function(resolve, reject) {

		console.log("Please enter the name of your Bucket:");

		// `once`, not `on`: a lingering listener would fire again (and be
		// silently ignored by the already-settled Promise) when the second
		// prompt's answer arrives.
		standard_input.once('data', function (name) {
			resolve(name.trim());
		});
    });
}


askForBucketName().then(function(bucketName) {

	console.log("Please enter the name of your file:");

	standard_input.once('data', function (fileName) {

		// Key = bare file name; Body = stream, so large files aren't
		// buffered entirely in memory.
		var key = path.basename(fileName.trim());
		var body = fs.createReadStream(fileName.trim());

		body.on('error', function(err) {
		  console.log('File Error:', err);
		});


		var uploadParams = {Bucket: bucketName, Key: key, Body: body};

		s3.upload(uploadParams, function (err, data) {

		  // `else if`: err and data are mutually exclusive, and the original
		  // `if ... if ...` read as two independent conditions.
		  if (err) {
			console.log("Error:", err);
		  } else if (data) {
			console.log("Upload Success:", data.Location);
		  }

		  process.exit();

		});

	});

});

As you can see, beginning with this node file, we have adopted the Promise style of coding, which makes the code cleaner. Otherwise we would have ended up in callback hell, a common pitfall when writing asynchronous code.

We could also have used async/await, which would have made our code even more imperative. But it’s just a question of syntactic sugar; for the sake of this quick example, it is sufficient.

The advantages of this approach will be more evident in the subsequent node application files, for example let’s run our delete_buckets.js:

// delete_buckets.js — empty a bucket, then delete it. S3 refuses to delete
// a non-empty bucket, so every object must be removed first.
var AWS = require('aws-sdk');
var standard_input = process.stdin;


standard_input.setEncoding('utf-8');

AWS.config.update({region: 'us-west-2'});
// Declared with `var` — the original bare assignment created an implicit global.
var s3 = new AWS.S3({apiVersion: '2006-03-01'});


console.log("Please enter the name of the Bucket to delete:");


standard_input.once('data', function (name) {

	var bucketParams = {
	  Bucket : name.trim()
	};


	// Resolve with the bucket's object listing (at most 1000 keys per call;
	// larger buckets would need pagination). Rejects on error — the original
	// only logged and left the Promise pending forever, hanging the process.
	let returnObjects = function() {
		return new Promise(function(resolve, reject) {
			s3.listObjects(bucketParams, function(err, objects) {
				if (err) {
					console.log("Error:", err);
					reject(err);
				} else {
					resolve(objects);
				}
			});
		});
	}


	// Delete every object and resolve only after ALL deletions have
	// completed. The original resolved immediately, racing deleteBucket
	// against the still-in-flight deleteObject calls.
	let emptyBucket = function(objects) {
		var deletions = objects.Contents.map(function(content) {
			return new Promise(function(resolve, reject) {

				let delBuckParams = {
					Bucket: name.trim(),
					Key: content.Key
				}

				s3.deleteObject(delBuckParams, function(err, data) {
					if (err) {
						console.log(err, err.stack);
						reject(err);
					} else {
						resolve(data);
					}
				});
			});
		});

		return Promise.all(deletions);
	}


	returnObjects()
	// Return the Promise so the next .then() actually waits for the bucket
	// to be empty — the original dropped it, deleting the bucket too early.
	.then(function(objects) { return emptyBucket(objects); })
	.then(function() {
		s3.deleteBucket(bucketParams, function(err, data) {

		  if (err) {
			console.log("Error:", err);
		  } else {
			console.log("The following Bucket has been deleted:", name.trim());
		  }

		  process.exit();

		});
	})
	.catch(function() {
		// Failures were already logged where they occurred; stop cleanly
		// with a non-zero status instead of hanging.
		process.exit(1);
	});

});

The last thing is a way to delete a single file from our Buckets, let’s do that running this node app, delete_files.js:

// delete_files.js — prompt for a bucket name and an object key, then
// delete that single object from the bucket.
var AWS = require('aws-sdk');
var standard_input = process.stdin;


standard_input.setEncoding('utf-8');

AWS.config.update({region: 'us-west-2'});
// Declared with `var` — the original bare assignment created an implicit global.
var s3 = new AWS.S3({apiVersion: '2006-03-01'});


// Wrap the next line of stdin input in a Promise so the two prompts can be
// sequenced with .then() instead of nesting callbacks.
let askForBucketName = function () {
    return new Promise(function(resolve, reject) {

		console.log("Please enter the name of your Bucket:");

		// `once`, not `on`: the listener must not linger and re-fire when
		// the second prompt's answer arrives.
		standard_input.once('data', function (name) {
			resolve(name.trim());
		});
    });
}


askForBucketName().then(function(bucketName) {

	console.log("Please enter the name of your file:");

	standard_input.once('data', function (fileName) {

		// Trim once and reuse: the raw input still carries the newline,
		// which the original leaked into the success message.
		let key = fileName.trim();

		let params = {
			Bucket: bucketName,
			Key: key
		}

		s3.deleteObject(params, function(err, data) {

			if (err) {
				console.log(err, err.stack);
			} else {
				console.log("The following file has been deleted:", key);
			}

			process.exit();
		});
	});
});

This tutorial is based on a Windows machine, with the help of Git Bash as a minimal Linux shell. All that to make the output more readable, in that this kind of shell has a coloured output text.

As always, you can refer to a GitHub repository for this tutorial, and you can compare the Node.js AWS SDK to other SDKs, such as the Java one, and see how much less wordy it is.

Should you need more explanations, contact me in person and I’ll see you for the next tutorial, bye.

Did you like this post? Please comment here below and share it on your preferred social networks, thank you!

4 thoughts on “Node.js to AWS S3, my introduction guide to the SDK

  1. Hi there…
    my name is Akuti. I’d like to say my opinion on this.
    Why do you say Node.js is less wordy when functional Java has the same degree of conciseness?

  2. Thanks Akuti,
    yes, you have a point. But consider that dynamic languages are a lot more flexible and allow you to be less strict when it comes to types.
    Obviously everything comes at a price; it’s just a question of taste, or of how you are accustomed to getting your job done.
    Bye

  3. I’m glad you have your opinion,
    even though I don’t perfectly agree with you.
    Thank you

Leave a Reply

Give me your opinion, I will be grateful.