Writable stream which takes paths and uploads them to S3

npm install s3ify
14 downloads in the last week
31 downloads in the last month


Writable stream which takes paths and uploads them to S3.

dependency status


// Example: stream local files to S3, naming each object after its ctime.
var s3ify = require('s3ify');
var fs = require('fs');
var path = require('path');

// Writable stream that uploads any path (or {src, dest} object) written to it.
// Credentials and target bucket come from the environment.
var s = s3ify({
  key: process.env.AWS_ACCESS_KEY,
  secret: process.env.AWS_SECRET_KEY,
  region: process.env.AWS_REGION,
  bucket: process.env.S3_BUCKET
});

// Derive an S3 object name for `file`: its ctime in epoch milliseconds
// plus the original file extension. Calls cb(err) or cb(null, name).
function format (file, cb) {
  fs.stat(file, function (err, stat) {
    // `return` is required here: without it execution falls through and
    // cb is invoked a second time with a bogus result built from `stat`.
    if (err) return cb(err);
    cb(null, stat.ctime.getTime().toString() + path.extname(file));
  });
}

var src = 'img/hasselblad.jpg';
format(src, function (err, dest) {
  if (err) throw err; // don't silently drop stat failures
  s.write({src: src, dest: dest});
});

// Fired for files that already exist in the bucket (upload skipped).
s.on('skip', function (obj) {
  console.log('already uploaded', obj);
});

// Fired after each successful upload.
s.on('upload', function (obj) {
  console.log('uploaded', obj);
});

// Fired on existence-check or upload failures.
s.on('error', function (obj) {
  console.error('error', obj);
});

See example/recursive.js for an example which recursively mirrors a directory to S3 using the recurse module.


var s = s3ify(opts)

Return a writable stream which uploads file paths written to it. Handles plain path strings or {src, dest} objects if the S3 object name should differ from the source file path.

The following opts must be passed in:

  • opts.key - AWS access key ID.
  • opts.secret - AWS secret access key.
  • opts.region - AWS geographic region.
  • opts.bucket - Existing S3 bucket for uploads.


s.on('skip', function (obj) {})

This event fires once for every file that already exists in the bucket; its upload is skipped.

s.on('upload', function (obj) {})

This event fires once for every file successfully uploaded to the bucket.

s.on('error', function (err) {})

This event fires whenever an error occurs while checking a file's existence in the bucket or uploading it.



npm loves you