Tags: node.js, amazon-web-services, amazon-s3, file-upload

AWS S3 uploading 0 B file - node.js


I am trying to upload a file to AWS S3 using putObject, but the uploaded files end up 0 bytes in size.

I do get a successful response back from the putObject call.

Node.js code:

const aws = require("aws-sdk");
const s3 = new aws.S3();

module.exports = {
  upload: function(req, res, next) {
    console.log("Going to upload");
    console.log(req.files);
    let uploadFile = req.files.file;
    const s3PutParams = {
      Bucket: process.env.S3_BUCKET_NAME,
      Key: uploadFile.name,
      Body: uploadFile.data,
      ACL: "public-read"
    };

    const s3GetParams = {
      Bucket: process.env.S3_BUCKET_NAME,
      Key: uploadFile.name
    };

    console.log(s3PutParams);
    s3.putObject(s3PutParams, function(err, response) {
      if (err) {
        console.error(err);
      } else {
        console.log("Response is", response);
        var url = s3.getSignedUrl("getObject", s3GetParams);
        console.log("The URL is", url);
        res.json({
          returnedUrl: url,
          publicUrl: `https://${process.env.S3_BUCKET_NAME}.s3.amazonaws.com/${uploadFile.name}`
        });
      }
    });
  }
};

Testing through Postman: (screenshot)

Backend console log: (screenshot)

Can anyone help me in figuring out what is wrong?

EDIT on 11/20: @EmmanuelNK helped spot that Buffer.byteLength(req.files.file.data) is 0. He had the following questions:

Are you trying to write the whole buffer into memory or are you trying to stream it to S3?

Sorry if the answer is not to the point, I'm still getting my feet wet. Basically I want to upload an image to S3 and then later use that URL to show it on a webpage. In other words, something like Photobucket.

How are you using upload?

For now I am just testing my backend code (posted in the question) using Postman. Once I get that going, a file upload form on the front end will call this route.

Is that helpful? Thanks in advance for your help.
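
For reference, this is the kind of check that surfaced the problem. A minimal sketch (logging only; it assumes the same req.files.file shape as the route above):

    // inside the route handler, before building s3PutParams
    let uploadFile = req.files.file;
    console.log("File name:", uploadFile.name);
    console.log("Buffer length:", Buffer.byteLength(uploadFile.data)); // logs 0 here, which is the symptom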


Solution

  • If you're using express-fileupload as the file-uploading middleware, and you've set the useTempFiles option to true, keep in mind that the file's data buffer will be empty (check the usage docs), which matches the issue you're facing. To get around this, read the temp file back from disk to get the intended file buffer, as in the code below (a sketch of the matching middleware setup follows after the code).

    import fs from 'fs';
    // OR
    const fs = require('fs');
    
    // in your route
    let uploadFile = req.files.file;
    
    // Read the temp file that express-fileupload wrote to disk;
    // with useTempFiles: true, uploadFile.data is an empty buffer.
    fs.readFile(uploadFile.tempFilePath, (err, uploadedData) => {
        if (err) { throw err; }
    
        const s3PutParams = {
            Bucket: process.env.S3_BUCKET_NAME,
            Key: uploadFile.name,
            Body: uploadedData, // <--- the buffer read from disk, not uploadFile.data
            ACL: "public-read"
        };
    
        const s3GetParams = {
            Bucket: process.env.S3_BUCKET_NAME,
            Key: uploadFile.name
        };
    
        console.log(s3PutParams);
        s3.putObject(s3PutParams, function(err, response) {
            if (err) {
                console.error(err);
                throw err;
            } else {
                console.log("Response is", response);
                var url = s3.getSignedUrl("getObject", s3GetParams);
                console.log("The URL is", url);
                res.json({
                    returnedUrl: url,
                    publicUrl: `https://${process.env.S3_BUCKET_NAME}.s3.amazonaws.com/${uploadFile.name}`
                });
            }
        });
    });
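
For completeness, this is roughly how the middleware would need to be registered for the above to apply. A minimal sketch, assuming express-fileupload; the tempFileDir value is only illustrative and not from the original post:

    const express = require('express');
    const fileUpload = require('express-fileupload');
    
    const app = express();
    
    // With useTempFiles: true, express-fileupload streams each upload to disk,
    // so req.files.file.data stays empty and req.files.file.tempFilePath
    // points at the file on disk instead.
    app.use(fileUpload({
        useTempFiles: true,
        tempFileDir: '/tmp/' // illustrative directory; pick one that suits your deployment
    }));

If useTempFiles is left at its default of false, the whole upload is buffered in memory and req.files.file.data can be passed directly as Body, so the original code in the question would work as written in that configuration.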