Tags: javascript, amazon-s3, ecmascript-6, filereader, aws-sdk-js

Send ArrayBuffer to S3 via PUT to a signed URL


I am progressively loading a file into a buffer. The buffer itself is valid, but the browser crashes once the ArrayBuffer has finished loading the whole file into it, because memory is exceeded. What I need is to send each piece of the buffer (buf = this.concatBuffers(buf, buffer);) to the axios PUT request as it is read, so the file is uploaded to S3 progressively instead of being accumulated in a single variable returned by the promise.

How do I modify the link between readFileAsBuffer and the uploadFileToS3 method to do this?

This is my code so you can follow the process.

// Concatenate two typed arrays of the same constructor into one new array.
concatTypedArrays = (a, b) => {
  const c = new a.constructor(a.length + b.length);
  c.set(a, 0);
  c.set(b, a.length);
  return c;
};

// Accept ArrayBuffers or typed arrays, concatenate, and return an ArrayBuffer.
concatBuffers = (a, b) =>
  this.concatTypedArrays(
    new Uint8Array(a.buffer || a),
    new Uint8Array(b.buffer || b),
  ).buffer;
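
For reference, a tiny sanity check of these helpers (the values are made up, and it assumes it runs inside the same component):

// Hypothetical illustration of concatBuffers; not part of the upload flow.
const first = new Uint8Array([1, 2]).buffer;
const second = new Uint8Array([3, 4]).buffer;
const joined = this.concatBuffers(first, second);
console.log(new Uint8Array(joined)); // Uint8Array [1, 2, 3, 4]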

readFileAsBuffer = file =>
  new Promise((resolve, reject) => {
    const fileReader = new FileReader();
    fileReader.file = file;
    let buf = new ArrayBuffer(0);
    const fileChunks = new FileChunker(file, 2097152); // 2 MB chunks

    fileReader.readAsArrayBuffer(fileChunks.blob());

    fileReader.onload = e => {
      this.onProgress(fileChunks);
      const buffer = e.target.result;
      // Append the freshly read chunk to everything read so far.
      buf = this.concatBuffers(buf, buffer);

      if (fileChunks.hasNext()) {
        fileChunks.next();
        fileReader.readAsArrayBuffer(fileChunks.blob());
        return;
      }

      resolve(buf);
    };

    fileReader.onerror = err => {
      reject(err);
    };
  });
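
FileChunker isn't shown in the question; a minimal sketch consistent with how it is used above (blob(), hasNext(), next(), 2097152-byte slices) could look like this:

// Hypothetical FileChunker, inferred from its usage above; not the
// actual implementation from the question.
class FileChunker {
  constructor(file, chunkSize) {
    this.file = file;
    this.chunkSize = chunkSize;
    this.offset = 0;
  }

  // Slice out the current chunk; File.prototype.slice is lazy and does
  // not read the bytes into memory by itself.
  blob() {
    return this.file.slice(this.offset, this.offset + this.chunkSize);
  }

  hasNext() {
    return this.offset + this.chunkSize < this.file.size;
  }

  next() {
    this.offset += this.chunkSize;
  }
}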

uploadFileToS3 = fileObject => {
  // Return the promise so callers can chain on the upload.
  return new Promise((resolve, reject) => {
    const decodedURL = decodeURIComponent(fileObject.signedURL);

    this.readFileAsBuffer(fileObject.fileRef).then(fileBuffer => {
      console.log(fileBuffer);
      axios
        .put(decodedURL, fileBuffer, {
          headers: {
            'Content-Type': fileObject.mime,
            'Content-MD5': fileObject.checksum,
            'Content-Encoding': 'UTF-8',
            'x-amz-acl': 'private',
          },
          onUploadProgress: progressEvent => {
            const { loaded, total } = progressEvent;
            const uploadPercentage = Math.round((loaded * 100) / total);
            this.setState({ uploadProgress: uploadPercentage });
            console.log(`${uploadPercentage}%`);

            if (uploadPercentage === 100) {
              console.log('complete');
            }
          },
        })
        .then(response => {
          resolve(response.data);
        })
        .catch(error => {
          reject(error);
        });
    }).catch(reject); // propagate read errors so the outer promise rejects
  });
};

uploadAllFilesToS3 = () => {
  const { files } = this.state;
  return new Promise((resolve, reject) => {
    Object.keys(files).map(idx => {
      this.uploadFileToS3(files[idx])
        .then(response => {
          this.setState({ files: [] });
          // uploadFileToS3 already resolves with response.data.
          resolve(response);
        })
        .catch(error => {
          reject(error);
        });
    });
  });
};
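
Note that the promise above settles as soon as the first upload resolves or rejects; a variant using Promise.all (a sketch, assuming the same state shape) would wait for every file:

// Sketch: resolve only after all uploads complete.
uploadAllFilesToS3 = () => {
  const { files } = this.state;
  return Promise.all(
    Object.keys(files).map(idx => this.uploadFileToS3(files[idx])),
  ).then(responses => {
    this.setState({ files: [] });
    return responses;
  });
};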

calcFileMD5 = file =>
  new Promise((resolve, reject) => {
    const fileReader = new FileReader();
    fileReader.file = file;
    const spark = new SparkMD5.ArrayBuffer();
    const fileChunks = new FileChunker(file, 2097152);

    fileReader.readAsArrayBuffer(fileChunks.blob());

    fileReader.onload = e => {
      this.onProgress(fileChunks);
      const buffer = e.target.result;
      spark.append(buffer);

      if (fileChunks.hasNext()) {
        fileChunks.next();
        fileReader.readAsArrayBuffer(fileChunks.blob());
        return;
      }

      const hash = spark.end();
      // S3 expects Content-MD5 as the base64 encoding of the binary digest.
      const checksumAWS = Buffer.from(hash, 'hex').toString('base64');

      resolve(checksumAWS);
    };

    fileReader.onerror = err => {
      reject(err);
    };
  });
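
For context, the value resolved here is what goes into the Content-MD5 header; hypothetical glue between the two methods might look like:

// Hypothetical wiring: compute the checksum first, then upload.
this.calcFileMD5(fileObject.fileRef).then(checksum => {
  fileObject.checksum = checksum;
  return this.uploadFileToS3(fileObject);
});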

Solution

  • I ended up not needing to build my own buffer of the file at all. If I pass the file reference returned by the input directly to axios (or XHR), the request chunks the upload automatically, so memory is never exceeded.

    Initially I could only make this work with XMLHttpRequest, but I quickly found a way to wrap it in axios, which neatens the logic.

    XMLHttpRequest:
    const xhr = createCORSRequest('PUT', url);

    if (!xhr) {
      console.log('CORS not supported');
    } else {
      xhr.onload = function () {
        if (xhr.status === 200) {
          console.log('completed');
        } else {
          console.log('Upload error: ' + xhr.status);
        }
      };

      xhr.onerror = function (err) {
        console.log(err);
      };

      xhr.upload.onprogress = function (progressEvent) {
        console.log(progressEvent);
      };

      xhr.setRequestHeader('Content-Type', file.type);
      xhr.setRequestHeader('Content-MD5', md5_base64_binary);
      xhr.setRequestHeader('Content-Encoding', 'UTF-8');
      xhr.setRequestHeader('x-amz-acl', 'private');
      xhr.send(file);
    }
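
    createCORSRequest isn't defined above; a minimal sketch of the usual helper it refers to (an assumption on my part, not code from the answer) would be:

    // Hypothetical createCORSRequest: returns an opened XHR, or null
    // when the browser cannot make CORS requests.
    function createCORSRequest(method, url) {
      const xhr = new XMLHttpRequest();
      if ('withCredentials' in xhr) {
        // Modern browsers support CORS on XMLHttpRequest directly.
        xhr.open(method, url, true);
        return xhr;
      }
      return null;
    }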
    

    Or, using axios:

    uploadFileToS3 = fileObject => {
      return new Promise((resolve, reject) => {
        const decodedURL = decodeURIComponent(fileObject.signedURL);
    
        axios
          .put(decodedURL, fileObject.fileRef, {
            headers: {
              'Content-Type': fileObject.mime,
              'Content-MD5': fileObject.checksum,
              'Content-Encoding': 'UTF-8',
              'x-amz-acl': 'private',
            },
            onUploadProgress: progressEvent => {
              const { loaded, total } = progressEvent;
              const uploadPercentage = Math.round((loaded * 100) / total);
              this.setState({ uploadProgress: uploadPercentage });
            },
          })
          .then(response => {
            resolve(response.data);
          })
          .catch(error => {
            reject(error);
          });
      });
    };
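
    A hypothetical call site, with fileObject assembled from the pieces shown earlier in the question (all values here are placeholders):

    // file comes from the <input type="file"> element; presignedUrl and
    // md5Base64 come from the backend and calcFileMD5 respectively.
    const fileObject = {
      fileRef: file,
      signedURL: presignedUrl,
      mime: file.type,
      checksum: md5Base64,
    };

    this.uploadFileToS3(fileObject).then(data => {
      console.log('upload complete', data);
    });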