Search code examples
javascriptamazon-s3file-uploadsails.jssails-skipper

Multi-file upload with skipper-better-s3 and Sails.js returns the same key


As seen in the title, I am currently using Sails.js + skipper-better-s3 for S3 uploads. I started with uploading one file, which works great; then, because of a change request, I needed multi-file upload at once, so I added a for loop. But by doing this, all the keys end up the same and only one file is actually uploaded — the last file uploaded, but stored under the first upload's filename.

I did read some articles, and people say something like "the problem is that the for loop is synchronous while the file upload is asynchronous", and that the way around this is to use recursion. I tried that too, but no luck — the same thing happens.

My recursive code is below...

s3_upload_multi: async (req, res) => {
    // Resolves the S3 key prefix for the given field.
    // (Stubbed here, as in the question: a switch on fieldName fills in
    // `path`; the other checks validate the upload content type.)
    const generatePath = (rootPath, fieldName) => {
        let path;
        // this is just a switch statement here to check which fieldName is provided then value of path will depend on it
        // as for the other two variable is just checking if upload content type is correct
        return { path };
    };

    // Wraps skipper's callback-style .upload() in a Promise and maps every
    // uploaded file to a response descriptor.
    //
    // IMPORTANT: req.file(fieldName).upload() consumes EVERY file in the
    // upstream in a single call. It must therefore be invoked exactly once
    // per field — calling it once per file (via recursion or a for loop)
    // re-uploads the whole upstream each time with one shared `saveAs`
    // string, which is why every file ended up under the same key.
    const processUpload = ({ fieldName, awsOp }) =>
        new Promise((resolve, reject) => {
            req.file(fieldName).upload(awsOp, (err, filesUploaded) => {
                // `return` is required here: without it, execution would
                // fall through after reject() and dereference an undefined
                // `filesUploaded`.
                if (err) return reject(err);
                resolve(filesUploaded.map((file) => ({
                    status: true,
                    errCode: 200,
                    msg: 'OK',
                    response: {
                        url: file.extra.Location,
                        size: file.size,
                        type: file.filename.split('.').pop(),
                        filename: file.filename,
                        key: file.extra.Key,
                        field: fieldName,
                    },
                })));
            });
        });

    try {
        const fieldName = req._fileparser.upstreams[0].fieldName;
        const rootPath = `${sails.config.aws.upload.path.root}`;
        const { path } = generatePath(rootPath, fieldName);

        const awsOp = {
            adapter: require('skipper-better-s3'),
            key: sails.config.aws.access_key,
            secret: sails.config.aws.secret_key,
            // Per the skipper docs, `saveAs` may be a FUNCTION instead of a
            // string: skipper invokes it once per incoming file, so each
            // file receives its own randomized key. This is the fix for the
            // duplicate-key bug described in the question.
            saveAs: (__newFileStream, next) => {
                UtilsService.genRan(8)
                    .then((genRan) =>
                        next(undefined, `${path}${genRan}-${__newFileStream.filename}`))
                    .catch(next);
            },
            bucket: sails.config.aws.bucket,
            s3params: {
                ACL: 'public-read'
            },
        };

        // Single upload call handles all files; returns one descriptor per file.
        return await processUpload({ fieldName, awsOp });
    } catch (e) {
        console.log(e, 'inside UploadService');
        return false;
    }
}

Below is my code using a for loop, which is quite similar to the above.

s3_upload_multi: async (req, res) => {
    // Resolves the S3 key prefix for the given field.
    const generatePath = (rootPath, fieldName) => {
        let path;
        // this is just a switch statement here to check which fieldName is provided then value of path will depend on it
        // as for the other two variable is just checking if upload content type is correct
        return { path };
    };

    // Wraps skipper's callback-style .upload() in a Promise.
    // NOTE(review): req.file(fieldName).upload() consumes the WHOLE
    // upstream (all files) in one call, using the single `saveAs` string in
    // awsOp — so calling this once per loop iteration is what makes every
    // file land under the same key (the bug this question describes).
    const processUpload = async ({
        fieldName,
        awsOp,
        fileExtension,
        rootPath,
        fileName,
    }) => {
        return new Promise(function (resolve, reject) {
            req.file(fieldName).upload(awsOp, async (err, filesUploaded) => {
                // BUG: missing `return` — on error, execution continues and
                // dereferences `filesUploaded`, which is undefined.
                if (err) reject(err);
                const filesUploadedF = filesUploaded[0]; // F = first file
                const response = {
                    status: true,
                    errCode: 200,
                    msg: 'OK',
                    response: {
                        url: filesUploadedF.extra.Location,
                        size: filesUploadedF.size,
                        type: fileExtension,
                        filename: filesUploadedF.filename,
                        key: filesUploadedF.extra.Key,
                        field: fieldName,
                    }
                };

                resolve(response);
            });
        });
    }

    try {
        const fieldName = req._fileparser.upstreams[0].fieldName;
        const files = req.file(fieldName)._files;

        // NOTE(review): awaiting inside this loop serializes the iterations,
        // but each iteration still triggers a full-upstream upload with one
        // shared saveAs value — see processUpload above.
        for (const file of files) {
            const fileUpload = file.stream;
            const rootPath = `${sails.config.aws.upload.path.root}`;
            const fileCType = fileUpload.headers['content-type'];
            // console.log(fileCType, 'fileCType');

            const fileName = fileUpload.filename;
            const { path } = generatePath(rootPath, fieldName);



            const fileExtension = fileUpload.filename.split('.').pop();

            // using a variable here because if this is an image, a thumbnail will be created with the same name as the original one
            const genRan = await UtilsService.genRan(8);
            // NOTE: `await` on a plain template literal is a no-op (it just
            // wraps the string in a resolved Promise and unwraps it).
            const fullPath = await `${path}${genRan}-${fileName}`;
            const awsOp = {
                adapter: require('skipper-better-s3'),
                key: sails.config.aws.access_key,
                secret: sails.config.aws.secret_key,
                saveAs: fullPath,
                bucket: sails.config.aws.bucket,
                s3params: {
                    ACL: 'public-read'
                },
            };

            const config = {
                fieldName,
                awsOp,
                fileExtension,
                rootPath,
                fileName,
            }

            const procceed = await processUpload(config);
            console.log(procceed, 'procceed');
        }
    } catch (e) {
        console.log(e, 'inside UploadService');
        return false;
    }

}

Where is my mistake that's causing such behavior? I checked my path — it's totally correct, with the correct filename too, when I console.log it.

Thanks in advance for any suggestions and help.


Solution

  • Took me quite a lot of time to figure this out ages ago — especially since you are using skipper-better-s3, whose documentation is not as detailed as skipper's. Going back to the skipper documentation: the saveAs field doesn't only take a string, it also takes a function, which you can use to get each file's filename and return the key you need. So you actually don't need recursion or a for loop at all.

    For example, using some of your code:

    const awsOp = {
        adapter: require('skipper-better-s3'),
        key: sails.config.aws.access_key,
        secret: sails.config.aws.secret_key,
        // skipper calls this function once per incoming file, so each file
        // gets its own key — unlike a single saveAs string shared by all.
        saveAs: (__newFileStream, next) => {
            // generatePath is the helper you wrote; note it now receives the
            // per-file filename as an extra argument.
            // __newFileStream.filename is the filename of each file before uploading.
            // The returned path is effectively the S3 key, filename included.
            const { path } = generatePath(rootPath, __newFileStream.filename, fieldName);
            // next(err, keyString): pass undefined for err on success.
            return next(undefined, path);
        },
        bucket: sails.config.aws.bucket,
        s3params: {
            ACL: 'public-read'
        },
    };
    

    skipper documentation https://www.npmjs.com/package/skipper#customizing-at-rest-filenames-for-uploads