I am trying to migrate to S3. Right now I am serving the images through URLs pointing at other hosts. Can I download them, resize them while I am at it (to save on size and serve them faster), and upload them to S3 without saving a copy on my machine?
I made this for a project I am working on where I want to use a CDN and save bandwidth — it might be useful to others as well; edits and suggestions are welcome.
// configure your env file
require("dotenv").config({ path: __dirname + "/../.env" });
const fetch = require("node-fetch");
const sharp = require("sharp");
const AWS = require("aws-sdk");
// endpoint example for DO (using S3 API)
// DigitalOcean Spaces is S3-compatible, so the stock aws-sdk S3 client works
// as long as a custom endpoint is supplied (region prefix "sfo3" here).
const spacesEndpoint = new AWS.Endpoint("sfo3.digitaloceanspaces.com");
// Credentials come from the .env file loaded above; never hard-code them.
const s3 = new AWS.S3({
endpoint: spacesEndpoint,
accessKeyId: process.env.DO_SPACES_KEY,
secretAccessKey: process.env.DO_SPACES_SECRET,
});
/**
 * Resize an image with sharp, entirely in memory.
 *
 * @param {Buffer|string} path - image data (Buffer) or a file path, anything sharp accepts
 * @param {string} [imageFit="fill"] - sharp resize fit mode ("fill", "cover", "contain", ...)
 * @param {number} [width=235] - target width in pixels
 * @param {number} [height=320] - target height in pixels
 * @returns {Promise<{data: Buffer, info: Object}>} resized image bytes plus sharp's
 *   output info (format, dimensions, ...) via resolveWithObject
 */
function ShrinkSize(path, imageFit = "fill", width = 235, height = 320) {
  // Keep original metadata (orientation, ICC profile) in the output.
  const pipeline = sharp(path)
    .resize(width, height, { fit: imageFit })
    .withMetadata();
  return pipeline.toBuffer({ resolveWithObject: true });
}
/**
 * Upload an image buffer to the S3-compatible bucket.
 *
 * @param {Buffer} buffer - image bytes to store
 * @param {string} path - key prefix (folder) inside the bucket
 * @param {string} name - file name without extension
 * @param {string} fileType - image format / extension (e.g. "png"), also used for Content-Type
 * @returns {Promise<string>} the object key the buffer was stored under
 * @throws {Error} when the upload fails
 */
async function uploadImage(buffer, path, name, fileType) {
  const key = `${path}/${name}.${fileType}`;
  const params = {
    Bucket: "your-bucket",
    Key: key,
    Body: buffer,
    ACL: "public-read",
    ContentType: `image/${fileType}`,
    Metadata: {
      "x-amz-meta-my-key": "your-value",
    },
  };
  try {
    // Await the upload so success is only logged (and the key returned)
    // after the object actually exists in the bucket. The original
    // callback form returned immediately, and its `throw` inside the
    // callback could never be caught by the caller.
    await s3.putObject(params).promise();
  } catch (err) {
    console.log(err, err.stack);
    throw new Error(`Failed to upload ${key}`);
  }
  console.log(`Successfully uploaded to ${key}, returned Key`);
  return key;
}
/**
 * Download an image from a URL, resize it in memory, and upload it to the bucket.
 *
 * @param {string} url - source image URL
 * @param {string} path - key prefix (folder) inside the bucket
 * @param {string} name - file name without extension
 * @param {string} [objectFit="cover"] - sharp fit mode forwarded to ShrinkSize
 * @returns {Promise<string>} the object key the resized image was uploaded under
 * @throws {Error} when the download or the upload fails
 */
async function DownloadShrinkUpload(url, path, name, objectFit = "cover") {
  const res = await fetch(url);
  // Fail loudly on HTTP errors instead of resizing an error page's bytes.
  if (!res.ok) {
    throw new Error(`Failed to download ${url}: ${res.status} ${res.statusText}`);
  }
  const resBuffer = await res.buffer();
  // Bug fix: forward objectFit — it was previously accepted but ignored,
  // so ShrinkSize always used its own "fill" default.
  const shrunkImage = await ShrinkSize(resBuffer, objectFit);
  // Bug fix: await the upload so failures propagate to the caller instead
  // of surfacing as an unhandled promise rejection.
  const resKey = await uploadImage(shrunkImage.data, path, name, shrunkImage.info.format);
  return resKey;
}
// example — attach a .catch() so a failed download/upload is reported
// instead of crashing the process with an unhandled promise rejection
DownloadShrinkUpload(
  "https://upload.wikimedia.org/wikipedia/commons/d/de/Wikipedia_Logo_1.0.png",
  "images",
  "wikipedia_logo",
).catch((err) => console.error(err));