
I currently have a backend where I can upload multiple images using multer. These images are passed through sharp to create a full-size and a thumbnail version of each one. An entry for the post is stored in my posts table, and the fileName of each image is stored in my Post_Images table.

My problem is that I am currently using multer's diskStorage to store the images on disk, and now I want to store them on AWS S3 instead.

I was trying to implement this but it did not work.

posts.js

const s3 = new AWS.S3({
    accessKeyId: process.env.AWS_ID,
    secretAccessKey: process.env.AWS_SECRET,
    Bucket: process.env.AWS_BUCKET
})

const storage = multer.diskStorage({
    destination: function (req, file, callback) {
        callback(null, "./uploads");
    },
    filename: function (req, file, callback) {
        console.log(file.originalname);
        callback(null, file.originalname + "-" + new Date().toISOString() + "-" + uuidv4());
    },
});

const multerS3Config = multerS3({
    s3: s3,
    bucket: process.env.AWS_BUCKET,
    metadata: function (req, file, cb) {
          cb(null, { fieldName: file.fieldname });
    },
    key: function (req, file, cb) {
          console.log(file)
         // cb(null, file.originalname + "-" + new Date().toISOString() + "-" + uuidv4())//
          cb(null, new Date().toISOString() + "-" + uuidv4()+ "-" + file.originalname)
    }
});

const upload = multer({
    storage: multerS3Config,
    limits: { fieldSize: 25 * 1024 * 1024 },
});

router.post(
    "/",
    [
        upload.array("images", config.get("maxImageCount")),
        imageResize,
    ],
    async (req, res) => {
        const paths = req.files.map((file) => ({ fileName: file.filename }));
        await Post.create(
            {
                title: req.body.title,
                userId: req.body.userId,
                Post_Images: paths.map((x) => ({ images: x.fileName })),
            },
            { include: [Post_Image] }
        ).then(() => res.status(201).send());
    }
);

ImageResize.js

const sharp = require("sharp");
const path = require("path");
const fs = require("fs");

const outputFolder = "public/assets";

module.exports = async (req, res, next) => {
    const images = [];

    const resizePromises = req.files.map(async (file) => {
        await sharp(file.path)
        .resize(2000)
        .jpeg({ quality: 50 })
        .toFile(path.resolve(outputFolder, file.filename + "_full.jpg"));

        await sharp(file.path)
        .resize(100)
        .jpeg({ quality: 30 })
        .toFile(path.resolve(outputFolder, file.filename + "_thumb.jpg"));

        fs.unlinkSync(file.path);

        images.push(file.filename);
    });
    await Promise.all([...resizePromises]);

    req.images = images;

    next();
};

Every time I try to upload a post, I keep getting

 UnhandledPromiseRejectionWarning: Error: Invalid input

I think I am getting this error because I am no longer using disk storage, and therefore I no longer have access to file.path and file.filename.

When I do console.log(file) in my ImageResize.js middleware, I get this:

 {
   fieldname: 'images',
   originalname: 'FILENAME',
   encoding: '7bit',
   mimetype: 'image/jpeg',
   size: 38184,
   bucket: 'BUCKET NAME',
   key: 'IMAGE NAME',
   acl: 'private',
   contentType: 'application/octet-stream',
   contentDisposition: null,
   storageClass: 'STANDARD',
   serverSideEncryption: null,
   metadata: { fieldName: 'images' },
   location: 'IMAGE LINK',
   etag: '""',
   versionId: undefined
 }
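For reference, by the time this middleware runs, multer-s3 has already streamed the file to S3, so there is no path or filename on disk; the object has to be addressed by file.key instead. Below is a minimal sketch of what the middleware could do in that setup, assuming aws-sdk v2 and the same s3 client from posts.js (the helper name resizeFromS3 is mine, not from the original code):

    // Sketch only: fetch the object that multer-s3 already uploaded and
    // hand its buffer to sharp, since file.path no longer exists.
    const sharp = require("sharp");

    async function resizeFromS3(s3, file) {
        // multer-s3 exposes bucket/key/location instead of path/filename
        const original = await s3
            .getObject({ Bucket: file.bucket, Key: file.key })
            .promise();

        // original.Body is a Buffer, which sharp accepts directly
        return sharp(original.Body)
            .resize(2000)
            .jpeg({ quality: 50 })
            .toBuffer();
    }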

UPDATE

Initially I was trying to use multer-s3 to accomplish this, but after looking at several answers I decided to try a different approach: use plain multer to handle the incoming file, resize it with sharp, and upload each resized file to S3 using the AWS SDK.
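In other words, multer's memoryStorage keeps the raw bytes on file.buffer, sharp resizes that buffer, and the resized buffer is what gets pushed to S3. A rough sketch of one such upload under those assumptions (aws-sdk v2; the helper name and key format are illustrative only, not from my actual code):

    // Sketch: resize the in-memory buffer and upload the result to S3.
    const sharp = require("sharp");

    async function uploadFullSize(s3, file) {
        const resized = await sharp(file.buffer)   // memoryStorage exposes the bytes as a Buffer
            .resize(2000)
            .jpeg({ quality: 50 })
            .toBuffer();

        // upload() takes capital-K "Key"; .promise() makes it awaitable
        return s3
            .upload({
                Bucket: process.env.AWS_BUCKET,
                Key: Date.now() + "-" + file.originalname + "_full.jpg",
                Body: resized,
            })
            .promise();
    }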

The entry for the image is stored in my Postgres database, but then I get

Error: Input file is missing 

and when I check my bucket, it is empty.

Here is my code

posts.js

const s3 = new AWS.S3({
    accessKeyId: process.env.AWS_ID,
    secretAccessKey: process.env.AWS_SECRET,
    Bucket: process.env.AWS_BUCKET,
    region: process.env.AWS_REGION
})

const storage = multer.memoryStorage({})

const upload = multer({
    storage: storage,
    limits: { fieldSize: 25 * 1024 * 1024 },
});

router.post(
    "/",
    [
        upload.array("images", config.get("maxImageCount")),
        imageResize,
    ],
    async (req, res) => {
        const paths = req.files.map((file) => ({
            originalName: file.originalname + "-" + new Date().toISOString() + "-" + uuidv4(),
        }));
        await Post.create(
            {
                title: req.body.title,
                userId: req.body.userId,
                Post_Images: paths.map((x) => ({ images: x.originalName })),
            },
            { include: [Post_Image] }
        ).then(() => res.status(201).send());
    }
);

imageResize.js

const sharp = require("sharp");
const path = require("path");
const fs = require("fs");

const outputFolder = "public/assets";

require("dotenv").config();
const AWS = require('aws-sdk')
const s3 = new AWS.S3({
    accessKeyId: process.env.AWS_ID,
    secretAccessKey: process.env.AWS_SECRET,
    Bucket: process.env.AWS_BUCKET,
    region: process.env.AWS_REGION
})

module.exports = async (req, res, next) => {
    const images = [];

    const resizePromises = req.files.map(async (file) => {
        console.log(file)
        await sharp(file)
            .resize(2000)
            .jpeg({ quality: 50 })
            .toBuffer()
            .then(resized => s3.upload({
                Bucket: process.env.AWS_BUCKET,
                key: file.originalname + "-" + new Date().toISOString() + "-" + uuidv4() + "_full.jpg",
                Body: resized
            }));

        await sharp(file)
            .resize(100)
            .jpeg({ quality: 30 })
            .toBuffer()
            .then(resized => s3.upload({
                Bucket: process.env.AWS_BUCKET,
                key: file.originalname + "-" + new Date().toISOString() + "-" + uuidv4() + "_thumb.jpg",
                Body: resized
            }));

        fs.unlinkSync(file.buffer);
        images.push(file.originalname);
    });

    await Promise.all([...resizePromises]);

    req.images = images;

    next();
};

1 Answer


I managed to get it to work; the problem was my imageResize middleware. I had to add the upload to AWS S3 into the code I already had, instead of saving the files to disk.

posts.js

const storage = multer.memoryStorage()

const upload = multer({
    storage: storage,
    limits: { fieldSize: 25 * 1024 * 1024 },
});

router.post(
    "/",
    [
        upload.array("images", config.get("maxImageCount")),
        imageResize,
    ],
    async (req, res) => {
        const paths = req.files.map((file) => ({ originalName: file.originalname }));
        await Post.create(
            {
                title: req.body.title,
                userId: req.body.userId,
                Post_Images: paths.map((x) => ({ images: x.originalName })),
            },
            { include: [Post_Image] }
        ).then(() => res.status(201).send());
    }
);

imageResize.js

const sharp = require("sharp");
require("dotenv").config();
const AWS = require('aws-sdk')

const s3 = new AWS.S3({
    accessKeyId: process.env.AWS_ID,
    secretAccessKey: process.env.AWS_SECRET,
    region: process.env.AWS_REGION
})

module.exports = async (req, res, next) => {
    const images = [];

    const resizePromises = req.files.map(async (file) => {
        console.log(file)

        // full-size version: resize the in-memory buffer and upload the result
        await sharp(file.buffer)
            .resize(2000)
            .jpeg({ quality: 50 })
            .toBuffer()
            .then(resized => s3.upload({
                Bucket: process.env.AWS_BUCKET,
                Key: file.originalname + "_full.jpg",
                Body: resized,
                ACL: 'public-read'
            }).promise());

        // thumbnail version
        await sharp(file.buffer)
            .resize(100)
            .jpeg({ quality: 30 })
            .toBuffer()
            .then(resized => s3.upload({
                Bucket: process.env.AWS_BUCKET,
                Key: file.originalname + "_thumb.jpg",
                Body: resized,
                ACL: 'public-read'
            }).promise());

        images.push(file.originalname);
    });

    await Promise.all([...resizePromises]);

    req.images = images;

    next();
};
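As a possible follow-up (not part of the original answer): s3.upload(...).promise() resolves with the object's Location and Key, so the middleware could collect those instead of file.originalname if the route needs to persist real S3 URLs. A small sketch, with my own helper name uploadThumb:

    // Sketch: return the S3 URL of the uploaded thumbnail so it can be persisted.
    async function uploadThumb(s3, file) {
        const resized = await sharp(file.buffer)
            .resize(100)
            .jpeg({ quality: 30 })
            .toBuffer();

        const result = await s3
            .upload({
                Bucket: process.env.AWS_BUCKET,
                Key: file.originalname + "_thumb.jpg",
                Body: resized,
                ACL: "public-read",
            })
            .promise();

        return result.Location; // full URL of the stored object, e.g. for Post_Images
    }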