Question

upload multiple files (spaces) using next js?

 await s3
        .upload({
          Bucket: paths({ id: req.user._id, ct: fields?.ct }),
          ACL: 'public-read',
          Key: key,
          Body: body,
          ContentType: contentType,
        })
        .send((err, data) => {
          if (err) {
            return res.status(500).json('Internal Error Please try again');
          }
          if (data) {
            console.log('data', data);
            return res.status(200).json({ files: data.Location });
          }
        });

Subscribe
Share

Submit an answer
You can type !ref in this text area to quickly search our full set of tutorials, documentation, and marketplace offerings and insert a link!

These answers are provided by our Community. If you find them useful, show some love by clicking the heart. If you run into issues leave a comment, or add your own answer to help others.

The full code is below — is this approach good?


handle.use(auth).post((req, res) => {
  // S3-compatible client pointed at DigitalOcean Spaces (fra1 region).
  const s3 = new AWS.S3({
    endpoint: new AWS.Endpoint('fra1.digitaloceanspaces.com'),
    accessKeyId: process.env.SPACES_KEY,
    secretAccessKey: process.env.SPACES_SECRET,
    region: 'fra1',
  });

  /**
   * Map a content-type code to the bucket path for this user.
   * Returns null for unknown codes so the caller can reject the request
   * instead of passing an invalid Bucket to S3.
   */
  function paths({ id, ct }) {
    switch (ct) {
      case 'tck':
        return `nor-platforms/${id}/tickets`;
      case 'prd':
        return `nor-platforms/${id}/products`;
      case 'pcs':
        return `nor-platforms/${id}`;
      default:
        return null;
    }
  }

  const form = new formidable.IncomingForm({ multiples: true, maxFileSize: 50 * 1024 * 1024 });
  // NOTE: a try/catch wrapped around form.parse() cannot catch errors thrown
  // inside this async callback — it runs after the outer frame has returned.
  // All error handling therefore lives inside the callback itself.
  form.parse(req, async (err, fields, files) => {
    try {
      if (err) {
        return res.status(500).json('Internal Error Please try again');
      }
      const bucket = paths({ id: req.user._id, ct: fields?.ct });
      if (!bucket) {
        return res.status(400).json('Unknown content type');
      }

      // formidable yields a single file object for one upload and an array
      // for several — normalize so both cases take the same path.
      const fileList = Array.isArray(files.file) ? files.file : [files.file];

      // S3.upload accepts exactly one object per call (arrays for Key/Body
      // are invalid), so issue one upload per file and await them all.
      // .promise() is used because .send(callback) is not awaitable.
      // Streaming the file avoids buffering up to 50 MB per file in memory.
      const results = await Promise.all(
        fileList.map((f) =>
          s3
            .upload({
              Bucket: bucket,
              ACL: 'public-read',
              Key: f.originalFilename,
              Body: fs.createReadStream(f.filepath),
              ContentType: f.mimetype,
            })
            .promise()
        )
      );

      const locations = results.map((r) => r.Location);
      // Preserve the original single-file response shape (a plain string);
      // multiple files come back as an array of URLs.
      return res.status(200).json({ files: locations.length === 1 ? locations[0] : locations });
    } catch (uploadErr) {
      return res.status(500).json('Internal Error Please try again');
    }
  });
});