Upload image to S3 bucket - React Native and Node.js
Within my app a user can select a profile image, and I would like that image to be uploaded to an S3 bucket when the user saves their profile data.
I pass the image data (along with JSON consisting of name, email, and telephone, for example) from my app to an Express server and upload it from there.
At present I can pass the image data (it seems to be the URL at the moment) to an S3 bucket and it saves.
I don't think I'm actually saving the image itself though, because when I download the object from S3 manually and try to open it on my Mac, it says the file may be damaged and I cannot see the image.
I feel daft for asking, but how do I actually upload the image itself? Thanks.
React Native Side
const handleFormSubmit = formData => {
  const jsonData = JSON.stringify({
    ...formData,
  });

  // Handle profile image
  if (imageProps && imageProps.uri) {
    const data = new FormData();
    data.append('formBody', jsonData);
    data.append('image', {
      uri:
        Platform.OS === 'android'
          ? imageProps.uri
          : imageProps.uri.replace('file://', ''),
      type: imageProps.type,
      name: imageProps.fileName,
    });
    sendRequest(data);
  } else {
    sendRequest(jsonData);
  }
};
const sendRequest = data => {
  let responseData;
  fetch('http://localhost:8080/users/api/update_user_profile', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Accept: 'application/json',
    },
    body: data,
  })
    .then(response => {
      responseData = response;
      return response.json();
    })
    .then(jsonData => {
      console.log(jsonData);
    })
    .catch(error => {
      console.log(error);
    });
};
Server Side
const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
});

// Setting up S3 upload parameters
const params = {
  Bucket: 'bucket-folder',
  ACL: 'public-read',
  Key: req.files.image.name,
  Body: req.files.image.path
};

const stored = await s3.upload(params).promise();
Solution 1:[1]
The downloaded object isn't a valid image because the upload passes req.files.image.path as the S3 Body, so S3 stores the path string rather than the file's contents. You can use Multer for uploading files to S3: with memory storage the uploaded file's bytes are available in req.file.buffer, which you then pass as the Body.
const multer = require('multer');
const AWS = require('aws-sdk');
const uniqid = require('uniqid');

const storage = multer.memoryStorage();
const upload = multer({ storage });

// Posts a new file to Amazon S3 and saves it to the db
router.post('/:id', upload.single('attachment'), async (req, res) => {
  const unique = uniqid.time();
  const { file } = req;
  const { filePath } = req.body;
  const { id } = req.params;
  const s3FileURL = process.env.AWS_UPLOADED_FILE_URL;
  const region = process.env.AWS_REGION;
  const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
  const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
  const Bucket = process.env.AWS_BUCKET_NAME + '/' + filePath;
  const Key = `${id}/${unique}-${file.originalname}`;
  const Body = file.buffer;
  const ContentType = file.mimetype;
  const ACL = 'public-read';

  const s3bucket = new AWS.S3({
    accessKeyId,
    secretAccessKey,
    region,
  });

  const params = {
    Bucket,
    Key,
    Body,
    ContentType,
    ACL,
  };

  s3bucket.upload(params, async (err, data) => {
    if (err) {
      res.status(500).json({ error: true, Message: err });
    } else {
      console.log(params);
      const newFileUploaded = {
        description: req.body.description,
        fileLink: `${s3FileURL}${filePath}/${id}/${unique}-${file.originalname}`,
        s3_key: params.Key,
      };
      try {
        const response = await postFile({
          name: req.body.name,
          attachment: newFileUploaded,
          alt: req.body.alt,
          user: req.body.user,
          relatedID: req.body.relatedID,
        });
        res.status(200).json({
          message: response.message,
          success: response.success,
          result: response.result,
        });
      } catch (e) {
        res.status(500).json({
          message: 'File uploaded but the DB could not save the request (upload by ID)',
          success: false,
          result: [],
        });
      }
    }
  });
});
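
As a rough sketch of how the same idea maps onto the route from the question (the endpoint and the 'image' and 'formBody' field names are taken from the React Native code above, and I'm assuming the router is mounted at /users; the bucket environment variable and response shape are placeholder assumptions), the essential change is that Body is req.file.buffer rather than a file path:

const express = require('express');
const multer = require('multer');
const AWS = require('aws-sdk');

const router = express.Router();
const upload = multer({ storage: multer.memoryStorage() });

const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  region: process.env.AWS_REGION,
});

// 'image' must match the field name used in data.append('image', ...) on the client
router.post('/api/update_user_profile', upload.single('image'), async (req, res) => {
  try {
    // The remaining profile fields arrive as a JSON string in the 'formBody' field
    const profile = JSON.parse(req.body.formBody);

    const params = {
      Bucket: process.env.AWS_BUCKET_NAME, // placeholder bucket variable
      Key: `profile-images/${Date.now()}-${req.file.originalname}`,
      Body: req.file.buffer, // the actual image bytes, not a path string
      ContentType: req.file.mimetype,
      ACL: 'public-read',
    };

    const stored = await s3.upload(params).promise();

    // stored.Location is the public URL of the uploaded object
    res.status(200).json({ profile, imageUrl: stored.Location });
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});

module.exports = router;

One more thing to watch on the React Native side: when the body is a FormData instance, don't set 'Content-Type': 'application/json'. Leave the Content-Type header out entirely so fetch can set multipart/form-data with the correct boundary, which Multer needs in order to parse the upload.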
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow
| Solution | Source |
|---|---|
| Solution 1 | Samil Kahraman |
