I'm trying to write an image server that uses node.js to store images on S3. Uploading the image works fine, and I can download and view it correctly with an S3 browser client (I'm using DragonDisk specifically, but I've downloaded it successfully with others too). However, when I download it with Node and try to write it to disk, I can't open the file (it says it may be damaged or in a file format that Preview doesn't recognize). I'm using the Amazon SDK for Node and fs to write the file.

I know you can pass an optional encoding to fs.writeFile, but I've tried all of them and it doesn't work. I've also tried setting ContentType on putObject and ResponseContentType on getObject, as well as ContentEncoding and ResponseContentEncoding (and all of these in various combinations). Same result. Here's some code:
var AWS = require('aws-sdk')
  , gm = require('../lib/gm')
  , uuid = require('node-uuid')
  , fs = require('fs');

AWS.config.loadFromPath('./amazonConfig.json');
var s3 = new AWS.S3();
var bucket = 'myBucketName'; // There's other logic here to set the bucket name.

exports.upload = function(req, res) {
  var id = uuid.v4();
  gm.format("/path/to/some/image.jpg", function(format){
    var key = req.params.dir + "/" + id + "/default." + format;
    fs.readFile('/path/to/some/image.jpg', function(err, data){
      if (err) { console.warn(err); }
      else {
        s3.client.putObject({
          Bucket: bucket,
          Key: key,
          Body: data,
          ContentType: 'image/jpeg'
          // I've also tried adding ContentEncoding (in various formats) here.
        }).done(function(response){
          res.status(200).end(JSON.stringify({ok: 1, id: id}));
        }).fail(function(response){
          res.status(response.httpResponse.statusCode).end(JSON.stringify({err: response}));
        });
      }
    });
  });
};

exports.get = function(req, res) {
  var key = req.params.dir + "/" + req.params.id + "/default.JPEG";
  s3.client.getObject({
    Bucket: bucket,
    Key: key,
    ResponseContentType: 'image/jpeg'
    // Tried ResponseContentEncoding here in base64, binary, and utf8
  }).done(function(response){
    res.status(200).end(JSON.stringify({ok: 1, response: response}));
    var filename = '/path/to/new/image/default.JPEG';
    fs.writeFile(filename, response.data.Body, function(err){
      if (err) console.warn(err);
      // This DOES write the file, just not as an image that can be opened.
      // I've tried pretty much every encoding as the optional third parameter
      // and I've matched the encodings to the ResponseContentEncoding and
      // ContentEncoding above (in case it needs to be the same)
    });
  }).fail(function(response){
    res.status(response.httpResponse.statusCode).end(JSON.stringify({err: response}));
  });
};
Incidentally, I'm using Express for routing, so that's where req.params comes from.
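For context, a minimal sketch of how routes like these might be wired up so that req.params.dir and req.params.id exist in the handlers (the route paths and module name here are assumptions, not from the original code):

// Hypothetical routing sketch -- paths and module name are illustrative only.
var express = require('express');
var images = require('./routes/images'); // the module containing exports.upload / exports.get above

var app = express();
app.post('/images/:dir', images.upload);    // ":dir" becomes req.params.dir
app.get('/images/:dir/:id', images.get);    // ":dir" and ":id" become req.params.dir / req.params.id
app.listen(3000);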
Answer 0 (score: 11)
For anyone still struggling with this, here is my approach using the native aws-sdk.
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./s3_config.json');
var s3Bucket = new AWS.S3( { params: {Bucket: 'myBucket'} } );
Inside the router method (ContentType should be set to the content type of the image file):
var buf = new Buffer(req.body.imageBinary.replace(/^data:image\/\w+;base64,/, ""), 'base64');
var data = {
  Key: req.body.userId,
  Body: buf,
  ContentEncoding: 'base64',
  ContentType: 'image/jpeg'
};
s3Bucket.putObject(data, function(err, data){
  if (err) {
    console.log(err);
    console.log('Error uploading data: ', data);
  } else {
    console.log('Successfully uploaded the image!');
  }
});
The s3_config.json file is:
{
  "accessKeyId": "xxxxxxxxxxxxxxxx",
  "secretAccessKey": "xxxxxxxxxxxxxx",
  "region": "us-east-1"
}
Answer 1 (score: 5)
OK, after a lot of trial and error, I've figured out how to do this. I ended up switching to knox, but presumably you could use a similar strategy with aws-sdk. This is the kind of solution that makes me say, "there has to be a better way than this," but I'm satisfied with anything that works.
var imgData = "";
client.getFile(key, function(err, fileRes){
  fileRes.on('data', function(chunk){
    imgData += chunk.toString('binary');
  }).on('end', function(){
    res.set('Content-Type', pic.mime);
    res.set('Content-Length', fileRes.headers['content-length']);
    res.send(new Buffer(imgData, 'binary'));
  });
});
getFile() returns the data chunks as buffers. You would think you could just pipe the result straight through to the front end, but for whatever reason this was the only way I could get the service to return an image correctly. Writing a buffer out to a binary string only to write it back into a buffer feels redundant, but hey, if it works, it works. If anyone finds a more efficient solution, I'd love to hear it.
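As a rough idea of what the "similar strategy with aws-sdk" might look like, here is a sketch using the plain callback form of getObject (this is an illustrative addition, not part of the original answer; bucket and key are placeholders for whatever you configured):

// Sketch only: sending the S3 object back with aws-sdk instead of knox.
// getObject's callback hands you data.Body as a Buffer, so no binary-string round trip is needed.
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

s3.getObject({ Bucket: bucket, Key: key }, function(err, data){
  if (err) return res.status(err.statusCode || 500).end(JSON.stringify({ err: err }));
  res.set('Content-Type', data.ContentType || 'image/jpeg');
  res.send(data.Body);
});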
Answer 2 (score: 0)
// S3 here is the constructor from the aws-sdk package (e.g. const { S3 } = require('aws-sdk')).
uploadfile(file, filename, folder) {
  const bucket = new S3({
    accessKeyId: 'enter your access key id here',
    secretAccessKey: 'enter your secret key here.',
    region: 'us-east-2'
  });
  const params = {
    Bucket: 'enter your bucket here.',
    Key: folder + '/' + filename + '.jpg',
    ACL: 'public-read',
    ContentEncoding: 'base64',
    // "file" is expected to be a base64 data URL; strip the prefix before decoding.
    Body: new Buffer(file.replace(/^data:image\/\w+;base64,/, ""), 'base64'),
    ContentType: 'image/jpeg'
  };
  bucket.upload(params, function (err, data) {
    if (err) {
      console.log('There was an error uploading your file: ', err);
      return false;
    }
    console.log('Successfully uploaded file.', data);
    return true;
  });
}
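A hypothetical call site, assuming file is a base64 data URL such as the output of canvas.toDataURL() (the names and data below are made up for illustration):

// Illustrative usage only -- the data URL is truncated and the names are assumptions.
const dataUrl = 'data:image/jpeg;base64,/9j/4AAQSkZJRg...';
someService.uploadfile(dataUrl, 'avatar-123', 'profile-pics');
// With the params above, the object would be stored under the key 'profile-pics/avatar-123.jpg'.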
Answer 3 (score: 0)
As another solution: I fixed my problem by switching to Body: fs.createReadStream, and it works like a charm.
const uploadFile = () => {
  fs.readFile(filename, (err, data) => {
    if (err) throw err;
    const params = {
      Bucket: `${process.env.S3_Bucket}/ProfilePics`, // pass your bucket name
      Key: `${decoded.id}-pic.${filetypeabbrv}`, // e.g. saved as ProfilePics/<userId>-pic.jpg
      Body: fs.createReadStream(req.file.path), // note: "data" from readFile isn't used; the Body streams from req.file.path
      ContentType: filetype,
    };
    s3.upload(params, function (s3Err, data) {
      if (s3Err) throw s3Err;
      console.log(`File uploaded successfully at ${data.Location}`);
    });
  });
};
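For completeness, going back to the original download problem: a streaming counterpart that writes the object straight to disk, based on the aws-sdk request's createReadStream() (this is a sketch added here, not part of the answer above; bucket, key, and path are placeholders):

// Sketch only: stream the object from S3 to a local file with no string/encoding round trips.
const fs = require('fs');
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

const downloadFile = (bucket, key, destPath) => {
  s3.getObject({ Bucket: bucket, Key: key })
    .createReadStream()
    .on('error', (err) => console.warn(err))
    .pipe(fs.createWriteStream(destPath));
};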