I can't upload large files like videos to S3; the upload eventually times out. I tried streaming with fs, but I must not be using it correctly.
I've tried everything I can think of to get fs to stream this file. I don't know whether fs can even be used this way with multerS3 in a separate upload path. I can upload images and very small videos, but nothing larger.
// Here is my s3 index file which exports upload
const crypto = require('crypto');
const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');
const fs = require('fs');
aws.config.update({
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
region: 'us-east-1',
ACL: 'public-read'
});
const s3 = new aws.S3({ httpOptions: { timeout: 10 * 60 * 1000 }});
var options = { partSize: 5 * 1024 * 1024, queueSize: 10 };
const fileFilter = (req, file, cb) => {
console.log('file.mimetype is ', file.mimetype);
if (file.mimetype === 'image/jpeg' || file.mimetype === 'image/png' || file.mimetype === 'video/mp4' || file.mimetype === 'video/avi' || file.mimetype === 'video/mov' || file.mimetype === 'video/quicktime') {
cb(null, true);
} else {
cb(new Error('Invalid file type'), false);
}
}
// My attempt at getting fs to stream the file (this is the part I'm unsure about)
const filename = getFileName();
const upload = multer({
fileFilter,
storage: multerS3({
acl: 'public-read',
s3,
options,
body: fs.createReadStream(filename),
bucket: 'skilljack',
metadata: function (req, file, cb) {
cb(null, {fieldName: 'TESTING_METADATA'})
},
key: function (req, file, cb) {
let buf = crypto.randomBytes(16);
buf = buf.toString('hex');
let uniqFileName = file.originalname.replace(/\.jpeg|\.jpg|\.png|\.avi|\.mov|\.mp4/ig, '');
uniqFileName += buf;
cb(undefined, uniqFileName );
}
})
});
function getFileName (req, file) {
if (file) {
const body = fs.createReadStream(file.originalname);
return body;
}
}
module.exports = {
upload
}
// Here is my route file
const express = require('express');
const router = express.Router({ mergeParams: true });
const multer = require('multer');
const { upload } = require('../s3');
const { asyncErrorHandler, isLoggedIn, isAuthor } = require('../middleware');
const {
postCreate,
postDestroy
} = require('../controllers/posts');
router.post('/', isLoggedIn, asyncErrorHandler(isAuthor), upload.single('image'), asyncErrorHandler(postCreate));
router.delete('/:post_id', isLoggedIn, asyncErrorHandler(isAuthor), asyncErrorHandler(postDestroy));
module.exports = router;
Answer 0 (score: 0)
I ran into the same problem for a few days. The way to avoid it (although it takes a long time) is to reduce the queueSize to 1.
It happens when your network connection is poor: eventually some of the queued parts sit idle and trigger the timeout.
Setting queueSize to 1 gives the part currently being uploaded all of the available bandwidth and avoids the timeout.
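A minimal sketch of that change, reusing the option names from the question. Whether multer-s3 forwards an options object to s3.upload depends on the version, so a direct s3.upload call is shown as well; someKey and someStream below are placeholders, not part of the original code.
// Only one part in flight at a time, 5 MB per part
var options = { partSize: 5 * 1024 * 1024, queueSize: 1 };
// The same options object is accepted by s3.upload (ManagedUpload) as its second argument
s3.upload({ Bucket: 'skilljack', Key: someKey, Body: someStream }, options, function (err, data) {
  if (err) return console.error('upload failed:', err);
  console.log('uploaded to', data.Location);
});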
Answer 1 (score: 0)
In addition to setting the queueSize to 1, you may also want to disable the timeout.
const s3 = new aws.S3({
  accessKeyId: config.get('accessKeyId'),
  secretAccessKey: config.get('secretAccessKey'),
  Bucket: config.get('bucket'),
});
// A timeout of 0 disables the SDK's default two-minute httpOptions timeout
s3.config.httpOptions.timeout = 0;
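For reference, combining both suggestions in the question's own setup might look like the sketch below (assuming the rest of the multer-s3 config stays unchanged):
// No per-request timeout instead of the 10-minute limit
const s3 = new aws.S3({ httpOptions: { timeout: 0 } });
// One part uploading at a time
var options = { partSize: 5 * 1024 * 1024, queueSize: 1 };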