Uploading a 20MB file was taking me about 30 seconds, so I switched to multipart upload, but a 20MB file still takes around 30 seconds to upload.
I think it is related to this function:
function createMultipartUpload(s3, multiPartParams, buffer) {
    partNum = 0;
    startTime = new Date();
    var deferred = $q.defer();
    numPartsLeft = Math.ceil(buffer.byteLength / partSize);
    s3.createMultipartUpload(multiPartParams, function(mpErr, multipart) {
        if (mpErr) { console.log('Error!', mpErr); return; }
        console.log("Got upload ID", multipart.UploadId);
        // Grab each partSize chunk and upload it as a part
        for (var rangeStart = 0; rangeStart < buffer.byteLength; rangeStart += partSize) {
            (function(rangeStart) {
                console.log("PART ");
                console.log(rangeStart);
                partNum++;
                var end = Math.min(rangeStart + partSize, buffer.byteLength),
                    partParams = {
                        Body: buffer.slice(rangeStart, end),
                        Bucket: multiPartParams.Bucket,
                        Key: multiPartParams.Key,
                        PartNumber: String(partNum),
                        UploadId: multipart.UploadId
                    };
                // Send a single part
                console.log('Uploading part: #', partParams.PartNumber, ', Range start:', rangeStart);
                uploadPart(s3, multipart, partParams).then(function() {
                    console.log('1');
                });
                var delta = (new Date() - startTime) / 1000;
                console.log('This part took', delta, 'seconds');
                if (rangeStart > buffer.byteLength - partSize) {
                    deferred.resolve();
                }
            }(rangeStart));
        }
    });
    return deferred.promise;
}
When it gets down to uploadPart, the parts are not uploaded asynchronously: adding up the time each part takes gives roughly the full 30 seconds, whereas if the parts ran in parallel the whole upload should take about 7.5 seconds.
I'm not sure why this is happening; the function is called inside a loop, so shouldn't the calls run asynchronously?
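For comparison, this is a minimal sketch of the behaviour I expected, reusing the uploadPart and partSize from the full code below and collecting the per-part promises with $q.all (the uploadAllParts name is just for illustration): every part is started inside the loop, and the combined promise resolves once all of them have finished, so the HTTP requests should overlap instead of queuing up one after another.

// Minimal sketch of the expected behaviour (uploadAllParts is an illustrative
// name): kick off every uploadPart() call inside the loop and let $q.all
// resolve once all of the per-part promises settle.
function uploadAllParts(s3, multipart, multiPartParams, buffer) {
    var partPromises = [];
    var partNumber = 0;
    for (var rangeStart = 0; rangeStart < buffer.byteLength; rangeStart += partSize) {
        partNumber++;
        var end = Math.min(rangeStart + partSize, buffer.byteLength);
        partPromises.push(uploadPart(s3, multipart, {
            Body: buffer.slice(rangeStart, end),
            Bucket: multiPartParams.Bucket,
            Key: multiPartParams.Key,
            PartNumber: String(partNumber),
            UploadId: multipart.UploadId
        }));
    }
    // For a 20MB buffer and 5MB parts this is 4 concurrent requests, so the
    // total should be close to the slowest single part (~7.5s), not ~30s.
    return $q.all(partPromises);
}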
Here is the full code:
var startTime;
var partNum = 0;
var partSize = 1024 * 1024 * 5; // Minimum 5MB per chunk (except the last part) http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
var numPartsLeft = 0;
var maxUploadTries = 3;
var multipartMap = {
    Parts: []
};
function completeMultipartUpload(s3, doneParams) {
    var deferred = $q.defer();
    s3.completeMultipartUpload(doneParams, function(err, data) {
        if (err) {
            console.log("An error occurred while completing the multipart upload");
            console.log(err);
            deferred.reject();
        } else {
            var delta = (new Date() - startTime) / 1000;
            console.log('Completed upload in', delta, 'seconds');
            console.log('Final upload data:', data);
            deferred.resolve();
        }
    });
    return deferred.promise;
}
function uploadPart(s3, multipart, partParams, tryNum) {
    console.log("UPLOADING PART...");
    var deferred = $q.defer();
    tryNum = tryNum || 1;
    s3.uploadPart(partParams, function(multiErr, mData) {
        if (multiErr) {
            console.log('multiErr, upload part error:', multiErr);
            if (tryNum < maxUploadTries) {
                console.log('Retrying upload of part: #', partParams.PartNumber);
                return uploadPart(s3, multipart, partParams, tryNum + 1);
            } else {
                console.log('Failed uploading part: #', partParams.PartNumber);
            }
            return;
        }
        multipartMap.Parts[this.request.params.PartNumber - 1] = {
            ETag: mData.ETag,
            PartNumber: Number(this.request.params.PartNumber)
        };
        console.log("Completed part", this.request.params.PartNumber);
        console.log('mData', mData);
        if (--numPartsLeft > 0) deferred.resolve(); // complete only when all parts uploaded
        var doneParams = {
            Bucket: partParams.Bucket,
            Key: partParams.Key,
            MultipartUpload: multipartMap,
            UploadId: multipart.UploadId
        };
        console.log("Completing upload...");
        completeMultipartUpload(s3, doneParams).then(function() {
            deferred.resolve();
        });
    });
    return deferred.promise;
}
// Multipart
function createMultipartUpload(s3, multiPartParams, buffer) {
    partNum = 0;
    startTime = new Date();
    var deferred = $q.defer();
    numPartsLeft = Math.ceil(buffer.byteLength / partSize);
    s3.createMultipartUpload(multiPartParams, function(mpErr, multipart) {
        if (mpErr) { console.log('Error!', mpErr); return; }
        console.log("Got upload ID", multipart.UploadId);
        // Grab each partSize chunk and upload it as a part
        for (var rangeStart = 0; rangeStart < buffer.byteLength; rangeStart += partSize) {
            (function(rangeStart) {
                console.log("PART ");
                console.log(rangeStart);
                partNum++;
                var end = Math.min(rangeStart + partSize, buffer.byteLength),
                    partParams = {
                        Body: buffer.slice(rangeStart, end),
                        Bucket: multiPartParams.Bucket,
                        Key: multiPartParams.Key,
                        PartNumber: String(partNum),
                        UploadId: multipart.UploadId
                    };
                // Send a single part
                console.log('Uploading part: #', partParams.PartNumber, ', Range start:', rangeStart);
                uploadPart(s3, multipart, partParams).then(function() {
                    console.log('1');
                });
                var delta = (new Date() - startTime) / 1000;
                console.log('This part took', delta, 'seconds');
                if (rangeStart > buffer.byteLength - partSize) {
                    deferred.resolve();
                }
            }(rangeStart));
        }
    });
    return deferred.promise;
}