我正在创建一个npm模块,使用chunk upload在facebook api上传视频。
我想知道是否应该使用节点流
目前我这样做:
1.请求api获得块的大小。
2.使用unix split cmd
分割原始视频
3.按块发送块
4.删除所有块
我使用流进行一些测试,输出块看起来很好。
// Split 'video.mp4' into 1 MiB files named chunk0, chunk1, ...
const readable = fs.createReadStream('video.mp4');
let i = 0;
readable.on('readable', () => {
  let chunk;
  // read(1048576) returns null until a full 1 MiB is buffered
  // (at end-of-stream it returns whatever remains).
  while (null !== (chunk = readable.read(1048576))) {
    const fd = fs.openSync('chunk' + i++, 'w');
    try {
      fs.writeSync(fd, chunk, 0, chunk.length);
    } finally {
      // Bug fix: the original leaked one file descriptor per chunk.
      fs.closeSync(fd);
    }
  }
});
我应该使用Stream进行重构吗？这是指向当前git repo的链接。谢谢你的时间。
答案 0（得分：0）
更大的问题是“为什么”?你肯定可以简单地传播这个,但是你应该吗?如果您的代码现在令人满意,也许您不应该触摸它。
另一方面,如果您使用request-promise,
则请求已支持流式传输——就像require('fs').createReadStream().pipe(request(options))
一样;也许您可以简单地将整个视频以流的方式上传。
流式传输文件可以减少整个模块的内存占用,在某些情况下甚至可能更快地完成上传,但实际上取决于多个方面。试着看看它是怎么回事。
顺便说一句,您的示例使用writeSync。为何同步?我想只是为了测试它是否做得对吗?
答案 1（得分：0）
以下是我一年前使用Facebook Graph API在Node Js上传视频的代码:
// Publishes a video to Facebook in chunks. Three Graph API phases:
// 'start' announces the total file size, 'transfer' (via transferProcess)
// streams the chunks, and 'finish' closes the session with the description.
//
// @param {string} access_token - Facebook Graph API access token.
// @param {string} text - Description to publish alongside the video.
// @param {Object} file - Object with a `path` property locating the video file.
const publishVideo = function(access_token, text, file) {
  const stats = fs.statSync(file.path);
  // Phase 1 ('start'): send the total size; the response carries the
  // upload session id and the first start/end offsets.
  const startForm = {
    access_token: access_token,
    upload_phase: 'start',
    file_size: stats.size
  };
  request.post({ url: 'https://graph-video.facebook.com/v2.3/me/videos', form: startForm, json: true },
    function(err, response, body) {
      if (err) {
        // Bug fix: the original swallowed this error silently.
        console.error('Facebook video upload "start" phase failed:', err);
        return;
      }
      // Phase 2 ('transfer'): send the video chunk by chunk.
      transferProcess(undefined, file, access_token, body, function(err, currentUploadSession) {
        if (err) {
          // Bug fix: the original swallowed transfer errors silently.
          console.error('Facebook video upload "transfer" phase failed:', err);
          return;
        }
        // Phase 3 ('finish'): close the session and publish the video.
        const finishForm = {
          access_token: access_token,
          upload_phase: 'finish',
          upload_session_id: currentUploadSession,
          description: text
        };
        request.post(
          { url: 'https://graph-video.facebook.com/v2.3/me/videos', form: finishForm, json: true },
          function(err) {
            // Bug fix: the original attached no callback, dropping errors.
            if (err) {
              console.error('Facebook video upload "finish" phase failed:', err);
            }
          });
      });
    });
};
// Uploads one chunk of the video ('transfer' phase) and recurses until
// Facebook reports start_offset === end_offset, then hands the upload
// session id back through the callback.
//
// @param {string|undefined} uploadSession - Session id; undefined on the first
//   call, in which case body.upload_session_id is used.
// @param {Object} file - Object with a `path` property locating the video file.
// @param {string} access_token - Facebook Graph API access token.
// @param {Object} body - Previous Graph API response (start_offset, end_offset, ...).
// @param {Function} callback - callback(err, uploadSessionId).
const transferProcess = function(uploadSession, file, access_token, body, callback) {
  // Work on a private copy so that opening the original file at the same
  // time from elsewhere cannot interfere with this upload.
  const copyFileName = file.path + '-facebook';
  fse.copySync(file.path, copyFileName);
  const startOffset = parseInt(body.start_offset, 10);
  // Graph API offsets arrive as strings; subtraction coerces them to numbers.
  const length = body.end_offset - body.start_offset;
  // Allocate exactly one chunk. The original reserved ~1 GB per call with
  // the deprecated, non-zero-filled `new Buffer(...)`.
  const buffer = Buffer.alloc(length);
  const fd = fs.openSync(copyFileName, 'r');
  let bytesRead;
  try {
    // Bug fix: fs.readSync(fd, buffer, offsetInBuffer, length, filePosition).
    // The original passed start_offset as the *buffer* offset and read from
    // the implicit file position instead of the chunk's position in the file.
    bytesRead = fs.readSync(fd, buffer, 0, length, startOffset);
  } finally {
    // Bug fix: the original never closed the descriptor.
    fs.closeSync(fd);
  }
  const data = bytesRead < length ? buffer.slice(0, bytesRead) : buffer;
  // Persist the chunk as copyFileName-chunked-<offset> so it can be attached
  // to the multipart request as a file stream.
  const chunkFileName = copyFileName + '-chunked-' + body.start_offset;
  fs.writeFile(chunkFileName, data, function(err) {
    if (err) {
      callback(err);
      return;
    }
    const currentUploadSession = uploadSession ? uploadSession : body.upload_session_id;
    const formData = {
      upload_phase: 'transfer',
      start_offset: startOffset,
      upload_session_id: currentUploadSession,
      access_token: access_token,
      video_file_chunk: fs.createReadStream(chunkFileName)
    };
    request.post({ url: 'https://graph-video.facebook.com/v2.3/me/videos',
      formData: formData, json: true }, function (err, response, body) {
      // Propagate transport errors and Graph API error payloads alike.
      if (err || body.error) {
        callback(err ? err : body.error, null);
      }
      // Facebook echoes the next start/end offsets; equal offsets mean the
      // whole file has been transferred.
      else if (body.start_offset === body.end_offset) {
        callback(null, currentUploadSession);
      }
      // Otherwise recurse with the fresh offsets to send the next chunk.
      else {
        transferProcess(currentUploadSession, file, access_token, body, callback);
      }
    });
  });
};
希望你能用它。
乔尔