How to combine video upload chunks in Node.js

Time: 2019-02-05 19:19:25

Tags: node.js express busboy

I'm trying to upload a large (8.3 GB) video to my Node.js (Express) server in chunks, using busboy. How do I receive each chunk (busboy handles that part) and piece them together into a single complete video?

I've been looking into readable and writable streams, but I never end up with the complete video. I keep overwriting parts of it, so the result is only about 1 GB.

Here is my code:

req.busboy.on('file', (fieldname, file, filename) => {
    logger.info(`Upload of '${filename}' started`);

    const video = fs.createReadStream(path.join(`${process.cwd()}/uploads`, filename));
    const fstream = fs.createWriteStream(path.join(`${process.cwd()}/uploads`, filename));

    if (video) {
        video.pipe(fstream);
    }

    file.pipe(fstream);

    fstream.on('close', () => {
        logger.info(`Upload of '${filename}' finished`);
        res.status(200).send(`Upload of '${filename}' finished`);
    }); 
});

4 Answers:

Answer 0 (score: 1)

Use streams

multer makes it easy to handle file uploads as part of an Express route. It works well for small files that don't take up a lot of memory.

The problem with loading a large file into memory is that you can actually run out of memory and crash your application.

Instead, use streams when making the multipart/form-data request: assign a readStream to the file field in your request options rather than reading the whole file in first.

Streams are very valuable for optimizing performance.
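For illustration, here is a minimal sketch of that idea on the sending side using the form-data package, streaming the file into the request instead of buffering it (the URL, field name, and file path are placeholders, not from the answer above):

const fs = require('fs');
const FormData = require('form-data');

//stream the video into a multipart/form-data request instead of reading it into memory
const form = new FormData();
form.append('file', fs.createReadStream('/path/to/video.mp4'));

//form-data can send the request itself; the URL is a placeholder
form.submit('http://localhost:3000/upload', (err, res) => {
    if (err) throw err;
    console.log(`Upload responded with status ${res.statusCode}`);
    res.resume();
});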

Answer 1 (score: 0)

I think multer handles this well. Have you tried multer?
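For reference, a minimal multer setup looks roughly like this (the route, field name, and destination folder are assumptions); note that with the default disk storage the whole upload is written to the destination folder before your handler runs, so an 8.3 GB file needs that much free disk space:

const express = require('express');
const multer = require('multer');

const app = express();
//store uploads on disk under ./uploads instead of keeping them in memory
const upload = multer({ dest: 'uploads/' });

//'video' must match the field name used in the multipart form
app.post('/upload', upload.single('video'), (req, res) => {
    res.status(200).send(`Received '${req.file.originalname}'`);
});

app.listen(3000);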

Answer 2 (score: 0)

Try this code sample; I think it will work for you.

busboy.on("file", function(fieldName, file, filename, encoding, mimetype){
    const writeStream = fs.createWriteStream(writePath);
    file.pipe(writeStream);

    file.on("data", data => {
        totalSize += data.length;
        cb(totalSize);
    });

    file.on("end", () => {
        console.log("File "+ fieldName +" finished");
    });
});
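A rough sketch of how that handler might be wired into an Express route, using the busboy API of that era (the route path, writePath, and the progress callback cb are assumptions that the snippet above leaves undefined):

const express = require('express');
const Busboy = require('busboy');
const fs = require('fs');
const path = require('path');

const app = express();

app.post('/upload', (req, res) => {
    const busboy = new Busboy({ headers: req.headers });
    let totalSize = 0;
    const cb = size => console.log(`Received ${size} bytes so far`);

    busboy.on('file', (fieldName, file, filename) => {
        const writePath = path.join(process.cwd(), 'uploads', filename);
        const writeStream = fs.createWriteStream(writePath);
        file.pipe(writeStream);

        //report progress as data arrives
        file.on('data', data => {
            totalSize += data.length;
            cb(totalSize);
        });
    });

    busboy.on('finish', () => res.status(200).send('Upload complete'));
    req.pipe(busboy);
});

app.listen(3000);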

You can also refer to this link, which discusses the same issue:

https://github.com/mscdex/busboy/issues/143

Answer 3 (score: 0)

After more than 12 hours I figured it out, using snippets from this article that was given to me. I ended up with the following code:

//busboy is middleware on my index.js
const path = require('path');
const fs = require('fs-extra');
const streamToBuffer = require('fast-stream-to-buffer');

//API function called first
uploadVideoChunks(req, res) {
    req.pipe(req.busboy);

    req.busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
        const fileNameBase = filename.replace(/\.[^/.]+$/, '');

        //save all the chunks to a temp folder with .tmp extensions
        streamToBuffer(file, function (error, buffer) {
            const chunkDir = `${process.cwd()}/uploads/${fileNameBase}`;
            fs.outputFileSync(path.join(chunkDir, `${Date.now()}-${fileNameBase}.tmp`), buffer);
        });
    });

    req.busboy.on('finish', () => {
        res.status(200).send(`Finished uploading chunk`);
    });
}

//API function called once all chunks are uploaded
saveToFile(req, res) {
    const { filename, profileId, movieId } = req.body;

    const uploadDir = `${process.cwd()}/uploads`;
    const fileNameBase = filename.replace(/\.[^/.]+$/, '');
    const chunkDir = `${uploadDir}/${fileNameBase}`;
    const outputFile = fs.createWriteStream(path.join(uploadDir, filename));

    fs.readdir(chunkDir, function (error, filenames) {
        if (error) {
            throw new Error('Cannot get upload chunks!');
        }

        //loop through the temp dir and write to the stream to create a new file
        filenames.forEach(function (tempName) {
            const data = fs.readFileSync(`${chunkDir}/${tempName}`);
            outputFile.write(data);
            //delete the chunk we just handled
            fs.removeSync(`${chunkDir}/${tempName}`);
        });

        outputFile.end();
    });

    outputFile.on('finish', function () {
        //delete the temp folder once the file is written
        fs.removeSync(chunkDir);
    });
}
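For completeness, these two endpoints assume the client splits the video, uploads each piece, and then asks the server to stitch them together. A minimal browser-side sketch of that flow (the endpoint paths, field name, and chunk size are assumptions, and the /saveToFile route needs a JSON body parser):

//split the file into 10 MB pieces and POST them one by one
async function uploadInChunks(file, profileId, movieId) {
    const chunkSize = 10 * 1024 * 1024;

    for (let start = 0; start < file.size; start += chunkSize) {
        const form = new FormData();
        //keep the original filename so the server groups the chunks into one temp folder
        form.append('file', file.slice(start, start + chunkSize), file.name);
        await fetch('/uploadVideoChunks', { method: 'POST', body: form });
    }

    //tell the server to assemble the chunks into a single file
    await fetch('/saveToFile', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ filename: file.name, profileId, movieId }),
    });
}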