Merging Node.js streams

Time: 2015-07-18 16:22:49

Tags: javascript node.js stream

I have a bunch of files; for each of them I read some data from a corresponding stream, process it, and merge everything into a single stream.

Is there a more elegant solution than the one below (which keeps a separate counter and calls combinedStream.end() once all source streams have emitted 'end')?

let combinedStream = ....;

let counter = 0;

filePaths.forEach(function(filePath) {
  counter += 1;
  const fileStream = fs.createReadStream(filePath);
  const myStream = new MyStream(fileStream);

  // Forward each processed chunk into the combined stream
  myStream.on('data', function(chunk) {
    combinedStream.write(chunk);
  });
  myStream.on('end', function() {
    counter -= 1;
    if (counter === 0) {
      combinedStream.end();
    }
  });
});

return combinedStream;

2 Answers:

Answer 0 (score: 1)

A cleaner approach could be the one used in this repo, even if it merely hides the counter from you and lets you work with a more comfortable callback-based model.

This way, your code would look like the following:

let sharedStream = ...

function onEachFilename(filename, callback) {
    // here you can read from the stream and push the data on the shared one,
    // then invoke the "internal" callback on the end event
}

function onEndAll() {
    // here you can finalize and close the shared stream
}

forEach(filenames, onEachFilename, onEndAll);

Keep in mind that there is still a function somewhere doing the counting for you, invoking onEndAll once every per-file callback has been called, as sketched below.
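
A minimal sketch of what such a helper and the two callbacks might look like (an illustrative assumption on my part; the repository the answer refers to is not reproduced here):

const fs = require('fs');

// Hypothetical helper: runs onEach for every item and fires onEndAll
// once every per-item callback has been invoked.
function forEach(items, onEach, onEndAll) {
  let remaining = items.length;
  if (remaining === 0) return onEndAll();

  items.forEach(function(item) {
    onEach(item, function() {
      remaining -= 1;
      if (remaining === 0) {
        onEndAll();
      }
    });
  });
}

function onEachFilename(filename, callback) {
  const fileStream = fs.createReadStream(filename);
  fileStream.on('data', function(chunk) {
    sharedStream.write(chunk);
  });
  fileStream.on('end', callback);
  fileStream.on('error', callback);
}

function onEndAll() {
  sharedStream.end();
}

forEach(filenames, onEachFilename, onEndAll);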

Answer 1 (score: 0)

You can simply process the files with Transform streams and then pipe them into a PassThrough stream.

Since you are using let, I assume you can use ES2015.

"use strict";

let fs = require('fs');
let filePaths = ['./tmp/h.txt', './tmp/s.txt'];

let Stream = require('stream');

class StreamProcessor {

  constructor() {
    this.process_streams = [];
  }

  push(source_stream) {
    // Create a new Transform stream
    let transform = new StreamTransformer();
    // Register the finish/error events and pipe
    transform.processed = transform.wait();
    source_stream.pipe(transform);
    // Push the transform onto the internal array
    this.process_streams.push(transform);
  }

  done(callback) {
    let streams = this.process_streams;
    // Wait for all Transform streams to finish processing
    Promise.all(
      streams.map(function(s) { return s.processed; })
    )
    .then(function() {
      let combined_stream = new Stream.PassThrough();
      streams.forEach(function(stream) {
        stream.pipe(combined_stream);
      });
      // Call the callback with the combined stream
      callback(null, combined_stream);
    })
    .catch(function(err) {
      callback(err);
    });
  }

}

class StreamTransformer extends Stream.Transform {

  constructor() {
    // call super
    super();
  }

  _transform(chunk, enc, transformed) {
    // process files here
    let data = chunk.toString();
    data = data.substring(0, data.length - 2);
    this.push(data);
    transformed();
  }

  _flush(flushed) {
    // for additional output at the end
    this.push('\n');
    flushed();
  }

  wait() {
    // returns a promise that resolves when all the data has been processed
    let stream = this;

    return new Promise(function(resolve, reject) {
      stream.on('finish', function() {
        resolve(true);
      });
      stream.on('error', function(err) {
        reject(err);
      });
    });
  }

}

// Now you can do..

let process_stream = new StreamProcessor();

filePaths.forEach(function(fpath) {
  let fstream = fs.createReadStream(fpath);
  process_stream.push(fstream);
});

process_stream.done(function(err, combined_stream) {
  // Consume the combined stream
  combined_stream.pipe(process.stdout);
});

The test files contain 'hello' and 'stream'.

// Output is
// hell
// stream

This can be improved further... :/
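
One possible refinement (my own assumption, not part of the answer above): since each source's 'end' normally closes the stream it is piped into, the first transform to finish draining can end the shared PassThrough before the others are done. Piping with { end: false } and ending the combined stream only after every source has ended is a bit more robust. A sketch of how done() could be rewritten that way:

done(callback) {
  let streams = this.process_streams;

  // As before, wait for every transform to finish processing
  Promise.all(streams.map(function(s) { return s.processed; }))
    .then(function() {
      let combined_stream = new Stream.PassThrough();
      let remaining = streams.length;

      streams.forEach(function(stream) {
        // { end: false } keeps an individual source's 'end' from closing the combined stream
        stream.pipe(combined_stream, { end: false });
        stream.on('end', function() {
          remaining -= 1;
          if (remaining === 0) {
            // Close the combined stream only after every source has ended
            combined_stream.end();
          }
        });
      });

      callback(null, combined_stream);
    })
    .catch(function(err) {
      callback(err);
    });
}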