How to handle backpressure in "Transform#_flush"

Asked: 2019-01-02 01:09:55

Tags: node.js node-streams

What is the correct way to handle backpressure in the _flush method of a Transform implementation? In other words, if .push() returns false while flushing, is there any mechanism to properly respect downstream backpressure?

The documentation says to stop pushing as soon as .push() returns false, but a Transform has no way to listen for when the downstream wants to resume reading, other than overriding this._read; what would that look like, and is there any danger in doing so?

Here is an example you can use:

const stream = require('stream');

// a string large enough to overflow the buffer
const S_OVERFLOW = '-'.repeat((new stream.Readable()).readableHighWaterMark+1);


class example extends stream.Transform {
    constructor() {
        super({
            writableObjectMode: true,
        });

        // some internal queue that will be emptied once writable side ends
        Object.assign(this, {
            internal_queue: [],
        });
    }

    _transform(g_chunk, s_encoding, fk_transform) {
        // store chunk in internal queue
        this.internal_queue.push(g_chunk);

        // done with transform (no writes)
        fk_transform();
    }

    _flush(fk_flush) {
        console.warn('starting to flush');

        // now that writable side has ended, flush internal queue
        this.resumeFlush(fk_flush);
    }

    resumeFlush(fk_flush) {
        let a_queue = this.internal_queue;

        // still data left in internal queue
        while(a_queue.length) {
            // remove an item from queue
            a_queue.pop();

            // intentionally overflow buffer
            if(!this.push(S_OVERFLOW)) {
                //
                // WHAT TO DO HERE?
                //

                // go asynchronous
                return;
            }
        }

        console.warn('finished flush');

        // callback
        fk_flush();
    }
}


// instantiate transform
let ds_transform = new example();

// pipe to stdout
ds_transform.pipe(process.stdout);

// write some data (needs to happen twice)
ds_transform.write({
    item: 0,
});

ds_transform.write({
    item: 1,
});

// end stream
ds_transform.end();

Piping stdout to /dev/null so that stderr still prints to the console:

$ node transform.js > /dev/null
starting to flush
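
For reference, this is roughly what I imagined such an override looking like, applied to the example above (driven the same way: pipe to stdout, two writes, then end). The _pendingFlush field is just my own bookkeeping name, and I don't know whether hooking into _read like this is actually safe:

const stream = require('stream');

// a string large enough to overflow the buffer
const S_OVERFLOW = '-'.repeat((new stream.Readable()).readableHighWaterMark+1);


class example extends stream.Transform {
    constructor() {
        super({
            writableObjectMode: true,
        });

        // internal queue, plus the flush callback saved whenever push() reports backpressure
        Object.assign(this, {
            internal_queue: [],
            _pendingFlush: null,
        });
    }

    _transform(g_chunk, s_encoding, fk_transform) {
        // store chunk in internal queue
        this.internal_queue.push(g_chunk);

        // done with transform (no pushes)
        fk_transform();
    }

    _flush(fk_flush) {
        // writable side has ended; start emptying the internal queue
        this.resumeFlush(fk_flush);
    }

    resumeFlush(fk_flush) {
        let a_queue = this.internal_queue;

        // still data left in internal queue
        while(a_queue.length) {
            // remove an item from queue
            a_queue.pop();

            // intentionally overflow buffer
            if(!this.push(S_OVERFLOW)) {
                // remember where we were; _read() will pick this back up
                this._pendingFlush = fk_flush;
                return;
            }
        }

        // callback
        fk_flush();
    }

    _read(size) {
        // downstream is asking for more data: resume a paused flush, if any
        if(this._pendingFlush) {
            let fk_flush = this._pendingFlush;
            this._pendingFlush = null;
            this.resumeFlush(fk_flush);
        }

        // delegate to Transform's own _read so the normal _transform flow keeps working
        super._read(size);
    }
}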

1 Answer:

Answer 0 (score: 0)

The real issue here is that you should be using a Duplex rather than a Transform. Since each call to _transform is really just buffering data, rather than performing some (a)synchronous transformation on it, the implementation is better suited to a Duplex: calls to _write() buffer the data, and calls to _read() push until backpressure is detected.

const stream = require('stream');

// a string large enough to overflow the buffer
const S_OVERFLOW = '-'.repeat((new stream.Readable()).readableHighWaterMark+1);


class example extends stream.Duplex {
    constructor() {
        super({
            writableObjectMode: true,
        });

        // some internal queue that will be emptied once writable side ends
        Object.assign(this, {
            internal_queue: [],
        });
    }

    _write(g_chunk, s_encoding, fk_write) {
        // store chunk in internal queue
        this.internal_queue.push(g_chunk);

        // done with write (data is only buffered, nothing is pushed yet)
        fk_write();
    }

    _read() {
        console.warn('called _read()');
        let a_queue = this.internal_queue;

        // still data left in internal queue
        while(a_queue.length) {
            // remove an item from queue
            a_queue.pop();

            // intentionally overflow buffer
            if(!this.push(S_OVERFLOW)) {
                // go asynchronous
                return;
            }
        }

        console.warn('finished reading');

        // nothing more to read
        this.push(null);
    }
}


// instantiate the duplex
let ds_transform = new example();

// pipe to stdout
ds_transform.pipe(process.stdout);

// write some data (needs to happen twice)
ds_transform.write({
    item: 0,
});

ds_transform.write({
    item: 1,
});

// end stream
ds_transform.end();

Then you get:

$ node duplex.js > /dev/null
called _read()
called _read()
called _read()
finished reading
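
If you want to actually watch the backpressure pace the reads, you could pipe the duplex into a deliberately slow Writable instead of stdout. This is only a sketch reusing the example class (and the stream require) from above; the highWaterMark of 1 and the 10 ms delay are arbitrary values chosen purely to force backpressure:

// a deliberately slow consumer with a tiny buffer, so backpressure kicks in quickly
const slow_consumer = new stream.Writable({
    highWaterMark: 1,
    write(g_chunk, s_encoding, fk_write) {
        // acknowledge each chunk only after a short delay
        setTimeout(fk_write, 10);
    },
});

// instantiate the duplex and pipe it to the slow consumer
let ds_duplex = new example();
ds_duplex.pipe(slow_consumer);

// same driver as before
ds_duplex.write({
    item: 0,
});

ds_duplex.write({
    item: 1,
});

ds_duplex.end();

Each time the consumer drains, the stream machinery calls _read() again, so the pushes resume only as fast as the consumer can absorb them.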