rxjs: subscribing late results in an empty stream

Time: 2018-06-19 17:53:16

Tags: javascript rxjs event-loop

I have the following code. As it is, with a couple of lines commented out, it works fine: I subscribe to a stream, do some processing and stream the data to the client. However, if I uncomment those lines, my stream is always empty, i.e. count in getEntryQueryStream is always 0. I suspect it has to do with the fact that I subscribe to the stream too late and therefore miss all the values.
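
For context, a minimal sketch (not part of the original code) of the suspected late-subscription problem: a plain Rx.Subject is hot, so values pushed before subscribe() are simply lost, whereas a ReplaySubject buffers them for late subscribers.

const Rx = require('rx');

// a plain Subject is "hot": values pushed before anyone subscribes are dropped
const subject = new Rx.Subject();
subject.onNext('early value');           // no subscriber yet => value is lost
subject.subscribe(x => console.log(x));  // never receives 'early value'

// a ReplaySubject buffers values for late subscribers
const replay = new Rx.ReplaySubject();
replay.onNext('early value');
replay.subscribe(x => console.log(x));   // logs 'early value'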

// a wrapper of the mongodb driver => returns rxjs streams
import * as imongo from 'imongo';
import * as Rx from 'rx';
import * as _ from 'lodash';
import {elasticClient} from '../helpers/elasticClient';

const {ObjectId} = imongo;

function searchElastic({query, sort}, limit) {
    const body = {
        size: 1,
        query,
        _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
        sort
    };
    // keep the search results "scrollable" for 30 secs
    const scroll = '30s';
    let count = 0;

    return Rx.Observable
        .fromPromise(elasticClient.search({ index: 'data', body, scroll }))
        .concatMap(({_scroll_id, hits: {hits}}) => {
            const subject = new Rx.Subject();

            // subject needs to be subscribed to before adding new values
            // and therefore completing the stream => execute in next tick
            setImmediate(() => {
                if(hits.length) {
                    // initial data
                    subject.onNext(hits[0]._source);
                    // code that breaks
                    //if(limit && ++count === limit) {
                        //subject.onCompleted();
                        //return;
                    //}

                    const handleDoc = (err, res) => {
                        if(err) {
                            subject.onError(err);
                            return;
                        }

                        const {_scroll_id, hits: {hits}} = res;

                        if(!hits.length) {
                            subject.onCompleted();
                        } else {
                            subject.onNext(hits[0]._source);
                            // code that breaks
                            //if(limit && ++count === limit) {
                                //subject.onCompleted();
                                //return;
                            //}

                            setImmediate(() =>
                                elasticClient.scroll({scroll, scrollId: _scroll_id},
                                    handleDoc));
                        }
                    };

                    setImmediate(() =>
                        elasticClient.scroll({scroll, scrollId: _scroll_id},
                            handleDoc));
                } else {
                    subject.onCompleted();
                }
            });

            return subject.asObservable();
        });
}

function getElasticQuery(searchString, filter) {
    const query = _.cloneDeep(filter);
    query.query.filtered.filter.bool.must.push({
        query: {
            query_string: {
                query: searchString
            }
        }
    });

    return _.extend({}, query);
}

function fetchAncestors(ancestorIds, ancestors, format) {
    return imongo.find('session', 'sparse_data', {
        query: { _id: { $in: ancestorIds.map(x => ObjectId(x)) } },
        fields: { name: 1, type: 1 }
    })
    .map(entry => {
        entry.id = entry._id.toString();
        delete entry._id;

        return entry;
    })
    // we don't care about the results
    // but have to wait for stream to finish
    .defaultIfEmpty()
    .last();
}

function getEntryQueryStream(entriesQuery, query, limit) {
    const {parentSearchFilter, filter, format} = query;

    return searchElastic(entriesQuery, limit)
        .concatMap(entry => {
            const ancestors = entry.ancestors || [];

            // if no parents => doesn't match
            if(!ancestors.length) {
                return Rx.Observable.empty();
            }

            const parentsQuery = getElasticQuery(parentSearchFilter, filter);
            parentsQuery.query.filtered.filter.bool.must.push({
                terms: {
                    id: ancestors
                }
            });

            // fetch parent entries
            return searchElastic(parentsQuery)
                .count()
                .concatMap(count => {
                    // no parents match query
                    if(!count) {
                        return Rx.Observable.empty();
                    }

                    // fetch all other ancestors that weren't part of the query results
                    // and are still a string (id)
                    const restAncestorsToFetch = ancestors.filter(x => _.isString(x));
                    return fetchAncestors(restAncestorsToFetch, ancestors, format)
                        .concatMap(() => Rx.Observable.just(entry));
                });
        });
}

function executeQuery(query, res) {
    try {
        const stream = getEntryQueryStream(query);
        // stream is passed on to another function here where we subscribe to it like:
        // stream
        //     .map(x => whatever(x))
        //     .subscribe(
        //         x => res.write(x),
        //         err => console.error(err),
        //         () => res.end());
    } catch(e) {
        logger.error(e);
        res.status(500).json(e);
    }
}

I don't understand why those few lines break everything, or how I can fix it.

1 Answer:

Answer 0 (score: 0)

Your use case is quite complex. You could start by restructuring your searchElastic method along the lines of the pattern below.

  1. First, turn elasticClient.scroll into an observable.
  2. Set up the initial request data for elasticClient.search().
  3. Once the search resolves, you get a scroll_id back.
  4. The expand() operator lets you call the elasticClient scroll observable recursively.
  5. Use map to select the data you want to return.
  6. Use takeWhile to decide when the stream should complete.

The end result is that once you call searchElastic().subscribe(), the stream keeps emitting until there is no more data to fetch.

Hopefully this structure is correct and helps you get started.

function searchElastic({ query, sort }, limit) {
    // wrap the node-style (err, res) scroll callback in an observable factory
    const elasticClientScroll = Rx.Observable.fromNodeCallback(elasticClient.scroll, elasticClient);
    const obj = {
        body: {
            size: 1,
            query,
            _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
            sort
        },
        // keep the search results "scrollable" for 30 secs
        scroll: '30s'
    };

    return Rx.Observable
        .fromPromise(elasticClient.search({ index: 'data', body: obj.body, scroll: obj.scroll }))
        // expand() recursively feeds each response back into another scroll call
        .expand(({ _scroll_id, hits: { hits } }) =>
            // guess there is more logic needed here, e.g. updating the scroll id
            elasticClientScroll({ scroll: obj.scroll, scrollId: _scroll_id })
                // .. select the part of the response you want to return
                .map(res => res))
        // stop once a scroll page comes back empty
        .takeWhile(res => res.hits.hits.length > 0);
}
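
A rough usage sketch of the version above (the match_all query, the sort value and the .take(10) limit are placeholders of mine, not from the original post): subscribing is what drives the scroll loop, which keeps running until takeWhile sees an empty page.

searchElastic({ query: { match_all: {} }, sort: ['_doc'] }, 10)
    // pick the single hit out of each response page
    .map(res => res.hits.hits[0]._source)
    // apply the limit on the consumer side
    .take(10)
    .subscribe(
        doc => console.log(doc),
        err => console.error(err),
        () => console.log('done'));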