Scenario: I am reading a stream of data from a database. What I want to do is read a large chunk of data, process it, and stream it out with RxJava 2. But while one chunk is being processed and streamed, I want to load the next chunk on a separate thread (i.e. pre-fetch the next chunk).
Here is what I have tried:
Flowable.generate(...)
.subscribeOn(Schedulers.io())
.observeOn(Schedulers.computation())
.map(...)
.subscribe(...)
Unfortunately, this causes the generate method to keep running on the io thread. I only want a single prefetch. I have tried using buffer, but that really just ends up creating lists of chunks.
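To make the problem concrete, here is a rough, self-contained sketch of the pipeline above (readChunk() and process() are hypothetical stand-ins for my actual database read and processing steps). As far as I can tell, observeOn prefetches 128 items by default, which is why the generator keeps running ahead on the io thread instead of staying one chunk in front:

import java.util.Arrays;
import java.util.List;

import io.reactivex.Flowable;
import io.reactivex.schedulers.Schedulers;

public class PrefetchAttempt {

    // Hypothetical stand-ins for the real database read and per-chunk processing.
    static List<Integer> readChunk(int index) {
        return Arrays.asList(index * 2, index * 2 + 1);
    }

    static int process(List<Integer> chunk) {
        return chunk.size();
    }

    public static void main(String[] args) {
        Flowable.<List<Integer>, Integer>generate(() -> 0, (index, emitter) -> {
            // reads one chunk per invocation; there is no natural "end of data" here
            emitter.onNext(readChunk(index));
            return index + 1;
        })
        .subscribeOn(Schedulers.io())
        // observeOn requests 128 items up front by default, so the generator
        // on the io thread runs far ahead of the processing below
        .observeOn(Schedulers.computation())
        .map(PrefetchAttempt::process)
        .take(10)
        .blockingSubscribe(System.out::println);
    }
}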
So basically, while the current chunk is being streamed on a separate thread, I want to read the next chunk and have it ready.
Not sure whether this is possible. I need to use generate because there is no notion of when the data ends.
I have also tried subscribe(new FlowableSubscriber() {...}) with Subscription::request, but that did not seem to work.
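For reference, this is roughly what that manual-request attempt looked like (simplified; the println calls stand in for the real chunk read and processing). Requesting one item at a time gives strict lockstep, so the next chunk is never being read while the current one is processed:

import java.util.concurrent.CountDownLatch;

import org.reactivestreams.Subscription;

import io.reactivex.Flowable;
import io.reactivex.FlowableSubscriber;
import io.reactivex.schedulers.Schedulers;

public class ManualRequestAttempt {

    public static void main(String[] args) throws InterruptedException {
        CountDownLatch finished = new CountDownLatch(1);

        Flowable.generate(() -> 0, (state, emitter) -> {
            System.out.println("Reading chunk " + state);   // stands in for the database read
            emitter.onNext(state);
            return state + 1;
        })
        .subscribeOn(Schedulers.io())
        .subscribe(new FlowableSubscriber<Object>() {
            Subscription upstream;
            int processed;

            @Override
            public void onSubscribe(Subscription s) {
                upstream = s;
                s.request(1);               // ask for the first chunk only
            }

            @Override
            public void onNext(Object chunk) {
                System.out.println("Processing " + chunk);
                // the next chunk is only requested after processing finishes,
                // so reading and processing never overlap
                if (++processed == 10) {
                    upstream.cancel();
                    finished.countDown();
                } else {
                    upstream.request(1);
                }
            }

            @Override
            public void onError(Throwable t) {
                t.printStackTrace();
                finished.countDown();
            }

            @Override
            public void onComplete() {
                finished.countDown();
            }
        });

        finished.await();
    }
}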
Answer 0 (score: 1)
There is no standard operator in RxJava for this type of request-response pattern. You need a custom observeOn that issues a request before it sends the current item downstream.
import java.util.concurrent.atomic.*;

import org.junit.Test;
import org.reactivestreams.*;

import io.reactivex.*;
import io.reactivex.Scheduler.Worker;
import io.reactivex.internal.util.BackpressureHelper;
import io.reactivex.schedulers.Schedulers;

public class LockstepObserveOnTest {

    @Test
    public void test() {
        Flowable.generate(() -> 0, (s, e) -> {
            System.out.println("Generating " + s);
            Thread.sleep(500);          // emulate a slow chunk read
            e.onNext(s);
            return s + 1;
        })
        .subscribeOn(Schedulers.io())
        .compose(new LockstepObserveOn<>(Schedulers.computation()))
        .map(v -> {
            Thread.sleep(250);          // emulate per-chunk processing
            System.out.println("Processing " + v);
            Thread.sleep(250);
            return v;
        })
        .take(50)
        .blockingSubscribe();
    }

    /**
     * Moves emissions to the given Scheduler but, unlike observeOn, requests the
     * next upstream item just before emitting the current one downstream, so the
     * source produces exactly one item ahead while the previous one is processed.
     */
    static final class LockstepObserveOn<T> extends Flowable<T>
    implements FlowableTransformer<T, T> {

        final Flowable<T> source;

        final Scheduler scheduler;

        LockstepObserveOn(Scheduler scheduler) {
            this(null, scheduler);
        }

        LockstepObserveOn(Flowable<T> source, Scheduler scheduler) {
            this.source = source;
            this.scheduler = scheduler;
        }

        @Override
        protected void subscribeActual(Subscriber<? super T> subscriber) {
            source.subscribe(new LockstepObserveOnSubscriber<>(
                    subscriber, scheduler.createWorker()));
        }

        @Override
        public Publisher<T> apply(Flowable<T> upstream) {
            return new LockstepObserveOn<>(upstream, scheduler);
        }

        static final class LockstepObserveOnSubscriber<T>
        implements FlowableSubscriber<T>, Subscription, Runnable {

            final Subscriber<? super T> actual;

            final Worker worker;

            final AtomicReference<T> item;

            final AtomicLong requested;

            final AtomicInteger wip;

            Subscription upstream;

            volatile boolean cancelled;

            volatile boolean done;
            Throwable error;

            long emitted;

            LockstepObserveOnSubscriber(Subscriber<? super T> actual, Worker worker) {
                this.actual = actual;
                this.worker = worker;
                this.item = new AtomicReference<>();
                this.requested = new AtomicLong();
                this.wip = new AtomicInteger();
            }

            @Override
            public void onSubscribe(Subscription s) {
                upstream = s;
                actual.onSubscribe(this);
                s.request(1);               // prime the upstream with the first request
            }

            @Override
            public void onNext(T t) {
                item.lazySet(t);            // stash the item for the worker to emit
                schedule();
            }

            @Override
            public void onError(Throwable t) {
                error = t;
                done = true;
                schedule();
            }

            @Override
            public void onComplete() {
                done = true;
                schedule();
            }

            @Override
            public void request(long n) {
                BackpressureHelper.add(requested, n);
                schedule();
            }

            @Override
            public void cancel() {
                cancelled = true;
                upstream.cancel();
                worker.dispose();
                if (wip.getAndIncrement() == 0) {
                    item.lazySet(null);     // no drain running, clear the stashed item
                }
            }

            void schedule() {
                // start a drain pass on the worker if one isn't already running
                if (wip.getAndIncrement() == 0) {
                    worker.schedule(this);
                }
            }

            @Override
            public void run() {
                int missed = 1;
                long e = emitted;

                for (;;) {

                    long r = requested.get();

                    while (e != r) {
                        if (cancelled) {
                            item.lazySet(null);
                            return;
                        }

                        boolean d = done;
                        T v = item.get();
                        boolean empty = v == null;

                        if (d && empty) {
                            Throwable ex = error;
                            if (ex == null) {
                                actual.onComplete();
                            } else {
                                actual.onError(ex);
                            }
                            worker.dispose();
                            return;
                        }

                        if (empty) {
                            break;
                        }

                        // free the slot and request the next item *before* emitting,
                        // so the upstream produces the next chunk while the
                        // downstream processes the current one
                        item.lazySet(null);
                        upstream.request(1);

                        actual.onNext(v);

                        e++;
                    }

                    if (e == r) {
                        if (cancelled) {
                            item.lazySet(null);
                            return;
                        }

                        if (done && item.get() == null) {
                            Throwable ex = error;
                            if (ex == null) {
                                actual.onComplete();
                            } else {
                                actual.onError(ex);
                            }
                            worker.dispose();
                            return;
                        }
                    }

                    emitted = e;
                    missed = wip.addAndGet(-missed);
                    if (missed == 0) {
                        break;
                    }
                }
            }
        }
    }
}