I have a web service that calls another service over HTTP. The web service breaks a one-to-many request apart and attempts to make the resulting one-to-one requests in parallel. To test performance, I have kept the throughput to the backend constant. For example, I was able to achieve a throughput of 1000 req/sec at a latency of 1000 ms. So, to test the parallel requests, where each request to the web service breaks into 2 backend requests, I sent 500 req/sec but only achieved a 99th percentile latency of 150 ms. Am I creating thread contention and/or blocking HTTP calls with the following code?
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Collectors;

public class Foo {

    private HTTPClient myHTTPClient = new HTTPClient("http://my_host.com"); // JAX-RS HTTP client

    private interface Handler<REQ, RES> {
        RES work(REQ req);
    }

    // Runs the handler asynchronously; with no executor given, supplyAsync uses the common ForkJoinPool.
    private <REQ, RES> CompletableFuture<RES> getAsync(REQ req, Handler<REQ, RES> handler) {
        return CompletableFuture.supplyAsync(() -> handler.work(req));
    }

    public RouteCostResponse getRouteCost(Point source, List<Point> destinations) {
        Map<String, Request> requests = new HashMap<>();
        // create request bodies and keep track of request ids
        for (Point destination : destinations) {
            requests.put(destination.getId(), new RouteCostRequest(source, destination));
        }
        // create futures
        ConcurrentMap<String, CompletableFuture<RouteCost>> futures = requests.entrySet().parallelStream()
                .collect(Collectors.toConcurrentMap(
                        entry -> entry.getKey(),
                        entry -> getAsync(entry.getValue(), route -> myHTTPClient.getRoute(route))
                ));
        // retrieve results (join blocks until each future completes)
        ConcurrentMap<String, RouteCost> result = futures.entrySet().parallelStream()
                .collect(Collectors.toConcurrentMap(
                        entry -> entry.getKey(),
                        entry -> entry.getValue().join()
                ));
        return new RouteCostResponse(result);
    }
}
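For reference, CompletableFuture.supplyAsync with no executor argument runs its task on ForkJoinPool.commonPool(), whose default parallelism is roughly one less than the number of available cores, so blocking HTTP calls can end up queuing behind each other. A minimal standalone check of that pool size (assumes nothing beyond the JDK; class name is just for illustration):

import java.util.concurrent.ForkJoinPool;

public class CommonPoolCheck {
    public static void main(String[] args) {
        // Parallelism of the pool supplyAsync uses when no executor is passed;
        // by default this is roughly availableProcessors() - 1.
        System.out.println("common pool parallelism: " + ForkJoinPool.commonPool().getParallelism());
        System.out.println("available processors:    " + Runtime.getRuntime().availableProcessors());
    }
}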
Answer 0 (score: 0)
There is no thread contention with the following code, although it seems I was running into an I/O problem. The key is to use an explicit thread pool, either a ForkJoinPool
or Executors.newFixedThreadPool, instead of the common pool.
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ForkJoinPool;
import java.util.stream.Collectors;

public class Foo {

    private HTTPClient myHTTPClient = new HTTPClient("http://my_host.com"); // JAX-RS HTTP client

    // explicit pool sized for blocking HTTP calls, instead of the small common pool
    private static final ForkJoinPool pool = new ForkJoinPool(1000);

    private interface Handler<REQ, RES> {
        RES work(REQ req);
    }

    private <REQ, RES> CompletableFuture<RES> getAsync(REQ req, Handler<REQ, RES> handler) {
        // run the blocking call on the explicit pool rather than ForkJoinPool.commonPool()
        return CompletableFuture.supplyAsync(() -> handler.work(req), pool);
    }

    public RouteCostResponse getRouteCost(Point source, List<Point> destinations) {
        Map<String, Request> requests = new HashMap<>();
        // create request bodies and keep track of request ids
        for (Point destination : destinations) {
            requests.put(destination.getId(), new RouteCostRequest(source, destination));
        }
        // create futures
        ConcurrentMap<String, CompletableFuture<RouteCost>> futures = requests.entrySet().stream()
                .collect(Collectors.toConcurrentMap(
                        entry -> entry.getKey(),
                        entry -> getAsync(entry.getValue(), route -> myHTTPClient.getRoute(route))
                ));
        // retrieve results (join blocks until each future completes)
        ConcurrentMap<String, RouteCost> result = futures.entrySet().stream()
                .collect(Collectors.toConcurrentMap(
                        entry -> entry.getKey(),
                        entry -> entry.getValue().join()
                ));
        return new RouteCostResponse(result);
    }
}
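A sketch of the Executors.newFixedThreadPool alternative mentioned above: only the executor wiring changes, the rest of the class stays the same. The class name is illustrative and the pool size of 1000 simply mirrors the ForkJoinPool example; it is an assumption, not a tuned value.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class FooWithFixedPool {
    // Fixed-size pool of plain threads dedicated to the blocking HTTP calls;
    // sized to 1000 here only to match the ForkJoinPool example.
    private static final ExecutorService pool = Executors.newFixedThreadPool(1000);

    private interface Handler<REQ, RES> {
        RES work(REQ req);
    }

    private <REQ, RES> CompletableFuture<RES> getAsync(REQ req, Handler<REQ, RES> handler) {
        // pass the explicit executor so the blocking work never lands on the common pool
        return CompletableFuture.supplyAsync(() -> handler.work(req), pool);
    }
}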