I am trying to use the Apache HttpClient 4.3.6 connection pool manager to increase the throughput of my HTTP calls. My assumption is that the HttpClient implementation generally uses persistent connections. However, the results of my test code (included at the end) show that multiple concurrent HTTP connections using the JDK's URLConnection perform better.
Is that how HttpClient is supposed to work? Would HttpClient use the same HTTP connection for http://localhost:9000/user/123 and http://localhost:9000/user/456 (both point at the same host and port)? My test code follows; a small sketch for checking connection reuse through the pool statistics comes after it.
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
public class FooTest {

    public static void main(String[] args) throws Exception {
        runWithConnectionPool();
    }

    private static String extract(BufferedReader reader) throws Exception {
        StringBuilder buffer = new StringBuilder();
        String line = null;
        while ((line = reader.readLine()) != null) {
            buffer.append(line);
        }
        return buffer.toString();
    }

    private static void runWithConnectionPool() throws Exception {
        PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
        cm.setMaxTotal(1); // the pool manager allows only one connection in total
        CloseableHttpClient httpClient = HttpClients.custom()
                .setConnectionManager(cm)
                .setMaxConnTotal(100)
                .setMaxConnPerRoute(100)
                .build();
        long start = System.currentTimeMillis();
        HttpGet getReq = new HttpGet("http://www.google.com");
        /*
         * Option A: Using the HTTP connection pool
         * Option B: Individual JDK 8 URL connections
         */
        // Thread[] workers = generateAndStart(10, httpClient, getReq, 0); // (A)
        Thread[] workers = generateAndStart(10, getReq.getURI().toURL(), 0); // (B)
        for (int i = 0; i < workers.length; i++) {
            workers[i].join();
        }
        System.out.println("Elapsed: " + (System.currentTimeMillis() - start));
    }

    // Option B workers: each thread opens its own java.net.URLConnection.
    private static Thread[] generateAndStart(int num, URL url, long delay) {
        Thread[] workers = new Thread[num];
        for (int i = 0; i < num; i++) {
            System.out.println("Starting worker: " + i);
            final int j = i;
            workers[i] = new Thread(() -> connect(url, delay, j));
            workers[i].start();
        }
        return workers;
    }

    private static void connect(URL url, long delay, int ndx) {
        try {
            System.out.println(url.toURI().toString() + " started.");
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            URLConnection connection = url.openConnection();
            connection.addRequestProperty("Accept", "application/json");
            BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
            if (delay > 0) {
                System.out.println("Delayed.");
                sleep(delay);
            }
            reader.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Option A workers: each thread executes the request through the shared pooled client.
    private static Thread[] generateAndStart(int num, CloseableHttpClient httpClient, HttpGet getReq, long delay) {
        Thread[] workers = new Thread[num];
        for (int i = 0; i < num; i++) {
            System.out.println("Starting worker: " + i);
            final int j = i;
            workers[i] = new Thread(() -> connect(httpClient, getReq, delay, j));
            workers[i].start();
        }
        return workers;
    }

    private static void connect(CloseableHttpClient httpClient, HttpGet request, long delay, int ndx) {
        System.out.println(request.getURI().toString() + " started.");
        try (CloseableHttpResponse response = httpClient.execute(request, HttpClientContext.create());
             BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
            if (delay > 0) {
                System.out.println("Delayed.");
                sleep(delay);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private static void sleep(long delay) {
        try {
            Thread.sleep(delay);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
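To check whether the pool actually reuses one connection for two URIs on the same route (same host and port), the pool statistics exposed by PoolingHttpClientConnectionManager can be inspected after each request. This is a separate minimal sketch, not part of the test above; it assumes the question's placeholder endpoint http://localhost:9000 is running and that keep-alive is allowed by the server.

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.pool.PoolStats;
import org.apache.http.util.EntityUtils;

public class PoolReuseCheck {
    public static void main(String[] args) throws Exception {
        PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
        cm.setMaxTotal(1);            // at most one socket in the whole pool
        cm.setDefaultMaxPerRoute(1);  // and at most one per route
        try (CloseableHttpClient client = HttpClients.custom().setConnectionManager(cm).build()) {
            for (String uri : new String[]{"http://localhost:9000/user/123",
                                           "http://localhost:9000/user/456"}) {
                try (CloseableHttpResponse response = client.execute(new HttpGet(uri))) {
                    // Consuming the entity fully returns the connection to the pool for reuse.
                    EntityUtils.consume(response.getEntity());
                }
                PoolStats stats = cm.getTotalStats();
                System.out.println(uri + " -> leased=" + stats.getLeased()
                        + ", available=" + stats.getAvailable());
            }
        }
    }
}

Because both URIs share the route localhost:9000 and the pool is capped at one connection, the second request can only proceed by reusing the socket kept alive after the first one.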
I have made some observations and conclusions:

- java.net.URLConnection does not establish a connection until URLConnection.getInputStream() is called.
- java.net.URLConnection closes the current socket, for example on an HTTP error, and creates a new one.
- Multiple java.net.URLConnection instances created from the same java.net.URL instance create multiple sockets to the server. Instead, for simplicity, I called URL.openConnection() in a synchronized block, but every call to URL.openConnection() still creates a new socket. I had believed URL would make provision for this based on the number of threads calling URL.openConnection().
- java.net.URL is supposed to obey http.maxConnections and http.keepAlive, but, for example, running with -Dhttp.keepAlive=false did not prevent Connection: keep-alive from appearing in the HTTP headers.

My observations come from the examples I pasted here. They are better examples than the code pasted above.
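Regarding the last observation: http.keepAlive and http.maxConnections are read by the JDK's HttpURLConnection implementation and are typically only picked up if set before the first connection is made; they control whether the client keeps sockets open for reuse. A Connection: keep-alive seen in the response headers comes from the server, not from the client-side property. Below is a minimal sketch, assuming the question's placeholder endpoint, of setting the properties and of opting out of reuse per request:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class KeepAliveCheck {
    public static void main(String[] args) throws Exception {
        // Equivalent to -Dhttp.keepAlive=false -Dhttp.maxConnections=5; these JDK
        // networking properties should be set before the first HTTP connection is made.
        System.setProperty("http.keepAlive", "false");
        System.setProperty("http.maxConnections", "5");

        URL url = new URL("http://localhost:9000/user/123");
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        // Per-request way to opt out of reuse: ask for the connection to be closed.
        connection.setRequestProperty("Connection", "close");
        connection.setRequestProperty("Accept", "application/json");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(connection.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
        // Any keep-alive value printed here was sent by the server in the response.
        System.out.println("Response Connection header: "
                + connection.getHeaderField("Connection"));
    }
}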
Answer 0 (score: 0)
I found the answer after experimenting with both the JDK URLConnection and Apache HttpClient.

URLConnection is fast because it opens a new socket for every connection each thread makes to the server, while Apache HttpClient controls the number of sockets according to its settings in a multithreaded environment. When the number of sockets is limited to a few, the total connection time is roughly the same for both HTTP libraries. mitmproxy is a good and easy-to-use tool for verifying HTTP connections.
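Following that conclusion, the pool limits are what decide how many sockets HttpClient may open in parallel. Below is a minimal sketch, not taken from the question, of the pooled variant sized for ten concurrent workers so the comparison with one-socket-per-thread URLConnection is like for like. Note that in HttpClient 4.x the builder's setMaxConnTotal/setMaxConnPerRoute are only applied when the builder creates its own connection manager, so with an explicit PoolingHttpClientConnectionManager the limits belong on the manager itself.

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;

public class PooledParallelGet {
    public static void main(String[] args) throws Exception {
        PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
        cm.setMaxTotal(10);           // up to 10 sockets overall
        cm.setDefaultMaxPerRoute(10); // and up to 10 to the same host:port
        try (CloseableHttpClient client = HttpClients.custom().setConnectionManager(cm).build()) {
            Thread[] workers = new Thread[10];
            long start = System.currentTimeMillis();
            for (int i = 0; i < workers.length; i++) {
                workers[i] = new Thread(() -> {
                    // Each thread uses its own request object but shares the pooled client.
                    try (CloseableHttpResponse response =
                                 client.execute(new HttpGet("http://www.google.com"))) {
                        // Consume the body so the connection goes back to the pool.
                        EntityUtils.consume(response.getEntity());
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                });
                workers[i].start();
            }
            for (Thread worker : workers) {
                worker.join();
            }
            System.out.println("Elapsed: " + (System.currentTimeMillis() - start));
        }
    }
}

With the pool sized to match the worker count, each thread can hold its own connection, which should bring the pooled timing in line with the URLConnection variant; shrinking the limits back down reproduces the serialization observed in the original test.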