我正在尝试在go中创建一个http客户端程序,这将产生许多http GET请求。我正在使用缓冲通道来限制并发请求的数量。
当我运行程序时,我得到了
Get http://198.18.96.213/: dial tcp 198.18.96.213:80: too many open files
这是我的计划:
package main
import (
"fmt"
"net/http"
"time"
)
func HttpGets(numRequests int, numConcurrent int, url string) map[int]int {
// I want number of requests with each status code
responseStatuses := map[int]int {
100: 0, 101 : 0, 102 : 0, 200 : 0, 201 : 0, 202 : 0, 203 : 0, 204 : 0, 205 : 0,
206 : 0, 207 : 0, 208 : 0, 226 : 0, 300 : 0, 301 : 0, 302 : 0, 303 : 0, 304 : 0,
305 : 0, 306 : 0, 307 : 0, 308 : 0, 400 : 0, 401 : 0, 402 : 0, 403 : 0, 404 : 0,
405 : 0, 406 : 0, 407 : 0, 408 : 0, 409 : 0, 410 : 0, 411 : 0, 412 : 0, 413 : 0,
414 : 0, 415 : 0, 416 : 0, 417 : 0, 421 : 0, 422 : 0, 423 : 0, 424 : 0, 425 : 0,
426 : 0, 427 : 0, 428 : 0, 429 : 0, 430 : 0, 431 : 0, 500 : 0, 501 : 0, 502 : 0,
503 : 0, 504 : 0, 505 : 0, 506 : 0, 507 : 0, 508 : 0, 509 : 0, 510 : 0, 511 : 0,
}
reqsDone := 0
ch := make(chan *http.Response, numConcurrent)
for i := 0; i < numRequests; i++ {
go func(url string) {
client := &http.Client{}
req, reqErr := http.NewRequest("GET", url, nil)
if reqErr != nil {
fmt.Println(reqErr)
}
// adding connection:close header hoping to get rid
// of too many files open error. Found this in http://craigwickesser.com/2015/01/golang-http-to-many-open-files/
req.Header.Add("Connection", "close")
resp, err := client.Do(req)
if (err !=nil) {
fmt.Println(err)
}
ch <- resp
}(url)
}
for {
select {
case r := <-ch:
reqsDone += 1 // probably needs a lock?
status := r.StatusCode
if _, ok := responseStatuses[status]; ok {
responseStatuses[status] += 1
} else {
responseStatuses[status] = 1
}
r.Body.Close() // trying to close body hoping to get rid of too many files open error
if (reqsDone == numRequests) {
return responseStatuses
}
}
}
return responseStatuses
}
func main() {
var numRequests, numConcurrent = 500, 10
url := "http://198.18.96.213/"
beginTime := time.Now()
results := HttpGets(numRequests, numConcurrent, url)
endTime := time.Since(beginTime)
fmt.Printf("Total time elapsed: %s\n", endTime)
for k,v := range results {
if v!=0 {
fmt.Printf("%d : %d\n", k, v)
}
}
}
如何确保关闭文件/通讯录,以便在发出多个请求时不会收到此错误?
答案 0 :(得分:1)
基本上你正在产生100个goroutine,它们会在块关闭之前开始连接。
这是一个快速(非常难看)的工作代码:
var (
responseStatuses = make(map[int]int, 63)
reqsDone = 0
urlCh = make(chan string, numConcurrent)
ch = make(chan *http.Response, numConcurrent)
)
log.Println(numConcurrent, numRequests, len(responseStatuses))
for i := 0; i < numConcurrent; i++ {
go func() {
for url := range urlCh {
client := &http.Client{}
req, reqErr := http.NewRequest("GET", url, nil)
if reqErr != nil {
fmt.Println(reqErr)
}
// adding connection:close header hoping to get rid
// of too many files open error. Found this in http://craigwickesser.com/2015/01/golang-http-to-many-open-files/
req.Header.Add("Connection", "close")
resp, err := client.Do(req)
if err != nil {
fmt.Println(err)
}
ch <- resp
}
}()
}
go func() {
for i := 0; i < numRequests; i++ {
urlCh <- url
}
close(urlCh)
}()
答案 1 :(得分:0)
我认为这是最简单的方法,https://github.com/leenanxi/nasync
//it has a simple usage
nasync.Do(yourAsyncTask)
答案 2 :(得分:0)
您可以使用以下库:
请求:一个Go库,用于减少发出HTTP请求(每请求20k / s)时的麻烦
https://github.com/alessiosavi/Requests
这个想法是分配一个请求列表,而不是发送带有可配置的“并行”因子的请求,该因子允许一次仅运行“ N”个请求。
// This array will contains the list of request
var reqs []requests.Request
// N is the number of request to run in parallel, in order to avoid "TO MANY OPEN FILES. N have to be lower than ulimit threshold"
var N int = 12
// Create the list of request
for i := 0; i < 1000; i++ {
// In this case, we init 1000 request with same URL,METHOD,BODY,HEADERS
req, err := requests.InitRequest("https://127.0.0.1:5000", "GET", nil, nil, true)
if err != nil {
// Request is not compliant, and will not be add to the list
log.Println("Skipping request [", i, "]. Error: ", err)
} else {
// If no error occurs, we can append the request created to the list of request that we need to send
reqs = append(reqs, *req)
}
}
这时,我们有了一个列表,其中包含必须发送的请求。 让我们并行发送它们!
// This array will contains the response from the givens request
var response []datastructure.Response
// send the request using N request to send in parallel
response = requests.ParallelRequest(reqs, N)
// Print the response
for i := range response {
// Dump is a method that print every information related to the response
log.Println("Request [", i, "] -> ", response[i].Dump())
// Or use the data present in the response
log.Println("Headers: ", response[i].Headers)
log.Println("Status code: ", response[i].StatusCode)
log.Println("Time elapsed: ", response[i].Time)
log.Println("Error: ", response[i].Error)
log.Println("Body: ", string(response[i].Body))
}
您可以在存储库的example文件夹中找到示例用法。
SPOILER :
我是这个小图书馆的作者