说,我有一个网址列表:
$ for i in `seq 1 90`; do echo "$RANDOM$RANDOM.blogspot.com" ; done >> /tmp/urls.txt
与使用go代码相比,我在C中的GET花费的时间更长。
这是C代码:
n_memory.c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <curl/curl.h>
struct MemoryStruct {
char *memory;
size_t size;
};
static size_t
WriteMemoryCallback(void *contents, size_t size, size_t nmemb, void *userp)
{
size_t realsize = size * nmemb;
struct MemoryStruct *mem = (struct MemoryStruct *)userp;
char *ptr = realloc(mem->memory, mem->size + realsize + 1);
if(ptr == NULL) {
printf("not enough memory (realloc returned NULL)\n");
return 0;
}
mem->memory = ptr;
memcpy(&(mem->memory[mem->size]), contents, realsize);
mem->size += realsize;
mem->memory[mem->size] = 0;
return realsize;
}
int try_url(char *url);
int main(int argc, char **argv){
if (argc < 2){
fprintf(stderr, "error, argc\n");
return 1;
}
FILE *fp = fopen(argv[1],"r");
if (!fp){
fprintf(stderr, "fopen, argc\n");
return 1;
}
int count = 1;
char _line[2048];
char url[8192];
while ( fgets(_line, 1024, fp) ){
_line[strcspn(_line, "\r\n")] = 0;
char *part1 = "https://dns.google.com/resolve?name=";
char *part3 = "&type=A";
snprintf(url, 4096, "%s%s%s", part1, _line, part3);
printf("%d %s\n", count, url);
try_url(url);
if (count > 80){
break;
}
count++;
}
//try_url(argv[1]);
puts("Done");
return 0;
}
int try_url(char *url)
{
CURL *hnd;
CURLcode res;
struct curl_slist *slist1;
struct MemoryStruct chunk;
chunk.memory = malloc(1); /* will be grown as needed by the realloc above */
chunk.size = 0; /* no data at this point */
curl_global_init(CURL_GLOBAL_ALL);
hnd = curl_easy_init();
curl_easy_setopt(hnd, CURLOPT_NOPROGRESS, 1L);
curl_easy_setopt(hnd, CURLOPT_MAXREDIRS, 50L);
curl_easy_setopt(hnd, CURLOPT_HTTP_VERSION, (long)CURL_HTTP_VERSION_2TLS);
curl_easy_setopt(hnd, CURLOPT_SSL_VERIFYPEER, 0L);
curl_easy_setopt(hnd, CURLOPT_SSL_VERIFYHOST, 0L);
curl_easy_setopt(hnd, CURLOPT_TCP_KEEPALIVE, 1L);
curl_easy_setopt(hnd, CURLOPT_RESOLVE, slist1);
slist1 = NULL;
slist1 = curl_slist_append(slist1, "dns.google.com:443:172.217.5.110");
curl_easy_setopt(hnd, CURLOPT_SSL_VERIFYPEER, 0);
curl_easy_setopt(hnd, CURLOPT_SSL_VERIFYHOST, 0);
curl_easy_setopt(hnd, CURLOPT_URL, url);
curl_easy_setopt(hnd, CURLOPT_WRITEFUNCTION, WriteMemoryCallback);
curl_easy_setopt(hnd, CURLOPT_WRITEDATA, (void *)&chunk);
curl_easy_setopt(hnd, CURLOPT_USERAGENT, "libcurl-agent/1.0");
res = curl_easy_perform(hnd);
if(res != CURLE_OK) {
fprintf(stderr, "curl_easy_perform() failed: %s\n",
curl_easy_strerror(res));
}
else {
printf("%lu bytes retrieved\n", (unsigned long)chunk.size);
}
curl_easy_cleanup(hnd);
free(chunk.memory);
curl_global_cleanup();
return 0;
}
这是执行代码:
n_get.go
package main
import (
"bufio"
"fmt"
"log"
"net/http"
"os"
"time"
)
func main() {
if len(os.Args) < 2 {
fmt.Println("Invalid usage")
os.Exit(1)
}
filename := os.Args[1]
f, err := os.Open(filename)
checkerr(err)
defer f.Close()
fscanner := bufio.NewScanner(f)
i := 1
for fscanner.Scan() {
text := fscanner.Text()
// https://dns.google.com/resolve?name=1.bp.blogspot.com&type=A
url := "https://dns.google.com/resolve?name=" + text + "&type=A"
//fmt.Println(i, url);
get_url(url)
if i == 80 {
break
}
i = i + 1
}
fmt.Println("Hello!")
}
func checkerr(err error) {
if err != nil {
fmt.Println(err)
log.Fatal(err)
}
}
func get_url(url string) int {
fmt.Println(url)
t1 := time.Now()
resp, err := http.Get(url)
t2 := time.Now()
checkerr(err)
fmt.Println(resp.Status)
diff := t2.Sub(t1)
fmt.Println(url, "Took us", diff)
if resp.StatusCode == 200 {
fmt.Println("OK")
return 0
} else {
fmt.Println("Failed")
return 1
}
}
我什至尝试用--resolve选项协助libcurl传递它可以使用的IP地址,从而免去了必须进行名称查找的麻烦。但是,这似乎并没有太大帮助。
即使尝试使用带有卷曲的--insecure
选项,也不会产生太大的凹痕。
下面是执行80个HTTPS GET的时间:
+------------------+-----------------+
| golang | c |
+------------------------------------+
| real 0m2.670s |real 0m20.024s|
| user 0m0.555s |user 0m13.393s|
| sys 0m0.086s |sys 0m0.242s |
+------------------------------------+
这有点不平衡,我正在寻找缩小差距的指针。 如何提高C代码的速度?任何观点将不胜感激。
答案 0 :(得分:3)
首先,不要每次尝试都运行所有curl初始化。这样做一次。
我也不认为您每次都需要做所有选择。
也不要执行1字节的malloc。只需将其保留为NULL。重新分配知道如何处理。