使用HttpWebRequest获取功能时,TCP连接将堆叠到65k - 互联网连接丢失

时间:2017-03-21 00:31:48

标签: c# tcp httpwebrequest webrequest webproxy

我不明白可能导致此错误的原因

我正在使用以下函数,大约有1000个并发连接

每个连接使用不同的webproxy

经过一段时间工作15分钟后,已建立的TCP连接计数开始堆叠,互联网连接丢失

当我不使用任何webproxy时,我不会遇到任何错误

我正在使用以下函数来检索活动的TCP连接数

var properties = IPGlobalProperties.GetIPGlobalProperties();

我的功能中没有发现任何泄漏

所以我需要你的帮助来解决这个恼人的问题

c#.net 4.6.2

此问题发生时活动TCP连接的状态

enter image description here

public static cs_HttpFetchResults func_fetch_Page(
    string srUrl, int irTimeOut = 60,
    string srRequestUserAgent = "Mozilla/5.0 (Windows NT 6.3; WOW64; rv:31.0) Gecko/20100101 Firefox/31.0",
    string srProxy = null, int irCustomEncoding = 0, bool blAutoDecode = true, bool blKeepAlive = true)
{
    cs_HttpFetchResults mycs_HttpFetchResults = new cs_HttpFetchResults();
    mycs_HttpFetchResults.srFetchingFinalURL = srUrl;

    HttpWebRequest request = null;
    WebResponse response = null;

    try
    {
        request = (HttpWebRequest)WebRequest.Create(srUrl);
        request.CookieContainer = new System.Net.CookieContainer();

        if (srProxy != null)
        {
            string srProxyHost = srProxy.Split(':')[0];
            int irProxyPort = Int32.Parse(srProxy.Split(':')[1]);
            WebProxy my_awesomeproxy = new WebProxy(srProxyHost, irProxyPort);
            my_awesomeproxy.Credentials = new NetworkCredential();
            request.Proxy = my_awesomeproxy;
        }
        else
        {
            request.Proxy = null;
        }

        request.ContinueTimeout = irTimeOut * 1000;
        request.ReadWriteTimeout = irTimeOut * 1000;
        request.Timeout = irTimeOut * 1000;
        request.UserAgent = srRequestUserAgent;
        request.KeepAlive = blKeepAlive;
        request.Accept = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";

        WebHeaderCollection myWebHeaderCollection = request.Headers;
        myWebHeaderCollection.Add("Accept-Language", "en-gb,en;q=0.5");
        myWebHeaderCollection.Add("Accept-Encoding", "gzip, deflate");

        request.AutomaticDecompression = DecompressionMethods.Deflate | DecompressionMethods.GZip;

        using (response = request.GetResponse())
        {
            using (Stream strumien = response.GetResponseStream())
            {
                Encoding myEncoding = Encoding.UTF8;
                string srContentType = "";

                if (response.ContentType != null)
                {
                    srContentType = response.ContentType;
                    if (srContentType.Contains(";"))
                    {
                        srContentType = srContentType.Split(';')[1];
                    }
                    srContentType = srContentType.Replace("charset=", "");
                    srContentType = func_Process_Html_Input(srContentType);
                }

                try
                {
                    myEncoding = Encoding.GetEncoding(srContentType);
                }
                catch
                {
                    myEncoding = irCustomEncoding == 0 ? Encoding.UTF8 : Encoding.GetEncoding(irCustomEncoding);
                }

                using (StreamReader sr = new StreamReader(strumien, myEncoding))
                {
                    mycs_HttpFetchResults.srFetchBody = sr.ReadToEnd();
                    if (blAutoDecode == true)
                    {
                        mycs_HttpFetchResults.srFetchBody = HttpUtility.HtmlDecode(mycs_HttpFetchResults.srFetchBody);
                    }
                    mycs_HttpFetchResults.srFetchingFinalURL = Return_Absolute_Url(response.ResponseUri.AbsoluteUri.ToString(), response.ResponseUri.AbsoluteUri.ToString());
                    mycs_HttpFetchResults.blResultSuccess = true;
                }
            }
        }

        if (request != null)
            request.Abort();
        request = null;
    }
    catch (Exception E)
    {
        if (E.Message.ToString().Contains("(404)"))
            mycs_HttpFetchResults.bl404 = true;

        csLogger.logCrawlingErrors("crawling failed url: " + srUrl, E);

    }
    finally
    {
        if (request != null)
            request.Abort();
        request = null;

        if (response != null)
            response.Close();
        response = null;
    }

    return mycs_HttpFetchResults;
}

0 个答案:

没有答案