Why doesn't the background worker pause after I click the button?

Date: 2013-09-10 22:26:51

Tags: c# winforms

In Form1 I removed the _busy variable. At the top of Form1 I declared:

BackgroundWebCrawling bgwc;
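
For completeness, this is roughly how the instance gets created and started elsewhere in Form1 (a simplified sketch, not my exact code: buttonStart, the sample site list, and the configuration values are placeholders):

// Sketch only: placeholder wiring for how bgwc gets created and started in Form1.
public Form1()
{
    InitializeComponent();
    bgwc = new BackgroundWebCrawling();   // the same instance the pause/resume/cancel buttons use
    bgwc.mainBackGroundWorker.RunWorkerCompleted += mainBackGroundWorker_RunWorkerCompleted;
}

private void buttonStart_Click(object sender, EventArgs e)   // placeholder name for the start button
{
    List<string> sites = new List<string> { "http://example.com" };   // placeholder input
    WebcrawlerConfiguration cfg = new WebcrawlerConfiguration();      // construction details not shown here
    bgwc.Start(sites, 3, 2, cfg);   // 3 simultaneous secondary workers, crawl 2 levels deep
}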

Then in the button4 (pause) click event I did:

private void button4_Click(object sender, EventArgs e)
{
    bgwc.PauseWorker();
    label6.Text = "Process Paused";
    button5.Enabled = true;
    button4.Enabled = false;
}

In the button5 (resume) click event I did:

private void button5_Click(object sender, EventArgs e)
{
    bgwc.ContinueWorker();
    label6.Text = "Process Resumed";
    button4.Enabled = true;
    button5.Enabled = false;
}

The cancel button (button3) click event:

private void button3_Click(object sender, EventArgs e)
{
    bgwc.CancelWorker();
    cancel = true;
}

Then in the completed event in Form1 I check whether cancel is true:

if (cancel == true)
{
    label6.Text = "Process Cancelled";
}
else
{
    label6.Text = "Process Completed";
}
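
That check sits in the handler for the main worker's RunWorkerCompleted event (subscribed in the constructor sketch above). A sketch of that handler follows; as a side note, e.Cancelled could replace the manual cancel flag, because the DoWork handler sets e.Cancel = true:

// Sketch only: placeholder completed handler in Form1.
private void mainBackGroundWorker_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    // e.Cancelled is true when DoWork set e.Cancel = true before returning.
    if (e.Cancelled)
        label6.Text = "Process Cancelled";
    else
        label6.Text = "Process Completed";
}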

And this is what the BackgroundWebCrawling class looks like now:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using HtmlAgilityPack;
using System.Net;
using System.Windows.Forms;
using System.ComponentModel;
using System.Threading;

namespace GatherLinks
{
    class BackgroundWebCrawling
    {
        public string f;
        int counter = 0;
        List<string> WebSitesToCrawl;
        int MaxSimultaneousThreads;
        public BackgroundWorker mainBackGroundWorker;
        BackgroundWorker secondryBackGroundWorker;
        WebcrawlerConfiguration webcrawlerCFG;
        List<WebCrawler> webcrawlers;
        int maxlevels;
        public event EventHandler<BackgroundWebCrawlingProgressEventHandler> ProgressEvent;
        ManualResetEvent _busy = new ManualResetEvent(true);

        public BackgroundWebCrawling()
        {
            webcrawlers = new List<WebCrawler>();
            mainBackGroundWorker = new BackgroundWorker();
            mainBackGroundWorker.WorkerSupportsCancellation = true;
            mainBackGroundWorker.DoWork += mainBackGroundWorker_DoWork;
        }

        private void mainBackGroundWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            BackgroundWorker worker = sender as BackgroundWorker;
            for (int i = 0; i < WebSitesToCrawl.Count; i++)
            {
                _busy.WaitOne();
                if ((worker.CancellationPending == true))
                {
                    e.Cancel = true;
                    break;
                }
                while (counter >= MaxSimultaneousThreads)
                {
                    Thread.Sleep(10);
                }

                WebCrawler wc = new WebCrawler(webcrawlerCFG);
                webcrawlers.Add(wc);
                counter++;
                secondryBackGroundWorker = new BackgroundWorker();
                secondryBackGroundWorker.DoWork += secondryBackGroundWorker_DoWork;
                object[] args = new object[] { wc, WebSitesToCrawl[i] };
                secondryBackGroundWorker.RunWorkerAsync(args);
            }
            while (counter > 0)
            {
                Thread.Sleep(10);
            }
        }

        private void secondryBackGroundWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            object[] args = (object[])e.Argument;
            WebCrawler wc = (WebCrawler)args[0];
            string mainUrl = (string)args[1];
            wc.ProgressEvent += new EventHandler<WebCrawler.WebCrawlerProgressEventHandler>(x_ProgressEvent);
            wc.webCrawler(mainUrl, maxlevels);

            counter--;
        }

        public void Start(List<string> sitestocrawl, int threadsNumber, int maxlevels, WebcrawlerConfiguration wccfg)
        {
            this.maxlevels = maxlevels;
            webcrawlerCFG = wccfg;
            WebSitesToCrawl = sitestocrawl;
            MaxSimultaneousThreads = threadsNumber;
            mainBackGroundWorker.RunWorkerAsync();
        }

        private void x_ProgressEvent(object sender, WebCrawler.WebCrawlerProgressEventHandler e)
        {
            // OK .. so now you get the data here in e
            // and here you should call the event to form1
            Object[] temp_arr = new Object[8];
            temp_arr[0] = e.csFiles;
            temp_arr[1] = e.mainUrl;
            temp_arr[2] = e.levels;
            temp_arr[3] = e.currentCrawlingSite;
            temp_arr[4] = e.sitesToCrawl;
            temp_arr[5] = e.done;
            temp_arr[6] = e.failedUrls;
            temp_arr[7] = e.failed;
            OnProgressEvent(temp_arr); // Send the data + additional data from this class to Form1.
        }

        private void GetLists(List<string> allWebSites)
        {

        }

        public class BackgroundWebCrawlingProgressEventHandler : EventArgs
        {
            public List<string> csFiles { get; set; }
            public string mainUrl { get; set; }
            public int levels { get; set; }
            public List<string> currentCrawlingSite { get; set; }
            public List<string> sitesToCrawl { get; set; }
            public bool done { get; set; }
            public int failedUrls { get; set; }
            public bool failed { get; set; }
        }

        protected void OnProgressEvent(Object[] some_params) // Probably you need to pass some vars here too...
        {
            // some_params to put into the eventArgs..
            if (ProgressEvent != null)
                ProgressEvent(this,
                    new BackgroundWebCrawlingProgressEventHandler()
                    {
                        csFiles = (List<string>)some_params[0],
                        mainUrl = (string)some_params[1],
                        levels = (int)some_params[2],
                        currentCrawlingSite = (List<string>)some_params[3],
                        sitesToCrawl = (List<string>)some_params[4],
                        done = (bool)some_params[5],
                        failedUrls = (int)some_params[6],
                        failed = (bool)some_params[7]
                    });
        }

        public void PauseWorker()
        {
            if (mainBackGroundWorker.IsBusy)
            {
                _busy.Reset();
            }
        }

        public void ContinueWorker()
        {
            _busy.Set();
        }

        public void CancelWorker()
        {
            ContinueWorker();
            mainBackGroundWorker.CancelAsync();
        }

    }
}

So I added the Pause, Continue and Cancel methods. In the DoWork event I changed everything and added the new pieces.

But when I click the buttons nothing happens. No pause, no resume, no cancel. Nothing.

1 Answer:

Answer 0 (score: 3):

You never check the _busy state in the mainBackGroundWorker_DoWork method:

for (int i = 0; i < WebSitesToCrawl.Count; i++)
{
    _busy.WaitOne();
    //...
}

You should also keep the ManualResetEvent _busy together with the BackgroundWorker inside the class:
ManualResetEvent _busy = new ManualResetEvent(true);
public BackgroundWorker mainBackGroundWorker;

public void PauseWorker()
{
   if(mainBackGroundWorker.IsBusy)
   {
        _busy.Reset(); 
   }
}

public void ContinueWorker()
{
    _busy.Set();
}

And in Form1:

private void button4_Click(object sender, EventArgs e)
{
    bgwc.PauseWorker();
    //...
}

private void button5_Click(object sender, EventArgs e)
{
    bgwc.ContinueWorker();
    //...
}

To cancel the BackgroundWorker you can use the CancellationPending property and the CancelAsync method. Note: you should un-pause the worker first.

public void CancelWorker()
{
   ContinueWorker();
   mainBackGroundWorker.CancelAsync();
}

private void mainBackGroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker worker = sender as BackgroundWorker;
    for (int i = 0; i < WebSitesToCrawl.Count; i++)
    {
        _busy.WaitOne();
         if ((worker.CancellationPending == true))
         {
             e.Cancel = true;
             break;
         }
         //...
    }  
}

If this doesn't help, then the problem is in the code of your mainBackGroundWorker and secondryBackGroundWorker.

  1. This code only pauses the mainBackGroundWorker, but it does not pause the secondryBackGroundWorkers. The same goes for cancelling: if the main worker is cancelled, it will still wait for all the secondary workers to finish their work, and if you pause the main worker, you will still get new results from the secondary workers.

  2. You do not handle errors. If an exception is thrown in a secondary worker, you get no notification about it, and your main worker will never stop, because counter will never reach 0 (see the sketch after this list).

  3. There may be other problems causing this behavior.
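
To illustrate point 2, the secondary worker's DoWork handler could be hardened like this. This is only a sketch, not code from the question: the try/catch/finally and Interlocked.Decrement are additions (the matching counter++ in the main loop would then need Interlocked.Increment as well), and pausing or cancelling the secondary workers themselves would still require WebCrawler to observe _busy and a cancellation flag, which the posted code does not show.

private void secondryBackGroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    object[] args = (object[])e.Argument;
    WebCrawler wc = (WebCrawler)args[0];
    string mainUrl = (string)args[1];
    try
    {
        wc.ProgressEvent += new EventHandler<WebCrawler.WebCrawlerProgressEventHandler>(x_ProgressEvent);
        wc.webCrawler(mainUrl, maxlevels);
    }
    catch (Exception ex)
    {
        // Surface the failure instead of letting it disappear silently,
        // e.g. raise an event or log it (simple console output shown here).
        Console.WriteLine("Crawling " + mainUrl + " failed: " + ex.Message);
    }
    finally
    {
        // Runs even when webCrawler throws, so the main worker's
        // "while (counter > 0)" loop can always finish. Interlocked avoids
        // lost updates when several secondary workers decrement at once.
        Interlocked.Decrement(ref counter);
    }
}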