Processing data with multiple threads at the same time

Posted: 2013-10-26 16:23:03

Tags: c# .net multithreading

We have an application that periodically receives MMS messages and is supposed to reply to them.

We currently do this with a single thread: first the messages are received, then they are processed one at a time. This works, but it is slow.

So we are now considering doing the same process, but with multiple threads at the same time.

Is there any simple way to allow the incoming records to be processed in parallel, while avoiding the same record being wrongly processed by two threads?

1 Answer:

Answer 0 (score: 1)


Is there any simple way to allow the incoming records to be processed in parallel, while avoiding the same record being wrongly processed by two threads?

Yes, and it is actually not too difficult. What you want to do is known as the "producer-consumer pattern".

If your message receiver can only run on a single thread, but your message "processor" can work on several messages at once, you just need a BlockingCollection to store the work that is waiting to be processed:

using System;
using System.Collections.Concurrent;
using System.Threading;
using System.Threading.Tasks;

public sealed class MessageProcessor : IDisposable
{
    public MessageProcessor() 
        : this(-1)
    {   
    }

    public MessageProcessor(int maxThreadsForProcessing)
    {
        _maxThreadsForProcessing = maxThreadsForProcessing;
        _messages = new BlockingCollection<Message>();
        _cts = new CancellationTokenSource();

        _messageProcessorThread = new Thread(ProcessMessages);
        _messageProcessorThread.IsBackground = true;
        _messageProcessorThread.Name = "Message Processor Thread";
        _messageProcessorThread.Start();
    }

    public int MaxThreadsForProcessing
    {
        get { return _maxThreadsForProcessing; }
    }

    private readonly BlockingCollection<Message> _messages;
    private readonly CancellationTokenSource _cts;
    private readonly Thread _messageProcessorThread;
    private bool _disposed = false;
    private readonly int _maxThreadsForProcessing;


    /// <summary>
    /// Add a new message to be queued up and processed in the background.
    /// </summary>
    public void ReceiveMessage(Message message)
    {
       _messages.Add(message);
    }

    /// <summary>
    /// Signals the system to stop processing messages.
    /// </summary>
    /// <param name="finishQueue">Should the queue of messages waiting to be processed be allowed to finish</param>
    public void Stop(bool finishQueue)
    {
        _messages.CompleteAdding();
        if(!finishQueue)
            _cts.Cancel();

        //Wait for the message processor thread to finish its work.
        _messageProcessorThread.Join();
    }

    /// <summary>
    /// The background thread that processes messages in the system
    /// </summary>
    private void ProcessMessages()
    {
        try
        {
            Parallel.ForEach(_messages.GetConsumingEnumerable(),
                         new ParallelOptions()
                         {
                             CancellationToken = _cts.Token,
                             MaxDegreeOfParallelism = MaxThreadsForProcessing
                         },
                         ProcessMessage);
        }
        catch (OperationCanceledException)
        {
            //Don't care that it happened, just don't want it to bubble up as an unhandled exception.
        }
    }

    private void ProcessMessage(Message message, ParallelLoopState loopState)
    {
        //Here be dragons! (or your code to process a message, your choice :-))

        //Use if(_cts.Token.IsCancellationRequested || loopState.ShouldExitCurrentIteration) to test if 
        // we should quit out of the function early for a graceful shutdown.
    }

    public void Dispose()
    {
        if(!_disposed)
        {
            if(_cts != null && _messages != null && _messageProcessorThread != null)
                Stop(true); //This line will block until all queued messages have been processed; if you want a quicker shutdown, call Stop(false) before you dispose the object.

            if(_cts != null)
                _cts.Dispose();

            if(_messages != null)
                _messages.Dispose();

            GC.SuppressFinalize(this);
           _disposed = true;
        }
    }

    ~MessageProcessor()
    {
        //Nothing to do, just making FXCop happy.
    }

}
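For completeness, here is a minimal sketch of how the class above could be driven. The Message type, the keepReceiving flag and the GetNextMmsMessage() call are placeholders for whatever your actual receiving code looks like; they are not part of the original post.

// Hypothetical driver code: keepReceiving and GetNextMmsMessage() stand in
// for your real receive loop.
using (var processor = new MessageProcessor(4)) // allow up to 4 processing threads
{
    while (keepReceiving)
    {
        Message message = GetNextMmsMessage(); // placeholder: blocks until a message arrives
        processor.ReceiveMessage(message);     // queue it for background processing
    }

    // Disposing (via the using block) calls Stop(true), which blocks until
    // every message already queued has been processed.
}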

I highly recommend you read the free book Patterns for Parallel Programming; it covers this in detail, and an entire section explains the producer-consumer model in depth.


Update: There are some performance issues when passing GetConsumingEnumerable() to Parallel.ForEach(). Instead, use the ParallelExtensionsExtras library and its extension method GetConsumingPartitioner():

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;

public static class BlockingCollectionExtensions
{
    public static Partitioner<T> GetConsumingPartitioner<T>(
        this BlockingCollection<T> collection)
    {
        return new BlockingCollectionPartitioner<T>(collection);
    }

    private class BlockingCollectionPartitioner<T> : Partitioner<T>
    {
        private readonly BlockingCollection<T> _collection;

        internal BlockingCollectionPartitioner(
            BlockingCollection<T> collection)
        {
            if (collection == null)
                throw new ArgumentNullException("collection");
            _collection = collection;
        }

        public override bool SupportsDynamicPartitions
        {
            get { return true; }
        }

        public override IList<IEnumerator<T>> GetPartitions(
            int partitionCount)
        {
            if (partitionCount < 1)
                throw new ArgumentOutOfRangeException("partitionCount");
            var dynamicPartitioner = GetDynamicPartitions();
            return Enumerable.Range(0, partitionCount).Select(_ =>
                dynamicPartitioner.GetEnumerator()).ToArray();
        }

        public override IEnumerable<T> GetDynamicPartitions()
        {
            return _collection.GetConsumingEnumerable();
        }
    }
}
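With that extension method in place, ProcessMessages in the class above only needs to swap the consuming enumerable for the partitioner. A sketch of what that could look like:

private void ProcessMessages()
{
    try
    {
        // Same loop as before, but Parallel.ForEach now pulls work through
        // the custom partitioner instead of GetConsumingEnumerable().
        Parallel.ForEach(_messages.GetConsumingPartitioner(),
                         new ParallelOptions()
                         {
                             CancellationToken = _cts.Token,
                             MaxDegreeOfParallelism = MaxThreadsForProcessing
                         },
                         ProcessMessage);
    }
    catch (OperationCanceledException)
    {
        // Cancellation is expected during a hard stop; swallow it as before.
    }
}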