
I have a service that runs constantly, processing data; it receives requests to process new data through messaging. While it is busy processing, new requests get merged together so that they are all processed at once. An AutoResetEvent is used to notify the processor that a new request is available.

My question is: in EventLoop, is it possible for currentRequest to be null after the WaitOne returns?

Is it bad practice to have the _eventAvailable.Set() outside of the lock (_eventLocker)? I moved it out so that the waiting thread wouldn't wake up at the WaitOne and immediately contend for the lock (_eventLocker).

Any suggestions on how to better write the following code?

public sealed class RealtimeRunner : MarshalByRefObject
{
    /// <summary>
    /// The actual event, new events get merged into this if it is not null
    /// </summary>
    private Request _pendingRequest;

    /// <summary>
    /// Used to signal the runner thread when an event is available to process
    /// </summary>
    private readonly AutoResetEvent _eventAvailable = new AutoResetEvent(false);
    private readonly object _eventLocker = new object();


    /// <summary>
    /// Called on a background thread via messaging
    /// </summary>
    public void QueueEvent(RealtimeProcessorMessage newRequest)
    {
        bool mergedRequest;
        lock (_eventLocker)
        {
            if (_pendingRequest == null)
            {
                mergedRequest = false;
                _pendingRequest = new Request(newRequest, _engine);
            }
            else
            {
                mergedRequest = true;
                _pendingRequest.Merge(newRequest, _engine);
            }
        }

        _eventAvailable.Set();
    }


    /// <summary>
    /// This is running on its own thread
    /// </summary>
    private void EventLoop()
    {
        while (true)
        {
            // Block until something exists in _pendingRequest
            _eventAvailable.WaitOne();

            Request currentRequest;
            lock (_eventLocker)
            {
                currentRequest = _pendingRequest;
                _pendingRequest = null;
            }

            // CAN THIS EVER BE NULL?
            if (currentRequest == null) 
                continue;

            //do stuff with the currentRequest here
        }
    }
}

1 Answer


Yes, if (currentRequest == null) can evaluate to true. Consider two threads racing to call _eventAvailable.Set(): one completes the call, and the other is preempted. In the meantime, the EventLoop thread wakes up and completes an entire iteration of the loop. You now have a situation where _pendingRequest is null and the WaitHandle is waiting to be signaled again. When the preempted thread finally calls Set(), WaitOne returns and the loop finds nothing to process.
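
To make the race concrete, here is a sketch of one possible interleaving, written as comments against the code from the question (the producer thread names are illustrative, not from the original post):

// Producer A: lock (_eventLocker) { _pendingRequest = new Request(...); }
// Producer A: _eventAvailable.Set();                // event is now signaled
// Producer B: lock (_eventLocker) { _pendingRequest.Merge(...); }
//             (B is preempted before it reaches _eventAvailable.Set())
// EventLoop:  WaitOne() returns, copies _pendingRequest into currentRequest,
//             sets _pendingRequest to null, and processes the merged request
// Producer B: _eventAvailable.Set();                // signals a second time
// EventLoop:  WaitOne() returns immediately, but _pendingRequest is now null
//
// This is why the null check and the `continue` in EventLoop are needed.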

I would like to offer a completely different solution to this problem. It looks like the producer/consumer pattern would simplify your code. The pattern is most easily implemented with a blocking queue, and the BlockingCollection<T> class implements exactly such a queue.

using System;
using System.Collections.Concurrent;

public sealed class RealtimeRunner : MarshalByRefObject
{
  private readonly BlockingCollection<Request> m_Queue = new BlockingCollection<Request>();

  public void QueueEvent(RealtimeProcessorMessage newRequest)
  {
    m_Queue.Add(new Request(newRequest, _engine));
  }

  private void EventLoop()
  {
    while (true)
    {
      // This blocks until an item appears in the queue.
      Request request = m_Queue.Take();
      // Process the request here.
    }
  }
}
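
For completeness, here is a minimal sketch of how the consumer thread could be wired around such a queue. The placeholder types, the Shutdown method, and the use of GetConsumingEnumerable/CompleteAdding are assumptions added for illustration, not part of the original answer:

using System;
using System.Collections.Concurrent;
using System.Threading;

public sealed class RealtimeRunnerSketch
{
  // Placeholder types so the sketch compiles on its own; the real
  // Request and RealtimeProcessorMessage come from the question.
  public sealed class RealtimeProcessorMessage { }
  public sealed class Request
  {
    public Request(RealtimeProcessorMessage message) { }
  }

  private readonly BlockingCollection<Request> m_Queue = new BlockingCollection<Request>();
  private readonly Thread m_Worker;

  public RealtimeRunnerSketch()
  {
    // Dedicated consumer thread; IsBackground so it does not keep the process alive.
    m_Worker = new Thread(EventLoop) { IsBackground = true };
    m_Worker.Start();
  }

  public void QueueEvent(RealtimeProcessorMessage newRequest)
  {
    m_Queue.Add(new Request(newRequest));
  }

  public void Shutdown()
  {
    // Lets GetConsumingEnumerable finish once the queue has drained.
    m_Queue.CompleteAdding();
    m_Worker.Join();
  }

  private void EventLoop()
  {
    // Blocks while the queue is empty; exits cleanly after CompleteAdding.
    foreach (Request request in m_Queue.GetConsumingEnumerable())
    {
      // Process the request here.
    }
  }
}

Note that this queue-per-item approach changes the original semantics slightly: requests are processed one at a time in arrival order rather than being merged into a single pending request.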
answered 2011-05-06T16:19:00.827