Example #1
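This example is ZeroC Ice's BatchRequestQueue::finishBatchRequest. It swaps the fully marshaled request back into the queue's shared batch stream, triggers an automatic flush when the stream exceeds the configured maximum size, and gives an application-installed batch request interceptor the chance to accept or reject the request before the stream is handed back to other threads.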
void
BatchRequestQueue::finishBatchRequest(OutputStream* os, const Ice::ObjectPrxPtr& proxy, const std::string& operation)
{
    //
    // No need for synchronization, no other threads are supposed
    // to modify the queue since we set _batchStreamInUse to true.
    //
    assert(_batchStreamInUse);
    _batchStream.swap(*os);

    try
    {
        _batchStreamCanFlush = true; // Allow flush to proceed even if the stream is marked in use.

        // Kick off an automatic asynchronous flush once the batch stream
        // exceeds the configured maximum size.
        if(_maxSize > 0 && _batchStream.b.size() >= _maxSize)
        {
#ifdef ICE_CPP11_MAPPING
            proxy->ice_flushBatchRequestsAsync();
#else
            proxy->begin_ice_flushBatchRequests();
#endif
        }

        assert(_batchMarker < _batchStream.b.size());
        // If a batch request interceptor is installed, let it decide whether to
        // enqueue the request; otherwise accept the request unconditionally.
        if(_interceptor)
        {
            BatchRequestI request(*this, proxy, operation, static_cast<int>(_batchStream.b.size() - _batchMarker));
#ifdef ICE_CPP11_MAPPING
            _interceptor(request, _batchRequestNum, static_cast<int>(_batchMarker));
#else
            _interceptor->enqueue(request, _batchRequestNum, static_cast<int>(_batchMarker));
#endif
        }
        else
        {
            _batchMarker = _batchStream.b.size();
            ++_batchRequestNum;
        }

        // Under the lock, discard any data that was not accepted into the batch
        // and wake up threads waiting to use or flush the stream.
        Lock sync(*this);
        _batchStream.resize(_batchMarker);
        _batchStreamInUse = false;
        _batchStreamCanFlush = false;
        notifyAll();
    }
    catch(const std::exception&)
    {
        Lock sync(*this);
        _batchStream.resize(_batchMarker);
        _batchStreamInUse = false;
        _batchStreamCanFlush = false;
        notifyAll();
        throw;
    }
}
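
The interceptor invoked above (_interceptor) is supplied by the application through Ice::InitializationData; setting _batchStreamCanFlush before it runs is what allows the interceptor to flush the queue even though the stream is still marked in use. The sketch below shows how such an interceptor might be installed, assuming the Ice 3.7 C++11 mapping, where InitializationData exposes a batchRequestInterceptor std::function and BatchRequest provides getSize(), getProxy(), and enqueue(); the 32 KB flush threshold is an arbitrary value chosen for illustration.

#include <Ice/Ice.h>

int
main(int, char*[])
{
    Ice::InitializationData initData;

    const int flushLimit = 32 * 1024; // Illustrative threshold, not an Ice default.

    // Called by finishBatchRequest (via _interceptor) for every batch request.
    // The second argument is the number of requests already queued, the third
    // the current size of the queue in bytes.
    initData.batchRequestInterceptor =
        [flushLimit](const Ice::BatchRequest& req, int /*queueCount*/, int queueSize)
        {
            // Flush the queued requests before the batch grows past the limit,
            // then accept the new request into the (now empty) queue.
            if(queueSize + req.getSize() > flushLimit)
            {
                req.getProxy()->ice_flushBatchRequestsAsync();
            }
            req.enqueue();
        };

    auto communicator = Ice::initialize(initData);
    // ... obtain proxies with ice_batchOneway() and invoke on them ...
    communicator->destroy();
    return 0;
}

With an interceptor registered this way, each call to finishBatchRequest hands control to the lambda, which decides whether the pending batch is flushed, the new request is queued, or both.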