Code Example #1
File: ConnectRequestHandler.cpp Project: iokays/ice
void
ConnectRequestHandler::asyncRequestCanceled(const OutgoingAsyncBasePtr& outAsync, const Ice::LocalException& ex)
{
    {
        Lock sync(*this);
        if(ICE_EXCEPTION_GET(_exception))
        {
            return; // The request has been notified of a failure already.
        }

        // The connection is not yet established: the request may still be
        // queued here, so remove it from the queue and deliver the exception
        // directly.
        if(!initialized())
        {
            for(deque<Request>::iterator p = _requests.begin(); p != _requests.end(); ++p)
            {
                if(p->outAsync.get() == outAsync.get())
                {
                    _requests.erase(p);
                    if(outAsync->completed(ex))
                    {
                        outAsync->invokeCompletedAsync();
                    }
                    return;
                }
            }
        }
    }
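    // The request is no longer queued here, so it was already handed off to
    // the connection; forward the cancellation outside the lock.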
    _connection->asyncRequestCanceled(outAsync, ex);
}
Code Example #2
void
CollocatedRequestHandler::sendResponse(Int requestId, BasicStream* os, Byte, bool amd)
{
    OutgoingAsyncBasePtr outAsync;
    {
        Lock sync(*this);
        assert(_response);

        // Position the read pointer just past the reply header and the
        // 4-byte request ID.
        os->i = os->b.begin() + sizeof(replyHdr) + 4;

        if(_traceLevels->protocol >= 1)
        {
            fillInValue(os, 10, static_cast<Int>(os->b.size()));
            traceRecv(*os, _logger, _traceLevels);
        }

        // A synchronous (blocking) invocation is completed while the lock is
        // still held.
        map<int, OutgoingBase*>::iterator p = _requests.find(requestId);
        if(p != _requests.end())
        {
            p->second->completed(*os);
            _requests.erase(p);
        }
        else
        {
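            // An asynchronous invocation: hand it the reply data here, but
            // invoke its completion callback only after the lock is released.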
            map<int, OutgoingAsyncBasePtr>::iterator q = _asyncRequests.find(requestId);
            if(q != _asyncRequests.end())
            {
                os->swap(*q->second->getIs());
                if(q->second->completed())
                {
                    outAsync = q->second;
                }
                _asyncRequests.erase(q);
            }
        }
    }

    if(outAsync)
    {
        //
        // If called from an AMD dispatch, invoke asynchronously
        // the completion callback since this might be called from
        // the user code.
        //
        if(amd)
        {
            outAsync->invokeCompletedAsync();
        }
        else
        {
            outAsync->invokeCompleted();
        }
    }

    _adapter->decDirectCount();
}
Code Example #3
void
CollocatedRequestHandler::sendResponse(Int requestId, OutputStream* os, Byte, bool amd)
{
    OutgoingAsyncBasePtr outAsync;
    {
        Lock sync(*this);
        assert(_response);

        if(_traceLevels->protocol >= 1)
        {
            fillInValue(os, 10, static_cast<Int>(os->b.size()));
        }

        InputStream is(os->instance(), os->getEncoding(), *os, true); // Adopting the OutputStream's buffer.
        is.pos(sizeof(replyHdr) + 4); // Skip the reply header and the 4-byte request ID.

        if(_traceLevels->protocol >= 1)
        {
            traceRecv(is, _logger, _traceLevels);
        }

        map<int, OutgoingAsyncBasePtr>::iterator q = _asyncRequests.find(requestId);
        if(q != _asyncRequests.end())
        {
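            // Move the adopted reply buffer into the pending async request and
            // defer its response callback until the lock is released.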
            is.swap(*q->second->getIs());
            if(q->second->response())
            {
                outAsync = q->second;
            }
            _asyncRequests.erase(q);
        }
    }

    if(outAsync)
    {
        //
        // If called from an AMD dispatch, invoke asynchronously
        // the completion callback since this might be called from
        // the user code.
        //
        if(amd)
        {
            outAsync->invokeResponseAsync();
        }
        else
        {
            outAsync->invokeResponse();
        }
    }

    _adapter->decDirectCount();
}
Code Example #4
void
CollocatedRequestHandler::handleException(int requestId, const Exception& ex, bool amd)
{
    if(requestId == 0)
    {
        return; // Ignore exception for oneway messages.
    }

    OutgoingAsyncBasePtr outAsync;
    {
        Lock sync(*this);

        map<int, OutgoingBase*>::iterator p = _requests.find(requestId);
        if(p != _requests.end())
        {
            p->second->completed(ex);
            _requests.erase(p);
        }
        else
        {
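            // Mark the asynchronous request as completed here; its callback is
            // invoked after the lock is released.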
            map<int, OutgoingAsyncBasePtr>::iterator q = _asyncRequests.find(requestId);
            if(q != _asyncRequests.end())
            {
                if(q->second->completed(ex))
                {
                    outAsync = q->second;
                }
                _asyncRequests.erase(q);
            }
        }
    }

    if(outAsync)
    {
        //
        // If called from an AMD dispatch, invoke asynchronously
        // the completion callback since this might be called from
        // the user code.
        //
        if(amd)
        {
            outAsync->invokeCompletedAsync();
        }
        else
        {
            outAsync->invokeCompleted();
        }
    }
}
Code Example #5
void
CollocatedRequestHandler::asyncRequestCanceled(const OutgoingAsyncBasePtr& outAsync, const LocalException& ex)
{
    Lock sync(*this);

    map<OutgoingAsyncBasePtr, Int>::iterator p = _sendAsyncRequests.find(outAsync);
    if(p != _sendAsyncRequests.end())
    {
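        // The request has not been dispatched yet: remove it from both
        // tracking maps and fail it without going through invokeAll.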
        if(p->second > 0)
        {
            _asyncRequests.erase(p->second);
        }
        _sendAsyncRequests.erase(p);
        if(outAsync->exception(ex))
        {
            outAsync->invokeExceptionAsync();
        }
        _adapter->decDirectCount(); // invokeAll won't be called, decrease the direct count.
        return;
    }

    // Otherwise the request may already have been dispatched; look it up among
    // the requests still awaiting a reply.
    OutgoingAsyncPtr o = ICE_DYNAMIC_CAST(OutgoingAsync, outAsync);
    if(o)
    {
        for(map<Int, OutgoingAsyncBasePtr>::iterator q = _asyncRequests.begin(); q != _asyncRequests.end(); ++q)
        {
            if(q->second.get() == o.get())
            {
                _asyncRequests.erase(q);
                if(outAsync->exception(ex))
                {
                    outAsync->invokeExceptionAsync();
                }
                return;
            }
        }
    }
}
Code Example #6
void
CollocatedRequestHandler::asyncRequestCanceled(const OutgoingAsyncBasePtr& outAsync, const LocalException& ex)
{
    Lock sync(*this);

    map<OutgoingAsyncBasePtr, Int>::iterator p = _sendAsyncRequests.find(outAsync);
    if(p != _sendAsyncRequests.end())
    {
        if(p->second > 0)
        {
            _asyncRequests.erase(p->second);
        }
        _sendAsyncRequests.erase(p);
        if(outAsync->completed(ex))
        {
            outAsync->invokeCompletedAsync();
        }
        return;
    }

    OutgoingAsyncPtr o = OutgoingAsyncPtr::dynamicCast(outAsync);
    if(o)
    {
        for(map<Int, OutgoingAsyncBasePtr>::iterator q = _asyncRequests.begin(); q != _asyncRequests.end(); ++q)
        {
            if(q->second.get() == o.get())
            {
                _asyncRequests.erase(q);
                if(outAsync->completed(ex))
                {
                    outAsync->invokeCompletedAsync();
                }
                return;
            }
        }
    }
}
Code Example #7
AsyncStatus
ConnectionRequestHandler::sendAsyncRequest(const OutgoingAsyncBasePtr& out)
{
    return out->send(_connection, _compress, _response); // The connection is already established; send directly on it.
}
Code Example #8
File: OutgoingAsync.cpp Project: lmtoo/ice
void
CommunicatorFlushBatchAsync::flushConnection(const ConnectionIPtr& con)
{
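    // Local helper: one FlushBatch per connection. Its sent()/exception()
    // callbacks notify the enclosing CommunicatorFlushBatchAsync through
    // check(false), so the overall flush completes once every connection has
    // reported back.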
    class FlushBatch : public OutgoingAsyncBase
    {
    public:

        FlushBatch(const CommunicatorFlushBatchAsyncPtr& outAsync,
                   const InstancePtr& instance,
                   InvocationObserver& observer) :
            OutgoingAsyncBase(instance), _outAsync(outAsync), _observer(observer)
        {
        }

        virtual bool
        sent()
        {
            _childObserver.detach();
            _outAsync->check(false);
            return false;
        }

        virtual bool
        exception(const Exception& ex)
        {
            _childObserver.failed(ex.ice_id());
            _childObserver.detach();
            _outAsync->check(false);
            return false;
        }

        virtual InvocationObserver&
        getObserver()
        {
            return _observer;
        }

        virtual bool handleSent(bool, bool)
        {
            return false;
        }

        virtual bool handleException(const Ice::Exception&)
        {
            return false;
        }

        virtual bool handleResponse(bool)
        {
            return false;
        }

        virtual void handleInvokeSent(bool, OutgoingAsyncBase*) const
        {
            assert(false);
        }

        virtual void handleInvokeException(const Ice::Exception&, OutgoingAsyncBase*) const
        {
            assert(false);
        }

        virtual void handleInvokeResponse(bool, OutgoingAsyncBase*) const
        {
            assert(false);
        }

    private:

        const CommunicatorFlushBatchAsyncPtr _outAsync;
        InvocationObserver& _observer;
    };

    {
        Lock sync(_m);
        ++_useCount; // One more per-connection flush to wait for before the overall flush completes.
    }

    try
    {
        OutgoingAsyncBasePtr flushBatch = ICE_MAKE_SHARED(FlushBatch, ICE_SHARED_FROM_THIS, _instance, _observer);
        int batchRequestNum = con->getBatchRequestQueue()->swap(flushBatch->getOs());
        // Nothing was batched for this connection: report the flush as
        // already sent.
        if(batchRequestNum == 0)
        {
            flushBatch->sent();
        }
        else
        {
            con->sendAsyncRequest(flushBatch, false, false, batchRequestNum);
        }
    }
    catch(const LocalException&)
    {
        check(false);
        throw;
    }
}