Example 1
void TSRImmediateDraw::RenderUnitQuad( TSRShaderEffect* _pFullScreenEffect )
{
    // Unit quad spanning [-1, 1] on both axes
    TSRVector2 halfRes( 1.0f, 1.0f );
    TSRImmediateDraw::Begin( TWISTER_RENDERMODE_TRIANGLESTRIP );
    TSRImmediateDraw::Color3f( 1.0f, 1.0f, 1.0f );
    // Four corners in triangle-strip order: bottom-right, bottom-left, top-right, top-left
    TSRImmediateDraw::TexCoord2f( 1.0f, 1.0f );
    TSRImmediateDraw::Vertex2f( halfRes.x, -halfRes.y );
    TSRImmediateDraw::TexCoord2f( 0.0f, 1.0f );
    TSRImmediateDraw::Vertex2f( -halfRes.x, -halfRes.y );
    TSRImmediateDraw::TexCoord2f( 1.0f, 0.0f );
    TSRImmediateDraw::Vertex2f( halfRes.x, halfRes.y );
    TSRImmediateDraw::TexCoord2f( 0.0f, 0.0f );
    TSRImmediateDraw::Vertex2f( -halfRes.x, halfRes.y );
    TSRImmediateDraw::End( _pFullScreenEffect );
}
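
A minimal usage sketch, assuming a post-processing step that already owns a bound TSRShaderEffect; the class, method and member names below are hypothetical and not part of the original source:

void MyPostProcessPass::Draw()
{
    // The quad spans [-1, 1] on both axes, so drawing it with a full-screen
    // effect covers the whole viewport in a single triangle strip.
    TSRImmediateDraw::RenderUnitQuad( m_pFullScreenEffect );    // m_pFullScreenEffect is an assumed member
}
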
Example 2
void CamClient::sendHeartbeatFrameMJPPCM(qint64 now, const QByteArray& jpeg)
{
    QByteArray lowRateJpeg;

    // High rate refresh: send the full frame if the service is active, the link is
    // clear to send and the high rate refresh interval has expired
    if (clientIsServiceActive(m_avmuxPortHighRate) && clientClearToSend(m_avmuxPortHighRate) &&
            SyntroUtils::syntroTimerExpired(now, m_lastFullFrameTime, m_highRateMaxInterval)) {
        SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortHighRate, sizeof(SYNTRO_RECORD_AVMUX) + jpeg.size());
        SYNTRO_RECORD_AVMUX *videoHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
        SyntroUtils::avmuxHeaderInit(videoHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_REFRESH, m_recordIndex++, 0, jpeg.size(), 0);
        memcpy((unsigned char *)(videoHead + 1), jpeg.data(), jpeg.size());
        int length = sizeof(SYNTRO_RECORD_AVMUX) + jpeg.size();
        clientSendMessage(m_avmuxPortHighRate, multiCast, length, SYNTROLINK_LOWPRI);
        m_lastFrameTime = m_lastFullFrameTime = now;
    }

    // Low rate refresh: same pattern, optionally halving the resolution first
    if (m_generateLowRate && clientIsServiceActive(m_avmuxPortLowRate) && clientClearToSend(m_avmuxPortLowRate) &&
            SyntroUtils::syntroTimerExpired(now, m_lastLowRateFullFrameTime, m_lowRateMaxInterval)) {

        lowRateJpeg = jpeg;
        if (m_lowRateHalfRes)
            halfRes(lowRateJpeg);

        SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortLowRate, sizeof(SYNTRO_RECORD_AVMUX) + lowRateJpeg.size());
        SYNTRO_RECORD_AVMUX *videoHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
        SyntroUtils::avmuxHeaderInit(videoHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_REFRESH, m_recordIndex++, 0, lowRateJpeg.size(), 0);
        memcpy((unsigned char *)(videoHead + 1), lowRateJpeg.data(), lowRateJpeg.size());
        int length = sizeof(SYNTRO_RECORD_AVMUX) + lowRateJpeg.size();
        clientSendMessage(m_avmuxPortLowRate, multiCast, length, SYNTROLINK_LOWPRI);
        m_lastLowRateFrameTime = m_lastLowRateFullFrameTime = now;
    }

    // Send null frames if nothing has gone out on either stream for too long
    if (SyntroUtils::syntroTimerExpired(now, m_lastFrameTime, m_highRateNullInterval))
        sendNullFrameMJPPCM(now, true);
    if (SyntroUtils::syntroTimerExpired(now, m_lastLowRateFrameTime, m_lowRateNullInterval))
        sendNullFrameMJPPCM(now, false);
}
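
Both branches above repeat the same build / header-init / copy / send sequence. As a hedged sketch only (the helper name and signature are assumptions, not part of the original source), the video-only case could be factored out using nothing beyond the calls already shown:

// Hypothetical helper: one video-only avmux send on the given service port.
void CamClient::sendVideoOnlyAvmux(int servicePort, const QByteArray& jpeg, int priority)
{
    SYNTRO_EHEAD *multiCast = clientBuildMessage(servicePort, sizeof(SYNTRO_RECORD_AVMUX) + jpeg.size());
    SYNTRO_RECORD_AVMUX *videoHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
    SyntroUtils::avmuxHeaderInit(videoHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_REFRESH, m_recordIndex++, 0, jpeg.size(), 0);
    memcpy((unsigned char *)(videoHead + 1), jpeg.data(), jpeg.size());
    clientSendMessage(servicePort, multiCast, sizeof(SYNTRO_RECORD_AVMUX) + jpeg.size(), priority);
}

Each refresh branch then reduces to its service and interval checks plus one call to the helper.
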
Example 3
bool CamClient::sendAVMJPPCM(qint64 now, int param, bool checkMotion)
{
    qint64 videoTimestamp;
    qint64 audioTimestamp;
    QByteArray highRateJpeg;
    QByteArray lowRateJpeg;
    QByteArray audioFrame;
    bool audioValid;

    // see if anything to send

    dequeueVideoFrame(highRateJpeg, videoTimestamp);
    lowRateJpeg = highRateJpeg;
    audioValid = dequeueAudioFrame(audioFrame, audioTimestamp);

    if (clientIsServiceActive(m_avmuxPortHighRate)) {
        if (!SyntroUtils::syntroTimerExpired(now, m_lastFullFrameTime, m_highRateMinInterval)) {
            highRateJpeg.clear();                           // too soon
            if (SyntroUtils::syntroTimerExpired(now, m_lastFrameTime, m_highRateNullInterval)) {
                sendNullFrameMJPPCM(now, true);                 // in case very long time
            }
        }
    }
    if (m_generateLowRate && clientIsServiceActive(m_avmuxPortLowRate)) {
        if (!SyntroUtils::syntroTimerExpired(now, m_lastLowRateFullFrameTime, m_lowRateMinInterval)) {
            lowRateJpeg.clear();                          // too soon
            if (SyntroUtils::syntroTimerExpired(now, m_lastLowRateFrameTime, m_lowRateNullInterval)) {
                sendNullFrameMJPPCM(now, false);                // in case very long time
            }
        }
    }
    if ((highRateJpeg.size() > 0) || audioValid) {
        if (clientIsServiceActive(m_avmuxPortHighRate) && clientClearToSend(m_avmuxPortHighRate) ) {
            SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortHighRate, sizeof(SYNTRO_RECORD_AVMUX) + highRateJpeg.size() + audioFrame.size());
            SYNTRO_RECORD_AVMUX *avHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
            SyntroUtils::avmuxHeaderInit(avHead, &m_avParams, param, m_recordIndex++, 0, highRateJpeg.size(), audioFrame.size());
            // The audio timestamp is the default; the video timestamp takes precedence when a frame is present
            if (audioValid)
                SyntroUtils::convertInt64ToUC8(audioTimestamp, avHead->recordHeader.timestamp);
            if (highRateJpeg.size() > 0)
                SyntroUtils::convertInt64ToUC8(videoTimestamp, avHead->recordHeader.timestamp);

            unsigned char *ptr = (unsigned char *)(avHead + 1);

            if (highRateJpeg.size() > 0) {
                memcpy(ptr, highRateJpeg.data(), highRateJpeg.size());
                m_lastFullFrameTime = m_lastFrameTime = now;
                ptr += highRateJpeg.size();
            }

            if (audioFrame.size() > 0)
                memcpy(ptr, audioFrame.data(), audioFrame.size());

            int length = sizeof(SYNTRO_RECORD_AVMUX) + highRateJpeg.size() + audioFrame.size();
            clientSendMessage(m_avmuxPortHighRate, multiCast, length, SYNTROLINK_MEDPRI);
        }
    }

    if ((lowRateJpeg.size() > 0) || audioValid) {
        if (m_generateLowRate && clientIsServiceActive(m_avmuxPortLowRate) && clientClearToSend(m_avmuxPortLowRate)) {
            if ((lowRateJpeg.size() > 0) && m_lowRateHalfRes)
                halfRes(lowRateJpeg);

            SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortLowRate, sizeof(SYNTRO_RECORD_AVMUX) + lowRateJpeg.size() + audioFrame.size());
            SYNTRO_RECORD_AVMUX *avHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
            SyntroUtils::avmuxHeaderInit(avHead, &m_avParams, param, m_recordIndex++, 0, lowRateJpeg.size(), audioFrame.size());
            if (audioValid)
                SyntroUtils::convertInt64ToUC8(audioTimestamp, avHead->recordHeader.timestamp);
            if (lowRateJpeg.size() > 0)
                SyntroUtils::convertInt64ToUC8(videoTimestamp, avHead->recordHeader.timestamp);

            unsigned char *ptr = (unsigned char *)(avHead + 1);

            if (lowRateJpeg.size() > 0) {
                memcpy(ptr, lowRateJpeg.data(), lowRateJpeg.size());
                m_lastLowRateFullFrameTime = m_lastLowRateFrameTime = now;
                ptr += lowRateJpeg.size();
            }

            if (audioFrame.size() > 0)
                memcpy(ptr, audioFrame.data(), audioFrame.size());

            int length = sizeof(SYNTRO_RECORD_AVMUX) + lowRateJpeg.size() + audioFrame.size();
            clientSendMessage(m_avmuxPortLowRate, multiCast, length, SYNTROLINK_MEDPRI);
        }
    }

    if ((highRateJpeg.size() > 0) && checkMotion) {
        if ((now - m_lastDeltaTime) > m_deltaInterval)
            checkForMotion(now, highRateJpeg);
        return m_imageChanged;                              // image may have changed
    }
    return false;                                           // change not processed
}
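
A hedged sketch of how this path might be driven periodically; the loop function, the m_lastChangeTime member and the SYNTRO_RECORDHEADER_PARAM_NORMAL value are assumptions, not taken from the original source:

// Hypothetical periodic driver: forward the current time and let sendAVMJPPCM()
// decide per port whether a full frame, a null frame or nothing goes out.
void CamClient::avmuxBackgroundLoop()
{
    qint64 now = SyntroClock();                             // same clock as the low rate timestamps above
    bool imageChanged = sendAVMJPPCM(now, SYNTRO_RECORDHEADER_PARAM_NORMAL, true);
    if (imageChanged)
        m_lastChangeTime = now;                             // hypothetical member, for illustration only
}
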
Example 4
void CamClient::sendPrerollMJPPCM(bool highRate)
{
    PREROLL *videoPreroll = NULL;
    PREROLL *audioPreroll = NULL;
    int videoSize = 0;
    int audioSize = 0;

    if (highRate) {
        if (!m_videoPrerollQueue.empty()) {
            videoPreroll = m_videoPrerollQueue.dequeue();
            videoSize = videoPreroll->data.size();
            m_lastFrameTime = QDateTime::currentMSecsSinceEpoch();
        }
        if (!m_audioPrerollQueue.empty()) {
            audioPreroll = m_audioPrerollQueue.dequeue();
            audioSize = audioPreroll->data.size();
        }

        if ((videoPreroll != NULL) || (audioPreroll != NULL)) {
            SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortHighRate, sizeof(SYNTRO_RECORD_AVMUX) + videoSize + audioSize);
            SYNTRO_RECORD_AVMUX *avHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
            SyntroUtils::avmuxHeaderInit(avHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_PREROLL, m_recordIndex++, 0, videoSize, audioSize);

            if (audioPreroll != NULL)
                SyntroUtils::convertInt64ToUC8(audioPreroll->timestamp, avHead->recordHeader.timestamp);
            if (videoPreroll != NULL)
                SyntroUtils::convertInt64ToUC8(videoPreroll->timestamp, avHead->recordHeader.timestamp);

            unsigned char *ptr = (unsigned char *)(avHead + 1);

            if (videoSize > 0) {
                memcpy(ptr, videoPreroll->data.data(), videoSize);
                ptr += videoSize;
            }

            if (audioSize > 0)
                memcpy(ptr, audioPreroll->data.data(), audioSize);

            int length = sizeof(SYNTRO_RECORD_AVMUX) + videoSize + audioSize;
            clientSendMessage(m_avmuxPortHighRate, multiCast, length, SYNTROLINK_MEDPRI);
        }
    } else {
        if (!m_videoLowRatePrerollQueue.empty()) {
            videoPreroll = m_videoLowRatePrerollQueue.dequeue();
            if (m_lowRateHalfRes)
                halfRes(videoPreroll->data);
            videoSize = videoPreroll->data.size();
            m_lastLowRateFrameTime = SyntroClock();
        }
        if (!m_audioLowRatePrerollQueue.empty()) {
            audioPreroll = m_audioLowRatePrerollQueue.dequeue();
            audioSize = audioPreroll->data.size();
        }

        if ((videoPreroll != NULL) || (audioPreroll != NULL)) {
            SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortLowRate, sizeof(SYNTRO_RECORD_AVMUX) + videoSize + audioSize);
            SYNTRO_RECORD_AVMUX *avHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
            SyntroUtils::avmuxHeaderInit(avHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_PREROLL, m_recordIndex++, 0, videoSize, audioSize);

            if (audioPreroll != NULL)
                SyntroUtils::convertInt64ToUC8(audioPreroll->timestamp, avHead->recordHeader.timestamp);
            if (videoPreroll != NULL)
                SyntroUtils::convertInt64ToUC8(videoPreroll->timestamp, avHead->recordHeader.timestamp);

            unsigned char *ptr = (unsigned char *)(avHead + 1);

            if (videoSize > 0) {
                memcpy(ptr, videoPreroll->data.data(), videoSize);
                ptr += videoSize;
            }

            if (audioSize > 0)
                memcpy(ptr, audioPreroll->data.data(), audioSize);

            int length = sizeof(SYNTRO_RECORD_AVMUX) + videoSize + audioSize;
            clientSendMessage(m_avmuxPortLowRate, multiCast, length, SYNTROLINK_MEDPRI);
        }
    }

    // The dequeued preroll entries are owned here, so free them once sent
    if (videoPreroll != NULL)
        delete videoPreroll;
    if (audioPreroll != NULL)
        delete audioPreroll;
}
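
The PREROLL entries dequeued above are heap allocated and freed after sending. A minimal sketch of the producer side, assuming the queue is a QQueue and touching only the two fields the code actually references (data and timestamp); the helper itself is hypothetical:

// Hypothetical producer: queue a copy of the captured frame for preroll, to be
// dequeued, sent and deleted later by sendPrerollMJPPCM().
void CamClient::queueVideoPreroll(const QByteArray& jpeg, qint64 timestamp)
{
    PREROLL *preroll = new PREROLL;
    preroll->data = jpeg;                                   // fields as referenced above
    preroll->timestamp = timestamp;
    m_videoPrerollQueue.enqueue(preroll);
}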