void SyntroPythonClient::clientSendE2EData(int servicePort, QByteArray data)
{
    if (clientIsServiceActive(servicePort)) {
        SYNTRO_EHEAD *multiCast = clientBuildMessage(servicePort, data.size());
        memcpy(multiCast + 1, data.data(), data.size());
        clientSendMessage(servicePort, multiCast, data.size(), SYNTROLINK_MEDPRI);
    }
}
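Every example on this page follows the same build/fill/send sequence: check that the service is active (and, where flow control matters, clear to send), allocate the message with clientBuildMessage(), copy the payload directly behind the SYNTRO_EHEAD, then hand the buffer to clientSendMessage(). A minimal sketch of that shared pattern, written as a hypothetical member function (SomeClient and sendRawPayload are illustrative names; the SyntroLib calls are used exactly as in the examples):

void SomeClient::sendRawPayload(int servicePort, const QByteArray& payload)
{
    // do nothing if no subscriber is listening or the link is flow-controlled
    if (!clientIsServiceActive(servicePort) || !clientClearToSend(servicePort))
        return;

    // clientBuildMessage() is assumed (as in the examples) to allocate a
    // SYNTRO_EHEAD with room for the payload immediately after it, so the
    // data is copied to (header + 1)
    SYNTRO_EHEAD *message = clientBuildMessage(servicePort, payload.size());
    memcpy(message + 1, payload.constData(), payload.size());
    clientSendMessage(servicePort, message, payload.size(), SYNTROLINK_MEDPRI);
}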
Example #2
bool CFSClient::appClientProcessThreadMessage(SyntroThreadMsg *msg)
{
	if (msg->message == SYNTRO_CFS_MESSAGE) {
		clientSendMessage(m_CFSPort, (SYNTRO_EHEAD *)msg->ptrParam, msg->intParam, SYNTROCFS_E2E_PRIORITY);
		return true;
	}

	return false;
}
Example #3
void CamClient::sendNullFrameMJPPCM(qint64 now, bool highRate)
{
    if (highRate) {
        if (clientIsServiceActive(m_avmuxPortHighRate) && clientClearToSend(m_avmuxPortHighRate)) {
            SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortHighRate, sizeof(SYNTRO_RECORD_AVMUX));
            SYNTRO_RECORD_AVMUX *videoHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
            SyntroUtils::avmuxHeaderInit(videoHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_NOOP, m_recordIndex++, 0, 0, 0);
            int length = sizeof(SYNTRO_RECORD_AVMUX);
            clientSendMessage(m_avmuxPortHighRate, multiCast, length, SYNTROLINK_LOWPRI);
            m_lastFrameTime = now;
        }
    } else {

        if (m_generateLowRate && clientIsServiceActive(m_avmuxPortLowRate) && clientClearToSend(m_avmuxPortLowRate)) {
            SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortLowRate, sizeof(SYNTRO_RECORD_AVMUX));
            SYNTRO_RECORD_AVMUX *videoHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
            SyntroUtils::avmuxHeaderInit(videoHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_NOOP, m_recordIndex++, 0, 0, 0);
            int length = sizeof(SYNTRO_RECORD_AVMUX);
            clientSendMessage(m_avmuxPortLowRate, multiCast, length, SYNTROLINK_LOWPRI);
            m_lastLowRateFrameTime = now;
        }
    }
}
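The two branches above differ only in the target port, the m_generateLowRate gate, and which last-frame timestamp they refresh. The shared body factors out into a sketch like the following (sendNullFrame() is a hypothetical helper; every other identifier comes from the example):

void CamClient::sendNullFrame(int port, qint64& lastFrameTime, qint64 now)
{
    if (!clientIsServiceActive(port) || !clientClearToSend(port))
        return;

    // a NOOP avmux record with no video or audio payload keeps the stream alive
    SYNTRO_EHEAD *multiCast = clientBuildMessage(port, sizeof(SYNTRO_RECORD_AVMUX));
    SYNTRO_RECORD_AVMUX *videoHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
    SyntroUtils::avmuxHeaderInit(videoHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_NOOP, m_recordIndex++, 0, 0, 0);
    clientSendMessage(port, multiCast, sizeof(SYNTRO_RECORD_AVMUX), SYNTROLINK_LOWPRI);
    lastFrameTime = now;
}

// callers then reduce to:
//   sendNullFrame(m_avmuxPortHighRate, m_lastFrameTime, now);
//   if (m_generateLowRate)
//       sendNullFrame(m_avmuxPortLowRate, m_lastLowRateFrameTime, now);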
Example #4
// Disconnect button
void client::on_disconnectpushButton_clicked()
{
    ui->clientMessagelineEdit->setText (tr("clientStop"));
    clientSendMessage ();
    ui->clientMessagelineEdit->setText (tr(""));
    ui->cStatuslabel->setText (tr("Disconnected"));
    tcp->abort();
    delete tcp;
    tcp=NULL;
    ui->connectpushButton->setEnabled (true);
    ui->disconnectpushButton->setEnabled (false);
    ui->clientSendpushButton->setEnabled (false);
}
Example #5
void CamClient::sendHeartbeatFrameMJPPCM(qint64 now, const QByteArray& jpeg)
{
    QByteArray lowRateJpeg;

    if (clientIsServiceActive(m_avmuxPortHighRate) && clientClearToSend(m_avmuxPortHighRate) &&
            SyntroUtils::syntroTimerExpired(now, m_lastFullFrameTime, m_highRateMaxInterval)) {
        SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortHighRate, sizeof(SYNTRO_RECORD_AVMUX) + jpeg.size());
        SYNTRO_RECORD_AVMUX *videoHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
        SyntroUtils::avmuxHeaderInit(videoHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_REFRESH, m_recordIndex++, 0, jpeg.size(), 0);
        memcpy((unsigned char *)(videoHead + 1), jpeg.data(), jpeg.size());
        int length = sizeof(SYNTRO_RECORD_AVMUX) + jpeg.size();
        clientSendMessage(m_avmuxPortHighRate, multiCast, length, SYNTROLINK_LOWPRI);
        m_lastFrameTime = m_lastFullFrameTime = now;
    }

    if (m_generateLowRate && clientIsServiceActive(m_avmuxPortLowRate) && clientClearToSend(m_avmuxPortLowRate) &&
            SyntroUtils::syntroTimerExpired(now, m_lastLowRateFullFrameTime, m_lowRateMaxInterval)) {

        lowRateJpeg = jpeg;
        if (m_lowRateHalfRes)
            halfRes(lowRateJpeg);

        SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortLowRate, sizeof(SYNTRO_RECORD_AVMUX) + lowRateJpeg.size());
        SYNTRO_RECORD_AVMUX *videoHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
        SyntroUtils::avmuxHeaderInit(videoHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_REFRESH, m_recordIndex++, 0, lowRateJpeg.size(), 0);
        memcpy((unsigned char *)(videoHead + 1), lowRateJpeg.data(), lowRateJpeg.size());
        int length = sizeof(SYNTRO_RECORD_AVMUX) + lowRateJpeg.size();
        clientSendMessage(m_avmuxPortLowRate, multiCast, length, SYNTROLINK_LOWPRI);
        m_lastLowRateFrameTime = m_lastLowRateFullFrameTime = now;
    }

    if (SyntroUtils::syntroTimerExpired(now, m_lastFrameTime, m_highRateNullInterval))
        sendNullFrameMJPPCM(now, true);
    if (SyntroUtils::syntroTimerExpired(now, m_lastLowRateFrameTime, m_lowRateNullInterval))
        sendNullFrameMJPPCM(now, false);
}
Example #6
void CamClient::sendCaps()
{
    if (!clientIsServiceActive(m_avmuxPort) || !clientClearToSend(m_avmuxPort))
        return;

    gchar *videoCaps = m_encoder->getVideoCaps();
    gchar *audioCaps = m_encoder->getAudioCaps();

    int videoLength = 0;
    int audioLength = 0;
    int totalLength = 0;

    if (videoCaps != NULL)
        videoLength = strlen(videoCaps) + 1;

    if (audioCaps != NULL)
        audioLength = strlen(audioCaps) + 1;

    totalLength = videoLength + audioLength;

    if (totalLength == 0)
        return;

    SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPort, sizeof(SYNTRO_RECORD_AVMUX) + totalLength);
    SYNTRO_RECORD_AVMUX *avmux = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
    SyntroUtils::avmuxHeaderInit(avmux, &m_avParams, SYNTRO_RECORDHEADER_PARAM_NORMAL, m_recordIndex++, 0, videoLength, audioLength);
    avmux->videoSubtype = SYNTRO_RECORD_TYPE_VIDEO_RTPCAPS;
    avmux->audioSubtype = SYNTRO_RECORD_TYPE_AUDIO_RTPCAPS;

    unsigned char *ptr = (unsigned char *)(avmux + 1);

    if (videoLength > 0) {
        memcpy(ptr, videoCaps, videoLength);
        ptr += videoLength;
    }

    if (audioLength > 0) {
        memcpy(ptr, audioCaps, audioLength);
        ptr += audioLength;
    }

    clientSendMessage(m_avmuxPort, multiCast, sizeof(SYNTRO_RECORD_AVMUX) + totalLength, SYNTROLINK_MEDPRI);
}
Example #7
void DisplayClient::ledWrite(quint32 mask, quint32 values)
{
	if (!clientIsConnected())
		return;

	if (!clientIsServiceActive(m_controlPort))
		return;

	int length = 2 * sizeof(quint32);

	SYNTRO_EHEAD *head = clientBuildMessage(m_controlPort, length);

	quint32 *p = (quint32 *)(head + 1);

	p[0] = mask;
	p[1] = values;

	clientSendMessage(m_controlPort, head, length, SYNTROLINK_MEDPRI);
}
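Judging by the parameter names, mask selects which LED bits to touch and values supplies their new states, so a call presumably looks like this (illustrative values only):

    // touch LEDs 0 and 2 (mask 0x05): turn LED 0 on, LED 2 off
    ledWrite(0x05, 0x01);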
Example #8
void SyntroPythonClient::clientSendJpegAVData(int servicePort, QByteArray video, QByteArray audio)
{
    if (clientIsServiceActive(servicePort) && clientClearToSend(servicePort)) {
        SYNTRO_EHEAD *multiCast = clientBuildMessage(servicePort, sizeof(SYNTRO_RECORD_AVMUX)
                                                  + video.size() + audio.size());
        SYNTRO_RECORD_AVMUX *avHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
        SyntroUtils::avmuxHeaderInit(avHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_NORMAL, m_recordIndex++, 0, video.size(), audio.size());
        SyntroUtils::convertInt64ToUC8(QDateTime::currentMSecsSinceEpoch(), avHead->recordHeader.timestamp);

        unsigned char *ptr = (unsigned char *)(avHead + 1);

        if (video.size() > 0) {
            memcpy(ptr, video.data(), video.size());
            ptr += video.size();
        }

        if (audio.size() > 0) {
            memcpy(ptr, audio.data(), audio.size());
        }

        int length = sizeof(SYNTRO_RECORD_AVMUX) + video.size() + audio.size();
        clientSendMessage(servicePort, multiCast, length, SYNTROLINK_MEDPRI);
    }
}
Example #9
void CamClient::processAVQueue()
{
    QByteArray data;

    int videoLength, audioLength;
    SYNTRO_UC4 lengthData;
    QByteArray videoArray;
    QByteArray audioArray;
    int totalLength;
    unsigned char *ptr;
    qint64 videoTimestamp;
    qint64 audioTimestamp;
    int videoParam;
    int audioParam;

    if (m_avmuxPort == -1)
        return;

    if (!clientIsServiceActive(m_avmuxPort)) {             // just discard encoder queue
        while (m_encoder->getCompressedVideo(data, videoTimestamp, videoParam))
            ;
        while (m_encoder->getCompressedAudio(data, audioTimestamp, audioParam))
            ;
        return;
    }

    if (!m_encoder->pipelinesActive())
        return;

    if (SyntroUtils::syntroTimerExpired(QDateTime::currentMSecsSinceEpoch(), m_lastCapsSend, CAMCLIENT_CAPS_INTERVAL)) {
        sendCaps();
        m_lastCapsSend = QDateTime::currentMSecsSinceEpoch();
    }

    if (clientClearToSend(m_avmuxPort)) {

        while (m_encoder->getCompressedVideo(data, videoTimestamp, videoParam)) {
            SyntroUtils::convertIntToUC4(data.length(), lengthData);
            videoArray.append((const char *)lengthData, 4);
            videoArray.append(data);
        }
        videoLength = videoArray.length();

        while (m_encoder->getCompressedAudio(data, audioTimestamp, audioParam)) {
            SyntroUtils::convertIntToUC4(data.length(), lengthData);
            audioArray.append((const char *)lengthData, 4);
            audioArray.append(data);
        }
        audioLength = audioArray.length();

        totalLength = videoLength + audioLength;

        if (totalLength > 0) {
            SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPort, sizeof(SYNTRO_RECORD_AVMUX) + totalLength);
            SYNTRO_RECORD_AVMUX *avmux = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
            SyntroUtils::avmuxHeaderInit(avmux, &m_avParams, SYNTRO_RECORDHEADER_PARAM_NORMAL, m_recordIndex++, 0, videoLength, audioLength);

            if ((audioLength > 0) && (videoLength == 0)) {
                SyntroUtils::convertInt64ToUC8(audioTimestamp, avmux->recordHeader.timestamp);
                SyntroUtils::convertIntToUC2(audioParam, avmux->recordHeader.param);
            } else {
                SyntroUtils::convertInt64ToUC8(videoTimestamp, avmux->recordHeader.timestamp);
                SyntroUtils::convertIntToUC2(videoParam, avmux->recordHeader.param);
            }

            ptr = (unsigned char *)(avmux + 1);

            if (videoLength > 0) {
                memcpy(ptr, videoArray.constData(), videoLength);
                ptr += videoLength;
            }

            if (audioLength > 0) {
                memcpy(ptr, audioArray.constData(), audioLength);
                ptr += audioLength;
            }
            clientSendMessage(m_avmuxPort, multiCast, sizeof(SYNTRO_RECORD_AVMUX) + totalLength, SYNTROLINK_MEDPRI);

            m_videoByteCountLock.lock();
            m_videoByteCount += videoLength;
            m_videoByteCountLock.unlock();
            m_audioByteCountLock.lock();
            m_audioByteCount += audioLength;
            m_audioByteCountLock.unlock();

        }
    }
}
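Note how processAVQueue() prefixes every compressed unit with its 4-byte length (SyntroUtils::convertIntToUC4) before concatenating the units into the avmux payload. A sketch of the matching de-mux step on the receiving side, assuming SyntroUtils::convertUC4ToInt is the inverse conversion:

QList<QByteArray> splitLengthPrefixed(const unsigned char *ptr, int totalLength)
{
    QList<QByteArray> frames;
    const unsigned char *end = ptr + totalLength;

    while (ptr + 4 <= end) {
        // each unit is preceded by its length as a 4-byte field
        // (the cast matches the assumed convertUC4ToInt signature)
        int frameLength = SyntroUtils::convertUC4ToInt((unsigned char *)ptr);
        ptr += 4;
        if ((frameLength < 0) || (ptr + frameLength > end))
            break;                                      // malformed stream - stop rather than overrun
        frames.append(QByteArray((const char *)ptr, frameLength));
        ptr += frameLength;
    }
    return frames;
}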
Example #10
bool CamClient::sendAVMJPPCM(qint64 now, int param, bool checkMotion)
{
    qint64 videoTimestamp;
    qint64 audioTimestamp;
    QByteArray highRateJpeg;
    QByteArray lowRateJpeg;
    QByteArray audioFrame;
    bool audioValid;

    // see if anything to send

    dequeueVideoFrame(highRateJpeg, videoTimestamp);
    lowRateJpeg = highRateJpeg;
    audioValid = dequeueAudioFrame(audioFrame, audioTimestamp);

    if (clientIsServiceActive(m_avmuxPortHighRate)) {
        if (!SyntroUtils::syntroTimerExpired(now, m_lastFullFrameTime, m_highRateMinInterval)) {
            highRateJpeg.clear();
            if (SyntroUtils::syntroTimerExpired(now, m_lastFrameTime, m_highRateNullInterval)) {
                sendNullFrameMJPPCM(now, true);                 // in case very long time
            }
        }
    }
    if (m_generateLowRate && clientIsServiceActive(m_avmuxPortLowRate)) {
        if (!SyntroUtils::syntroTimerExpired(now, m_lastLowRateFullFrameTime, m_lowRateMinInterval)) {
            lowRateJpeg.clear();                          // too soon
            if (SyntroUtils::syntroTimerExpired(now, m_lastLowRateFrameTime, m_lowRateNullInterval)) {
                sendNullFrameMJPPCM(now, false);                // in case very long time
            }
        }
    }
    if ((highRateJpeg.size() > 0) || audioValid) {
        if (clientIsServiceActive(m_avmuxPortHighRate) && clientClearToSend(m_avmuxPortHighRate) ) {
            SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortHighRate, sizeof(SYNTRO_RECORD_AVMUX) + highRateJpeg.size() + audioFrame.size());
            SYNTRO_RECORD_AVMUX *avHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
            SyntroUtils::avmuxHeaderInit(avHead, &m_avParams, param, m_recordIndex++, 0, highRateJpeg.size(), audioFrame.size());
            if (audioValid)
                SyntroUtils::convertInt64ToUC8(audioTimestamp, avHead->recordHeader.timestamp);
            if (highRateJpeg.size() > 0)
                SyntroUtils::convertInt64ToUC8(videoTimestamp, avHead->recordHeader.timestamp);

            unsigned char *ptr = (unsigned char *)(avHead + 1);

            if (highRateJpeg.size() > 0) {
                memcpy(ptr, highRateJpeg.data(), highRateJpeg.size());
                m_lastFullFrameTime = m_lastFrameTime = now;
                ptr += highRateJpeg.size();
            }

            if (audioFrame.size() > 0)
                memcpy(ptr, audioFrame.data(), audioFrame.size());

            int length = sizeof(SYNTRO_RECORD_AVMUX) + highRateJpeg.size() + audioFrame.size();
            clientSendMessage(m_avmuxPortHighRate, multiCast, length, SYNTROLINK_MEDPRI);
        }
    }

    if ((lowRateJpeg.size() > 0) || audioValid) {
        if (m_generateLowRate && clientIsServiceActive(m_avmuxPortLowRate) && clientClearToSend(m_avmuxPortLowRate)) {
            if ((lowRateJpeg.size() > 0) && m_lowRateHalfRes)
                halfRes(lowRateJpeg);

            SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortLowRate, sizeof(SYNTRO_RECORD_AVMUX) + lowRateJpeg.size() + audioFrame.size());
            SYNTRO_RECORD_AVMUX *avHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
            SyntroUtils::avmuxHeaderInit(avHead, &m_avParams, param, m_recordIndex++, 0, lowRateJpeg.size(), audioFrame.size());
            if (audioValid)
                SyntroUtils::convertInt64ToUC8(audioTimestamp, avHead->recordHeader.timestamp);
            if (lowRateJpeg.size() > 0)
                SyntroUtils::convertInt64ToUC8(videoTimestamp, avHead->recordHeader.timestamp);

            unsigned char *ptr = (unsigned char *)(avHead + 1);

            if (lowRateJpeg.size() > 0) {
                memcpy(ptr, lowRateJpeg.data(), lowRateJpeg.size());
                m_lastLowRateFullFrameTime = m_lastLowRateFrameTime = now;
                ptr += lowRateJpeg.size();
            }

            if (audioFrame.size() > 0)
                memcpy(ptr, audioFrame.data(), audioFrame.size());

            int length = sizeof(SYNTRO_RECORD_AVMUX) + lowRateJpeg.size() + audioFrame.size();
            clientSendMessage(m_avmuxPortLowRate, multiCast, length, SYNTROLINK_MEDPRI);
        }
    }

    if ((highRateJpeg.size() > 0) && checkMotion) {
        if ((now - m_lastDeltaTime) > m_deltaInterval)
            checkForMotion(now, highRateJpeg);
        return m_imageChanged;                              // image may have changed
    }
    return false;                                           // change not processed
}
Example #11
void CamClient::sendPrerollMJPPCM(bool highRate)
{
    PREROLL *videoPreroll = NULL;
    PREROLL *audioPreroll = NULL;
    int videoSize = 0;
    int audioSize = 0;

    if (highRate) {
        if (!m_videoPrerollQueue.empty()) {
            videoPreroll = m_videoPrerollQueue.dequeue();
            videoSize = videoPreroll->data.size();
            m_lastFrameTime = QDateTime::currentMSecsSinceEpoch();
        }
        if (!m_audioPrerollQueue.empty()) {
            audioPreroll = m_audioPrerollQueue.dequeue();
            audioSize = audioPreroll->data.size();
        }

        if ((videoPreroll != NULL) || (audioPreroll != NULL)) {
            SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortHighRate, sizeof(SYNTRO_RECORD_AVMUX) + videoSize + audioSize);
            SYNTRO_RECORD_AVMUX *avHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
            SyntroUtils::avmuxHeaderInit(avHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_PREROLL, m_recordIndex++, 0, videoSize, audioSize);

            if (audioPreroll != NULL)
                SyntroUtils::convertInt64ToUC8(audioPreroll->timestamp, avHead->recordHeader.timestamp);
            if (videoPreroll != NULL)
                SyntroUtils::convertInt64ToUC8(videoPreroll->timestamp, avHead->recordHeader.timestamp);

            unsigned char *ptr = (unsigned char *)(avHead + 1);

            if (videoSize > 0) {
                memcpy(ptr, videoPreroll->data.data(), videoSize);
                ptr += videoSize;
            }

            if (audioSize > 0)
                memcpy(ptr, audioPreroll->data.data(), audioSize);

            int length = sizeof(SYNTRO_RECORD_AVMUX) + videoSize + audioSize;
            clientSendMessage(m_avmuxPortHighRate, multiCast, length, SYNTROLINK_MEDPRI);
        }
    } else {
        if (!m_videoLowRatePrerollQueue.empty()) {
            videoPreroll = m_videoLowRatePrerollQueue.dequeue();
            if (m_lowRateHalfRes)
                halfRes(videoPreroll->data);
            videoSize = videoPreroll->data.size();
            m_lastLowRateFrameTime = SyntroClock();
        }
        if (!m_audioLowRatePrerollQueue.empty()) {
            audioPreroll = m_audioLowRatePrerollQueue.dequeue();
            audioSize = audioPreroll->data.size();
        }

        if ((videoPreroll != NULL) || (audioPreroll != NULL)) {
            SYNTRO_EHEAD *multiCast = clientBuildMessage(m_avmuxPortLowRate, sizeof(SYNTRO_RECORD_AVMUX) + videoSize + audioSize);
            SYNTRO_RECORD_AVMUX *avHead = (SYNTRO_RECORD_AVMUX *)(multiCast + 1);
            SyntroUtils::avmuxHeaderInit(avHead, &m_avParams, SYNTRO_RECORDHEADER_PARAM_PREROLL, m_recordIndex++, 0, videoSize, audioSize);

            if (audioPreroll != NULL)
                SyntroUtils::convertInt64ToUC8(audioPreroll->timestamp, avHead->recordHeader.timestamp);
            if (videoPreroll != NULL)
                SyntroUtils::convertInt64ToUC8(videoPreroll->timestamp, avHead->recordHeader.timestamp);

            unsigned char *ptr = (unsigned char *)(avHead + 1);

            if (videoSize > 0) {
                memcpy(ptr, videoPreroll->data.data(), videoSize);
                ptr += videoSize;
            }

            if (audioSize > 0)
                memcpy(ptr, audioPreroll->data.data(), audioSize);

            int length = sizeof(SYNTRO_RECORD_AVMUX) + videoSize + audioSize;
            clientSendMessage(m_avmuxPortLowRate, multiCast, length, SYNTROLINK_MEDPRI);
        }
    }

    if (videoPreroll != NULL)
        delete videoPreroll;
    if (audioPreroll != NULL)
        delete audioPreroll;
}
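Each call dequeues at most one video and one audio preroll record, so a caller presumably drains the queues with a loop along these lines (a sketch, not from the original source):

    // replay the whole high rate preroll; pass false for the low rate queues
    while (!m_videoPrerollQueue.empty() || !m_audioPrerollQueue.empty())
        sendPrerollMJPPCM(true);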
Example #12
errorCode RTPImageConnector::run()
{
	uint64_t finish = 0;
	while(finish == 0)
	{
		ImageBufferContainer* message;
		while ((message = clientGetMessage()) != NULL)
		{
			if (message->type == GROUP_OBSERVED)
			{
				int nr;
				for (nr = 0; nr < streams.size(); nr++)
					if (!streams[nr].is_used)
						break;
				if (nr < streams.size())
				{
					//gst-launch-1.0 appsrc ! videoconvert ! x264enc ! rtph264pay config-interval=10 pt=96 ! udpsink host=127.0.0.1 port=5000
					GST_LOAD_ELEMENT_OR_DIE(streams[nr],appsrc)
					g_object_set (G_OBJECT (streams[nr].appsrc), "caps",
						gst_caps_new_simple ("video/x-raw",
						"format", G_TYPE_STRING, "RGBx",
						"bpp", G_TYPE_INT, 32,
						"depth", G_TYPE_INT, 32,
						"width", G_TYPE_INT, message->group->getFramebufferWidth(),
						"height", G_TYPE_INT, message->group->getFramebufferHeight(),
						"framerate", GST_TYPE_FRACTION, 0, 1,
						NULL), NULL);
						g_object_set (G_OBJECT (streams[nr].appsrc),
							"do-timestamp", 1,
							"min-percent", 0,
							"emit-signals", 0,
							"format", GST_FORMAT_TIME, NULL);
					GST_LOAD_ELEMENT_OR_DIE(streams[nr],videoconvert)
					if (raw)
					{
						GST_LOAD_ELEMENT_OR_DIE(streams[nr],capsfilter)
						g_object_set (G_OBJECT (streams[nr].capsfilter), "caps",
							gst_caps_new_simple ("video/x-raw",
							"format", G_TYPE_STRING, "I420",
							NULL), NULL);
						GST_LOAD_ELEMENT_OR_DIE(streams[nr],jpegenc)
						GST_LOAD_ELEMENT_OR_DIE(streams[nr],rtpjpegpay)
						g_object_set(G_OBJECT(streams[nr].rtpjpegpay),
							"pt", 96, NULL);
					}
					else
					{
						GST_LOAD_ELEMENT_OR_DIE(streams[nr],x264enc)
						size_t bitrate_heuristic = (size_t) (
							(uint64_t)3000 *
							(uint64_t)message->group->getFramebufferWidth() *
							(uint64_t)message->group->getFramebufferHeight() /
							(uint64_t)800 /
							(uint64_t)600 );
						g_object_set (G_OBJECT (streams[nr].x264enc),
							"tune", zerolatency ? 0x00000004 : 0x00000000,
							"psy-tune", 2,
							"speed-preset", 1,
							"bitrate", bitrate_heuristic,
							"threads", 2,
							"byte-stream", 1,  NULL);
						GST_LOAD_ELEMENT_OR_DIE(streams[nr],rtph264pay)
						g_object_set(G_OBJECT(streams[nr].rtph264pay),
							"config-interval", 10,
							"pt", 96, NULL);
					}
					GST_LOAD_ELEMENT_OR_DIE(streams[nr],udpsink)
					g_object_set(G_OBJECT(streams[nr].udpsink),
						"host", message->target.c_str(),
						"port", nr+minport, NULL);
					
					streams[nr].pipeline = gst_pipeline_new( NULL );
					streams[nr].bin = gst_bin_new( NULL );
					gboolean success = 0;
					if (raw)
					{
						gst_bin_add_many(GST_BIN(streams[nr].bin), streams[nr].appsrc, streams[nr].videoconvert, streams[nr].capsfilter, streams[nr].jpegenc, streams[nr].rtpjpegpay, streams[nr].udpsink, NULL);
						gst_bin_add(GST_BIN(streams[nr].pipeline), streams[nr].bin);
						success = gst_element_link_many(streams[nr].appsrc, streams[nr].videoconvert, streams[nr].capsfilter, streams[nr].jpegenc, streams[nr].rtpjpegpay, streams[nr].udpsink, NULL);
					}
					else
					{
						gst_bin_add_many(GST_BIN(streams[nr].bin), streams[nr].appsrc, streams[nr].videoconvert, streams[nr].x264enc, streams[nr].rtph264pay, streams[nr].udpsink, NULL);
						gst_bin_add(GST_BIN(streams[nr].pipeline), streams[nr].bin);
						success = gst_element_link_many(streams[nr].appsrc, streams[nr].videoconvert, streams[nr].x264enc, streams[nr].rtph264pay, streams[nr].udpsink, NULL);
					}
					if ( !success )
					{
						fprintf(stderr,"RTPImageConnector: Could not link elements for rtp stream.\n");
					}
					else
					{
						streams[nr].is_used = true;
						streams[nr].group = message->group;
						streams[nr].ref = message->reference;
						streams[nr].url = message->target;
						char* register_message = (char*)malloc(128);
						if (raw)
							snprintf(register_message,128,"v=0\nm=video %i RTP/AVP 96\nc=IN IP4 %s\na=rtpmap:96 JPEG/90000\n",nr+minport,url.c_str());
						else
							snprintf(register_message,128,"v=0\nm=video %i RTP/AVP 96\nc=IN IP4 %s\na=rtpmap:96 H264/90000\n",nr+minport,url.c_str());
						clientSendMessage(new ImageBufferContainer(REGISTER_STREAM,(uint8_t*)register_message,message->group,1,message->target,message->reference));
						if (raw)
							printf("RTPImageConnector: Opened JPEG stream at port %i\n",minport+nr);
						else
							printf("RTPImageConnector: Opened H264 stream at port %i\n",minport+nr);
						if (gst_element_set_state(GST_ELEMENT(streams[nr].pipeline), GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
							printf("RTPImageConnector: Could not play stream!\n");
					}
				}
				else
					fprintf(stderr,"RTPImageConnector: No free port!\n");
			}