Code Example #1
void Inform::setOutputContext(Context_t outputContext, ID_t id)
{
  // Find the proper connection
  InformStream *s = findStream(id);
  PAssert(s != 0);

  // Change the output context
  s->setOutputContext(outputContext);
}
Code Example #2
void Inform::setOutputLevel(Level_t newval, ID_t id)
{
  // Find the proper connection
  InformStream *s = findStream(id);
  PAssert(s != 0);

  // Change the output level
  s->setOutputLevel(newval);
}
Code Example #3
Inform::Context_t Inform::outputContext(ID_t id) const
{
  // Find the proper connection
  InformStream *s = findStream(id);
  PAssert(s != 0);

  // Return the output context
  return s->outputContext();
}
Code Example #4
Inform::Level_t Inform::outputLevel(ID_t id) const
{
  // Find the proper connection
  InformStream *s = findStream(id);
  PAssert(s != 0);

  // Return the output level
  return s->outputLevel();
}
Code Example #5
void TCPStreamDialog::on_streamNumberSpinBox_valueChanged(int new_stream)
{
    if (new_stream >= 0 && new_stream < int(get_tcp_stream_count())) {
        graph_.stream = new_stream;
        clear_address(&graph_.src_address);
        clear_address(&graph_.dst_address);
        findStream();
        fillGraph();
    }
}
Code Example #6
/* Listener client attaches to a stream
 */
bool openavbEptSrvrAttachStream(int h,
                            AVBStreamID_t *streamID,
                            openavbSrpLsnrDeclSubtype_t ld)
{
	openavbRC rc = OPENAVB_SUCCESS;
	static U8 emptyMAC[ETH_ALEN] = { 0, 0, 0, 0, 0, 0 };
	static AVBTSpec_t emptytSpec = {0, 0};

	AVB_TRACE_ENTRY(AVB_TRACE_ENDPOINT);

	clientStream_t *ps = findStream(streamID);
	if (ps && ps->clientHandle != h) {
		AVB_LOGF_ERROR("Error attaching listener: multiple clients for stream %d", streamID->uniqueID);
		AVB_TRACE_EXIT(AVB_TRACE_ENDPOINT);
		return FALSE;
	}

	if (!ps) {
		ps = addStream(h, streamID);
		if (!ps) {
			AVB_LOGF_ERROR("Error attaching listener: unable to add client stream %d", streamID->uniqueID);
			AVB_TRACE_EXIT(AVB_TRACE_ENDPOINT);
			return FALSE;
		}
		ps->role = clientListener;
	}

	if(x_cfg.noSrp) {
		// we are operating in a mode supporting preconfigured streams; SRP is not in use,
		if(ld == openavbSrp_LDSt_Interest) {
			// As a proxy for SRP, which would normally make this call after confirming
			// availability of the stream, call the callback from here
			strmRegCb((void*)ps, openavbSrp_AtTyp_TalkerAdvertise,
					  emptyMAC, // a flag to listener to read info from configuration file
					  &emptytSpec,
					  MAX_AVB_SR_CLASSES, // srClass - value doesn't matter because openavbEptSrvrNotifyLstnrOfSrpCb() throws it away
					  1, // accumLatency
					  NULL); // *failInfo
		}
	} else {
		// Normal SRP Operation so pass to SRP
		rc = openavbSrpAttachStream((void*)ps, streamID, ld);
		if (!IS_OPENAVB_SUCCESS(rc))
			delStream(ps);
	}

	openavbEndPtLogAllStaticStreams();

	AVB_TRACE_EXIT(AVB_TRACE_ENDPOINT);
	return IS_OPENAVB_SUCCESS(rc);
}
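
Examples #6, #8, and #10 share the same register/attach/stop flow in the OpenAvnu endpoint server: findStream(streamID) either returns the existing clientStream_t for that stream ID or NULL, and the caller then reuses, adds, or rejects the record. Below is a minimal sketch of such a lookup, assuming a singly linked list of client stream records; the struct layout and the global list head are hypothetical, not the actual OpenAvnu code.

#include <stddef.h>
#include <string.h>

#define ETH_ALEN 6

/* Hypothetical, trimmed-down stand-ins for the real endpoint types. */
typedef struct {
    unsigned char addr[ETH_ALEN];  /* talker MAC address */
    unsigned short uniqueID;       /* stream unique ID */
} AVBStreamID_t;

typedef struct clientStream_t {
    struct clientStream_t *next;
    int clientHandle;
    AVBStreamID_t streamID;
} clientStream_t;

static clientStream_t *streamList = NULL;  /* assumed registry of known streams */

/* Walk the list and match on the full (MAC, uniqueID) stream ID. */
static clientStream_t *findStream(AVBStreamID_t *streamID)
{
    clientStream_t *ps;
    for (ps = streamList; ps; ps = ps->next) {
        if (ps->streamID.uniqueID == streamID->uniqueID &&
            memcmp(ps->streamID.addr, streamID->addr, ETH_ALEN) == 0) {
            return ps;
        }
    }
    return NULL;
}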
Code Example #7
File: ws_api.cpp  Project: nonolith/connect
bool StreamingDevice::processMessage(ClientConn& client, string& cmd, JSONNode& n){
	if (cmd == "listen"){
		cancelListen(findListener(&client, jsonIntProp(n, "id")));
		addListener(makeStreamListener(this, &client, n));
	
	}else if (cmd == "cancelListen"){
		cancelListen(findListener(&client, jsonIntProp(n, "id")));
	
	}else if (cmd == "configure"){
		int      mode =       jsonIntProp(n,   "mode");
		unsigned samples =    jsonIntProp(n,   "samples");
		double    sampleTime = jsonFloatProp(n, "sampleTime");
		bool     continuous = jsonBoolProp(n,  "continuous", false);
		bool     raw =        jsonBoolProp(n,  "raw", false);
		configure(mode, sampleTime, samples, continuous, raw);
	
	}else if (cmd == "startCapture"){
		start_capture();
	
	}else if (cmd == "pauseCapture"){
		pause_capture();
	
	}else if (cmd == "set"){
		Channel *channel = channelById(jsonStringProp(n, "channel"));
		if (!channel) throw ErrorStringException("Channel not found");
		setOutput(channel, makeSource(n));
		
	}else if (cmd == "setGain"){
		Channel *channel = channelById(jsonStringProp(n, "channel"));
		if (!channel) throw ErrorStringException("Channel not found");
		Stream *stream = findStream(
				jsonStringProp(n, "channel"),
				jsonStringProp(n, "stream"));

		double gain = jsonFloatProp(n, "gain", 1);
		
		setGain(channel, stream, gain);
	}else if (cmd == "setCurrentLimit"){
		unsigned limit = jsonFloatProp(n, "currentLimit");
		setCurrentLimit(limit);
	}else{
		return false;
	}
	return true;
}
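
In Example #7, findStream(channel, stream) resolves a pair of string IDs taken from the JSON message into a Stream object before setGain() is applied. A plausible shape for that two-level lookup is sketched below; the Channel/Stream fields and the container choice are assumptions for illustration, not the actual nonolith/connect implementation.

#include <string>
#include <vector>

// Hypothetical, minimal stand-ins for the real device model.
struct Stream  { std::string id; };
struct Channel { std::string id; std::vector<Stream> streams; };

struct StreamingDevice {
    std::vector<Channel> channels;

    // Find the channel by ID, then the stream by ID within that channel.
    Stream* findStream(const std::string& channelId, const std::string& streamId) {
        for (Channel& c : channels) {
            if (c.id != channelId) continue;
            for (Stream& s : c.streams) {
                if (s.id == streamId) return &s;
            }
        }
        return nullptr;  // callers such as the "setGain" handler must cope with a missing stream
    }
};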
Code Example #8
/* Client (talker or listener) going away
 */
bool openavbEptSrvrStopStream(int h, AVBStreamID_t *streamID)
{
	AVB_TRACE_ENTRY(AVB_TRACE_ENDPOINT);

	clientStream_t *ps = findStream(streamID);
	if (!ps || ps->clientHandle != h) {
		AVB_LOGF_ERROR("Error stopping client: missing record for stream %d", streamID->uniqueID);
		AVB_TRACE_EXIT(AVB_TRACE_ENDPOINT);
		return FALSE;
	}

	bool rc = FALSE;
	if (ps->role == clientTalker)
		rc = x_talkerDeregister(ps);
	else if (ps->role == clientListener)
		rc = x_listenerDetach(ps);

	AVB_TRACE_EXIT(AVB_TRACE_ENDPOINT);
	return rc;
}
Code Example #9
void*
StreamedMesh::internalStreamPtr()
{    
    if (vertexBufferSize() == 0)
    {
        LOG ("Error, internal vertex buffer size is 0");
        return 0;
    }

    if (!_vertexBufferCpuMemory)
    {
        _vertexBufferCpuMemory = (float*)malloc(sizeof(float)*vertexBufferSize());
    }

    // stash each stream's data pointer in a map keyed by stream id
    std::map <uint32_t, float*> streamMap;

    for (auto it = _vertexStreams.begin();
        it != _vertexStreams.end();
        it++)
    {
        streamMap.insert (std::make_pair(it->first, it->second->stream()));
    }

    const std::vector <VertexElement>& declaration = _vertexDeclaration->getDeclaration();
    float* vb = (float*)_vertexBufferCpuMemory;

    for (uint32_t i = 0; i < vertexCount(); i++)
    {    
        for (uint32_t j = 0; j < declaration.size()-1; j++)
        {
            VertexElement element = declaration[j];
            VertexStream* stream = 0;
            if (findStream (stream, element))
            {
                uint32_t streamId = VertexStream::id(*stream);
                auto streamIt =  streamMap.find(streamId);

                if (streamIt != streamMap.end() &&
                    streamIt->second)
                {
                    for (uint32_t k = 0; k < stream->stride(); k++)
                    {
                        *vb++ = streamIt->second[k];
                    }
                    streamIt->second += stream->stride();
                }
                else
                {
                    uint32_t stride = IVertexDeclaration::elementToSize(element.type());
                    for (uint32_t k = 0; k < stride; k++)
                    {
                        *vb++ = 0;
                    }
                }
            }
            else
            {
                // can't find a stream for this element.
                // warn user, and 0 the buffer for the stride size
                uint32_t stride = IVertexDeclaration::elementToSize(element.type());
                for (uint32_t k = 0; k < stride; k++)
                {
                    *vb++ = 0;
                }
            }
        }
    }

    return _vertexBufferCpuMemory;
}
Code Example #10
/* Talker client registers a stream
 */
bool openavbEptSrvrRegisterStream(int h,
                              AVBStreamID_t *streamID,
                              U8 destAddr[],
                              AVBTSpec_t *tSpec,
                              U8 srClass,
                              U8 srRank,
                              U32 latency)
{
	openavbRC rc = OPENAVB_SUCCESS;

	AVB_TRACE_ENTRY(AVB_TRACE_ENDPOINT);

	clientStream_t *ps = findStream(streamID);
	
	if (ps && ps->clientHandle != h) {
		AVB_LOGF_ERROR("Error registering talker; multiple clients for stream %d", streamID->uniqueID);
		AVB_TRACE_EXIT(AVB_TRACE_ENDPOINT);
		return FALSE;
	}

	ps = addStream(h, streamID);
	if (!ps) {
		AVB_LOGF_ERROR("Error registering talker; unable to add client stream %d", streamID->uniqueID);
		AVB_TRACE_EXIT(AVB_TRACE_ENDPOINT);
		return FALSE;
	}
	ps->role = clientTalker;
	ps->tSpec = *tSpec;
	ps->srClass = (SRClassIdx_t)srClass;
	ps->srRank  = srRank;
	ps->latency = latency;
	ps->fwmark = INVALID_FWMARK;

	if (memcmp(ps->destAddr, destAddr, ETH_ALEN) == 0) {
		// no client-supplied address, use MAAP
		struct ether_addr addr;
		ps->hndMaap = openavbMaapAllocate(1, &addr);
		if (ps->hndMaap) {
			memcpy(ps->destAddr, addr.ether_addr_octet, ETH_ALEN);
			strmAttachCb((void*)ps, openavbSrp_LDSt_Stream_Info);		// Inform talker about MAAP
		}
		else {
			AVB_LOG_ERROR("Error registering talker: MAAP failed to allocate MAC address");
			AVB_TRACE_EXIT(AVB_TRACE_ENDPOINT);
			delStream(ps);
			return FALSE;
		}
	}
	else {
		// client-supplied destination MAC address
		memcpy(ps->destAddr, destAddr, ETH_ALEN);
		ps->hndMaap = NULL;
	}

	// Do SRP talker register
	AVB_LOGF_DEBUG("REGISTER: ps=%p, streamID=%d, tspec=%d,%d, srClass=%d, srRank=%d, latency=%d, da="ETH_FORMAT"",
				   ps, streamID->uniqueID,
				   tSpec->maxFrameSize, tSpec->maxIntervalFrames,
				   ps->srClass, ps->srRank, ps->latency,
				   ETH_OCTETS(ps->destAddr));


	if(x_cfg.noSrp) {
		// we are operating in a mode supporting preconfigured streams; SRP is not in use,
		// so, as a proxy for SRP, which would normally make this call after establishing
		// the stream, call the callback from here
		strmAttachCb((void*)ps, openavbSrp_LDSt_Ready);
	} else {
		// normal SRP operation
		rc = openavbSrpRegisterStream((void*)ps, &ps->streamID,
		                          ps->destAddr, &ps->tSpec,
		                          ps->srClass, ps->srRank,
		                          ps->latency);
		if (!IS_OPENAVB_SUCCESS(rc)) {
			if (ps->hndMaap)
				openavbMaapRelease(ps->hndMaap);
			delStream(ps);
		}
	}

	openavbEndPtLogAllStaticStreams();
	
	AVB_TRACE_EXIT(AVB_TRACE_ENDPOINT);
	return IS_OPENAVB_SUCCESS(rc);
}
Code Example #11
TCPStreamDialog::TCPStreamDialog(QWidget *parent, capture_file *cf, tcp_graph_type graph_type) :
    QDialog(NULL, Qt::Window),
    ui(new Ui::TCPStreamDialog),
    cap_file_(cf),
    ts_offset_(0),
    ts_origin_conn_(true),
    seq_offset_(0),
    seq_origin_zero_(true),
    title_(NULL),
    base_graph_(NULL),
    tput_graph_(NULL),
    seg_graph_(NULL),
    ack_graph_(NULL),
    rwin_graph_(NULL),
    tracer_(NULL),
    packet_num_(0),
    mouse_drags_(true),
    rubber_band_(NULL),
    num_dsegs_(-1),
    num_acks_(-1),
    num_sack_ranges_(-1)
{
    struct segment current;
    int graph_idx = -1;

    ui->setupUi(this);
    setAttribute(Qt::WA_DeleteOnClose, true);

    graph_.type = GRAPH_UNDEFINED;
    set_address(&graph_.src_address, AT_NONE, 0, NULL);
    graph_.src_port = 0;
    set_address(&graph_.dst_address, AT_NONE, 0, NULL);
    graph_.dst_port = 0;
    graph_.stream = 0;
    graph_.segments = NULL;

    struct tcpheader *header = select_tcpip_session(cap_file_, &current);
    if (!header) {
        done(QDialog::Rejected);
        return;
    }

//#ifdef Q_OS_MAC
//    ui->hintLabel->setAttribute(Qt::WA_MacSmallSize, true);
//#endif

    QComboBox *gtcb = ui->graphTypeComboBox;
    gtcb->setUpdatesEnabled(false);
    gtcb->addItem(ui->actionRoundTripTime->text(), GRAPH_RTT);
    if (graph_type == GRAPH_RTT) graph_idx = gtcb->count() - 1;
    gtcb->addItem(ui->actionThroughput->text(), GRAPH_THROUGHPUT);
    if (graph_type == GRAPH_THROUGHPUT) graph_idx = gtcb->count() - 1;
    gtcb->addItem(ui->actionStevens->text(), GRAPH_TSEQ_STEVENS);
    if (graph_type == GRAPH_TSEQ_STEVENS) graph_idx = gtcb->count() - 1;
    gtcb->addItem(ui->actionTcptrace->text(), GRAPH_TSEQ_TCPTRACE);
    if (graph_type == GRAPH_TSEQ_TCPTRACE) graph_idx = gtcb->count() - 1;
    gtcb->addItem(ui->actionWindowScaling->text(), GRAPH_WSCALE);
    if (graph_type == GRAPH_WSCALE) graph_idx = gtcb->count() - 1;
    gtcb->setUpdatesEnabled(true);

    ui->dragRadioButton->setChecked(mouse_drags_);

    ctx_menu_.addAction(ui->actionZoomIn);
    ctx_menu_.addAction(ui->actionZoomInX);
    ctx_menu_.addAction(ui->actionZoomInY);
    ctx_menu_.addAction(ui->actionZoomOut);
    ctx_menu_.addAction(ui->actionZoomOutX);
    ctx_menu_.addAction(ui->actionZoomOutY);
    ctx_menu_.addAction(ui->actionReset);
    ctx_menu_.addSeparator();
    ctx_menu_.addAction(ui->actionMoveRight10);
    ctx_menu_.addAction(ui->actionMoveLeft10);
    ctx_menu_.addAction(ui->actionMoveUp10);
    ctx_menu_.addAction(ui->actionMoveDown10);
    ctx_menu_.addAction(ui->actionMoveRight1);
    ctx_menu_.addAction(ui->actionMoveLeft1);
    ctx_menu_.addAction(ui->actionMoveUp1);
    ctx_menu_.addAction(ui->actionMoveDown1);
    ctx_menu_.addSeparator();
    ctx_menu_.addAction(ui->actionNextStream);
    ctx_menu_.addAction(ui->actionPreviousStream);
    ctx_menu_.addAction(ui->actionSwitchDirection);
    ctx_menu_.addAction(ui->actionGoToPacket);
    ctx_menu_.addSeparator();
    ctx_menu_.addAction(ui->actionDragZoom);
    ctx_menu_.addAction(ui->actionToggleSequenceNumbers);
    ctx_menu_.addAction(ui->actionToggleTimeOrigin);
    ctx_menu_.addAction(ui->actionCrosshairs);
    ctx_menu_.addSeparator();
    ctx_menu_.addAction(ui->actionRoundTripTime);
    ctx_menu_.addAction(ui->actionThroughput);
    ctx_menu_.addAction(ui->actionStevens);
    ctx_menu_.addAction(ui->actionTcptrace);
    ctx_menu_.addAction(ui->actionWindowScaling);

    memset (&graph_, 0, sizeof(graph_));
    graph_.type = graph_type;
    copy_address(&graph_.src_address, &current.ip_src);
    graph_.src_port = current.th_sport;
    copy_address(&graph_.dst_address, &current.ip_dst);
    graph_.dst_port = current.th_dport;
    graph_.stream = header->th_stream;
    findStream();

    ui->streamNumberSpinBox->blockSignals(true);
    ui->streamNumberSpinBox->setMaximum(get_tcp_stream_count() - 1);
    ui->streamNumberSpinBox->setValue(graph_.stream);
    ui->streamNumberSpinBox->blockSignals(false);

    QCustomPlot *sp = ui->streamPlot;
    QCPPlotTitle *file_title = new QCPPlotTitle(sp, cf_get_display_name(cap_file_));
    file_title->setFont(sp->xAxis->labelFont());
    title_ = new QCPPlotTitle(sp);
    sp->plotLayout()->insertRow(0);
    sp->plotLayout()->addElement(0, 0, file_title);
    sp->plotLayout()->insertRow(0);
    sp->plotLayout()->addElement(0, 0, title_);

    base_graph_ = sp->addGraph(); // All: Selectable segments
    base_graph_->setPen(QPen(QBrush(graph_color_1), 0.25));
    tput_graph_ = sp->addGraph(sp->xAxis, sp->yAxis2); // Throughput: Moving average
    tput_graph_->setPen(QPen(QBrush(graph_color_2), 0.5));
    tput_graph_->setLineStyle(QCPGraph::lsLine);
    seg_graph_ = sp->addGraph(); // tcptrace: fwd segments
    seg_graph_->setErrorType(QCPGraph::etValue);
    seg_graph_->setLineStyle(QCPGraph::lsNone);
    seg_graph_->setScatterStyle(QCPScatterStyle(QCPScatterStyle::ssDot, Qt::transparent, 0));
    seg_graph_->setErrorPen(QPen(QBrush(graph_color_1), 0.5));
    seg_graph_->setErrorBarSize(pkt_point_size_);
    ack_graph_ = sp->addGraph(); // tcptrace: rev ACKs
    ack_graph_->setPen(QPen(QBrush(graph_color_2), 0.5));
    ack_graph_->setLineStyle(QCPGraph::lsStepLeft);
    rwin_graph_ = sp->addGraph(); // tcptrace: rev RWIN
    rwin_graph_->setPen(QPen(QBrush(graph_color_3), 0.5));
    rwin_graph_->setLineStyle(QCPGraph::lsStepLeft);

    tracer_ = new QCPItemTracer(sp);
    sp->addItem(tracer_);

    // Triggers fillGraph().
    ui->graphTypeComboBox->setCurrentIndex(graph_idx);

    sp->setMouseTracking(true);

    sp->yAxis->setLabelColor(QColor(graph_color_1));
    sp->yAxis->setTickLabelColor(QColor(graph_color_1));

    tracer_->setVisible(false);
    toggleTracerStyle(true);

    QPushButton *save_bt = ui->buttonBox->button(QDialogButtonBox::Save);
    save_bt->setText(tr("Save As" UTF8_HORIZONTAL_ELLIPSIS));

    QPushButton *close_bt = ui->buttonBox->button(QDialogButtonBox::Close);
    if (close_bt) {
        close_bt->setDefault(true);
    }

    ProgressFrame::addToButtonBox(ui->buttonBox, parent);

    connect(sp, SIGNAL(mousePress(QMouseEvent*)), this, SLOT(graphClicked(QMouseEvent*)));
    connect(sp, SIGNAL(mouseMove(QMouseEvent*)), this, SLOT(mouseMoved(QMouseEvent*)));
    connect(sp, SIGNAL(mouseRelease(QMouseEvent*)), this, SLOT(mouseReleased(QMouseEvent*)));
    connect(sp, SIGNAL(axisClick(QCPAxis*,QCPAxis::SelectablePart,QMouseEvent*)),
            this, SLOT(axisClicked(QCPAxis*,QCPAxis::SelectablePart,QMouseEvent*)));
    connect(sp->yAxis, SIGNAL(rangeChanged(QCPRange)), this, SLOT(transformYRange(QCPRange)));
    disconnect(ui->buttonBox, SIGNAL(accepted()), this, SLOT(accept()));
    this->setResult(QDialog::Accepted);
}
Code Example #12
File: capture_dummy.c  Project: amirsdream/openvcx
static int capture_send_dummy_frames(CAP_SEND_DUMMY_FRAMES_CTXT_T *pCtxt) {
  int rc = 0;
  CAPTURE_CBDATA_SP_T *pSp;
  CAPTURE_STREAM_T *pStream;
  PKTQ_EXTRADATA_T xtra;
  uint64_t pts;
  enum STREAM_NET_ADVFR_RC timeRc;
  unsigned int idx;
  unsigned char buf[16384];
  uint64_t pts0[CAPTURE_MAX_FILTERS];
  uint64_t frameIds[CAPTURE_MAX_FILTERS];
  unsigned int frameSz[CAPTURE_MAX_FILTERS];
  float fps[CAPTURE_MAX_FILTERS];
  TIME_VAL tvStart[CAPTURE_MAX_FILTERS];
  int is_init[CAPTURE_MAX_FILTERS];
  int haveSendOnly = 0;
  CAPTURE_FILTER_T *pFilters;
  unsigned int numFilters;
  CAP_ASYNC_DESCR_T *pCfg = pCtxt->pCfg;
  CAPTURE_STATE_T *pState = pCtxt->pState;

  buf[0] = 0x00;
  memset(frameIds, 0, sizeof(frameIds));
  memset(is_init, 0, sizeof(is_init));
  memset(fps, 0, sizeof(fps));
  memset(frameSz, 0, sizeof(frameSz));
  memset(pts0, 0, sizeof(pts0));
  memset(buf, 0, sizeof(buf));
  pFilters = pState->filt.filters;
  //pFilters = pCfg->pcommon->filters;
  numFilters = pState->filt.numFilters;

  LOG(X_DEBUG("Started dummy input frame processor"));

  while(pCfg->running == STREAMER_STATE_RUNNING  && g_proc_exit == 0 && 
        *pCtxt->prunning == STREAMER_STATE_RUNNING) {

    haveSendOnly = 0;

    for(idx = 0; idx < numFilters; idx++) {

      if(pCfg->pStreamerCfg->cfgrtp.xmitType != SDP_XMIT_TYPE_SENDONLY &&
         pFilters[idx].xmitType != SDP_XMIT_TYPE_SENDONLY) {

        //
        // Clear the xcode decode input flag
        //
        setNoDecode(&pFilters[idx], pCfg->pStreamerCfg, 0);

        if(is_init[idx]) {

          //
          // Restore the xcode input file type
          //
          if(codectype_isVid(pFilters[idx].mediaType)) {
            pCfg->pStreamerCfg->xcode.vid.common.cfgFileTypeIn = pFilters[idx].mediaType;
          } else if(codectype_isAud(pFilters[idx].mediaType)) {
            pCfg->pStreamerCfg->xcode.aud.common.cfgFileTypeIn = pFilters[idx].mediaType;
          }

          pthread_mutex_lock(pCtxt->pmtx);

          if((pStream = findStream(pState, &pFilters[idx]))) {
            //
            // Clear the RTCP BYE reception flag since some clients may have sent an RTCP BYE when going on hold.
            //
            //pStream->haveRtcpBye = 0;
            capture_delete_stream(pState, pStream);
          }

          pthread_mutex_unlock(pCtxt->pmtx);

          is_init[idx] = 0;
        }

        continue;

      } else if(!is_init[idx]) {

        init_filter(pCfg, &pFilters[idx], &fps[idx], &frameSz[idx]); 
        if(frameSz[idx] > sizeof(buf)) {
          LOG(X_ERROR("Capture dummy stream frame size %d exceeds %d"), frameSz[idx], sizeof(buf));
          return -1;
        }

        pthread_mutex_lock(pCtxt->pmtx);

        if(!(pStream = on_new_stream(pState, &pFilters[idx], &pts0[idx]))) {
          pthread_mutex_unlock(pCtxt->pmtx);
          return -1;
        }

        pthread_mutex_unlock(pCtxt->pmtx);

        is_init[idx] = 1;

      }

      haveSendOnly = 1;

      pthread_mutex_lock(pCtxt->pmtx);

      if(!(pStream = findStream(pState, &pFilters[idx])) || !(pSp = pStream->pCbUserData)) {
        LOG(X_ERROR("Unable to find dummy stream for filter media type: %d"), pFilters[idx].mediaType);
        pthread_mutex_unlock(pCtxt->pmtx);
        return -1;
      }

      pthread_mutex_unlock(pCtxt->pmtx);

      //
      // Set the xcode decode input flag
      //
      setNoDecode(&pFilters[idx], pCfg->pStreamerCfg, 1);

      //
      // Add the dummy frame to the input capture queue.  The dummy frame timing is used to 
      // drive the stream frame processor.
      //
      memset(&xtra, 0, sizeof(xtra));
      //xtra.flags = CAPTURE_SP_FLAG_KEYFRAME;

      if((timeRc = stream_net_check_time(&tvStart[idx], &frameIds[idx], fps[idx], 1, &pts, NULL)) ==
        STREAM_NET_ADVFR_RC_OK) {
        xtra.tm.pts = pts + pts0[idx];
        //fprintf(stderr, "ADD_DUMMY_FRAME [%d] fps:%.3f, clockHz:%d pts:%.3f (%llu) (relative pts:%.3f)\n", idx, fps[idx], pSp->pStream->clockHz, PTSF(xtra.tm.pts), xtra.tm.pts, PTSF(pts));
        rc = pktqueue_addpkt(pSp->pCapAction->pQueue, buf, frameSz[idx], &xtra, 1);
        //capture_addCompleteFrameToQ(pSp, ts);
      }

    }

    if(!haveSendOnly) {
      usleep(20000);
    }

  }

  LOG(X_DEBUG("Finished dummy input frame processor"));

  return 0;
}