Code example #1
File: capture_pkt_vp8.c Project: mmashimaro/openvcx
int cbOnPkt_vp8(void *pUserData, const COLLECT_STREAM_PKTDATA_T *pPkt) {
  int rc = 0;
  CAPTURE_CBDATA_SP_T *pSp = (CAPTURE_CBDATA_SP_T *) pUserData;
  CAPTURE_STORE_CBDATA_T cbData;
  int queueFr = 0;

  if(pSp == NULL) {
    return -1;
  } else if(!pPkt || !pPkt->payload.pData) {
    // Packet was lost
    LOG(X_WARNING("RTP VP8 lost packet, last seq:%u, last ts:%u"), pSp->lastPktSeq, pSp->lastPktTs);
    pSp->spFlags |= (CAPTURE_SP_FLAG_DAMAGEDFRAME | CAPTURE_SP_FLAG_PREVLOST);
    return 0;
  }

  VSX_DEBUG(
    //VSX_DEBUGLOG_TIME
    LOG(X_DEBUG("rtp-recv-vp8 len:%d seq:%u ts:%u(%.3f) ts0:%u(%.3f, dlta:%.3f), mrk:%d ssrc:0x%x "
                   "flags:0x%x pQ:0x%x"),
       PKTCAPLEN(pPkt->payload), pPkt->u.rtp.seq, pPkt->u.rtp.ts, PTSF(pPkt->u.rtp.ts),
       pSp->pStream->ts0,
       (pSp->pStream->clockHz > 0 ? ((double)pSp->pStream->ts0/pSp->pStream->clockHz) : 0),
       (pSp->pStream->clockHz > 0 ? ((double)(pPkt->u.rtp.ts - pSp->pStream->ts0)/pSp->pStream->clockHz) : 0),
       (pPkt->u.rtp.marksz & PKTDATA_RTP_MASK_MARKER)?1:0,
       pSp->pStream->hdr.key.ssrc, pSp->spFlags, pSp->pCapAction && pSp->pCapAction->pQueue ? 1 : 0);
    if(pPkt && PKTCAPLEN(pPkt->payload) > 0) {
      LOGHEX_DEBUG(pPkt->payload.pData, MIN(16, PKTCAPLEN(pPkt->payload)));
    }
    //fprintf(stderr, "tmpFrame.buf.idx:%d\n", pSp->pCapAction->tmpFrameBuf.idx);
  );
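The cbOnPkt_* callbacks in this set (VP8 above, SILK and AAC below) share the same loss-handling contract: a NULL user-data context is a hard error, while a NULL payload means the RTP packet was lost, so the in-progress frame is flagged as damaged and 0 is returned so capture continues. Below is a minimal self-contained sketch of that contract; the types and flag names are simplified stand-ins, not the real openvcx definitions.

#include <stdio.h>
#include <stdint.h>

// Simplified stand-ins for the openvcx types and flags (hypothetical).
#define SP_FLAG_DAMAGEDFRAME 0x01
#define SP_FLAG_PREVLOST     0x02

typedef struct {
  const uint8_t *pData;    // NULL when the packet was lost
  unsigned int len;
} PKT_T;

typedef struct {
  uint16_t lastPktSeq;
  uint32_t lastPktTs;
  unsigned int spFlags;
} SP_T;

// Mirrors the contract of cbOnPkt_vp8/cbOnPkt_silk: NULL context -> fatal (-1);
// NULL payload -> mark the in-progress frame damaged, but keep capturing (0).
static int on_pkt(void *pUserData, const PKT_T *pPkt) {
  SP_T *pSp = (SP_T *) pUserData;

  if(pSp == NULL) {
    return -1;
  } else if(!pPkt || !pPkt->pData) {
    fprintf(stderr, "lost packet, last seq:%u, last ts:%u\n",
            pSp->lastPktSeq, pSp->lastPktTs);
    pSp->spFlags |= (SP_FLAG_DAMAGEDFRAME | SP_FLAG_PREVLOST);
    return 0;
  }

  // ... depacketize pPkt->pData into the current frame here ...
  return 0;
}

int main(void) {
  SP_T sp = { 0, 0, 0 };
  on_pkt(&sp, NULL);   // simulated loss: sets both damage flags
  printf("spFlags after loss: 0x%x\n", sp.spFlags);
  return 0;
}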
Code example #2
File: capture_pkt_silk.c Project: amirsdream/openvcx
int cbOnPkt_silk(void *pUserData, const COLLECT_STREAM_PKTDATA_T *pPkt) {
  CAPTURE_CBDATA_SP_T *pSp = (CAPTURE_CBDATA_SP_T *) pUserData;
  const unsigned char *pData = pPkt ? pPkt->payload.pData : NULL;
  unsigned int len = pPkt ? PKTCAPLEN(pPkt->payload) : 0;
  CAPTURE_STORE_CBDATA_T cbData;
  int queueFr = 0;
  int rc = 0;

  if(pSp == NULL) {
    return -1;
  } else if(!pPkt || !pPkt->payload.pData) {
    // Packet was lost
    LOG(X_WARNING("RTP SILK lost packet, last seq:%u, last ts:%u"), pSp->lastPktSeq, pSp->lastPktTs);
    pSp->spFlags |= (CAPTURE_SP_FLAG_DAMAGEDFRAME | CAPTURE_SP_FLAG_PREVLOST);
    return 0;
  }

  VSX_DEBUG_RTP(
    LOG(X_DEBUG("RTP - rtp-recv-silk len:%d seq:%u ts:%u(%.3f) ts0:%u(%.3f, dlta:%.3f), mrk:%d ssrc:0x%x "
                   "flags:0x%x pQ:0x%x"),
       PKTCAPLEN(pPkt->payload), pPkt->u.rtp.seq, pPkt->u.rtp.ts, PTSF(pPkt->u.rtp.ts),
       pSp->pStream->ts0,
       (pSp->pStream->clockHz > 0 ? ((double)pSp->pStream->ts0/pSp->pStream->clockHz) : 0),
       (pSp->pStream->clockHz > 0 ? ((double)(pPkt->u.rtp.ts - pSp->pStream->ts0)/pSp->pStream->clockHz) : 0),
       (pPkt->u.rtp.marksz & PKTDATA_RTP_MASK_MARKER)?1:0,
       pSp->pStream->hdr.key.ssrc, pSp->spFlags, pSp->pCapAction && pSp->pCapAction->pQueue ? 1 : 0);
    if(pPkt && PKTCAPLEN(pPkt->payload) > 0) {
      LOGHEX_DEBUG(pPkt->payload.pData, MIN(16, PKTCAPLEN(pPkt->payload)));
    }
  );
Code example #3
File: hdumpdev.c Project: GregorR/humidity
int main(int argc, char **argv)
{
    int argi, i;
    char *arg, *nextarg;
    PmError perr;
    PtError pterr;

    PmDeviceID dev = -1;
    int list = 0;

    for (argi = 1; argi < argc; argi++) {
        arg = argv[argi];
        nextarg = argv[argi+1];
        if (arg[0] == '-') {
            if (!strcmp(arg, "-l")) {
                list = 1;
            } else if (!strcmp(arg, "-i") && nextarg) {
                dev = atoi(nextarg);
                argi++;
            } else {
                fprintf(stderr, "Invalid invocation.\n");
                exit(1);
            }
        }
    }

    PSF(perr, Pm_Initialize, ());
    PTSF(pterr, Pt_Start, (1, dump, NULL));

    /* list devices */
    if (list) {
        int ct = Pm_CountDevices();
        PmDeviceID def = Pm_GetDefaultInputDeviceID();
        const PmDeviceInfo *devinf;

        for (i = 0; i < ct; i++) {
            devinf = Pm_GetDeviceInfo(i);
            printf("%d%s: %s%s %s\n", i, (def == i) ? "*" : "",
                (devinf->input) ? "I" : "",
                (devinf->output) ? "O" : "",
                devinf->name);
        }
    }

    /* choose device */
    if (dev == -1) {
        fprintf(stderr, "Warning: Using default device.\n");
        dev = Pm_GetDefaultInputDeviceID();
    }

    /* open it for input */
    PSF(perr, Pm_OpenInput, (&stream, dev, NULL, 1024, NULL, NULL));
    PSF(perr, Pm_SetFilter, (stream, PM_FILT_ACTIVE | PM_FILT_SYSEX));

    while (1) Pt_Sleep(1<<30); /* 1<<31 would overflow a signed 32-bit int */

    return 0;
}
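Invocation follows from the argument loop: "hdumpdev -l" lists devices (the default input marked with *), "hdumpdev -i 3" dumps from input device 3, and with no -i it falls back to the default input with a warning. PSF and PTSF are error-check wrapper macros defined elsewhere in the humidity tree; a plausible minimal reconstruction, consistent with how they are called here (assign the result, bail out on failure), would be:

#include <stdio.h>
#include <stdlib.h>
#include <portmidi.h>   /* PmError, pmNoError, Pm_GetErrorText */
#include <porttime.h>   /* PtError, ptNoError */

/* Hypothetical reconstruction -- not the project's actual definitions. */
#define PSF(perr, fun, args) do { \
        if ((perr = fun args) < pmNoError) { \
            fprintf(stderr, "%s: %s\n", #fun, Pm_GetErrorText(perr)); \
            exit(1); \
        } \
    } while (0)

#define PTSF(pterr, fun, args) do { \
        if ((pterr = fun args) != ptNoError) { \
            fprintf(stderr, "%s failed: %d\n", #fun, (int) pterr); \
            exit(1); \
        } \
    } while (0)

int main(void) {
    PmError perr;
    PSF(perr, Pm_Initialize, ());   /* expands to: perr = Pm_Initialize(); check; */
    Pm_Terminate();
    return 0;
}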
Code example #4
File: capture_pkt_aac.c Project: amirsdream/openvcx
int cbOnPkt_aac(void *pUserData, const COLLECT_STREAM_PKTDATA_T *pPkt) {
  CAPTURE_CBDATA_SP_T *pSp = (CAPTURE_CBDATA_SP_T *) pUserData;
  const unsigned char *pData = pPkt ? pPkt->payload.pData : NULL;
  unsigned int len = pPkt ? PKTCAPLEN(pPkt->payload) : 0;
  unsigned char adtsHdr[8];
  const ESDS_DECODER_CFG_T *pDecoderCfg;
  unsigned int numAuFrames;
  unsigned int idx;
  unsigned int payloadLen;
  unsigned int fragmentLen;
  unsigned int payloadStartIdx;
  uint32_t ts;
  CAPTURE_STORE_CBDATA_T cbData;
  int queueFr = 0;
  int rc = 0;


  //
  // TODO: pPkt ts, seq, ssrc, etc. are only set for RTP if the capture config is read
  // from an SDP file and goes through stream_net_av (streaming capture only).
  // Capture for recording and stream_net_pes uses the deprecated way of queuing raw
  // network data for later depacketization, which calls this cb from the queue reader.
  //

  if(pSp == NULL) {
    return -1;
  } else if(pData == NULL) {
    // Packet was lost
    LOG(X_WARNING("Lost AAC-hbr RTP packet"));
    pSp->spFlags &= ~CAPTURE_SP_FLAG_INFRAGMENT;
    return 0;
  }

  VSX_DEBUG_RTP(
    LOG(X_DEBUG("RTP - rtp-recv-aac len:%d seq:%u ts:%u(%.3f) ts0:%u(%.3f, dlta:%.3f), mrk:%d ssrc:0x%x "
                   "flags:0x%x pQ:0x%x"),
       PKTCAPLEN(pPkt->payload), pPkt->u.rtp.seq, pPkt->u.rtp.ts, PTSF(pPkt->u.rtp.ts),
       pSp->pStream->ts0,
       (pSp->pStream->clockHz > 0 ? ((double)pSp->pStream->ts0/pSp->pStream->clockHz) : 0),
       (pSp->pStream->clockHz > 0 ? ((double)(pPkt->u.rtp.ts - pSp->pStream->ts0)/pSp->pStream->clockHz) : 0),
       (pPkt->u.rtp.marksz & PKTDATA_RTP_MASK_MARKER)?1:0,
       pSp->pStream->hdr.key.ssrc, pSp->spFlags, pSp->pCapAction && pSp->pCapAction->pQueue ? 1 : 0);
    if(pPkt) {
      LOGHEX_DEBUG(pPkt->payload.pData, MIN(16, PKTCAPLEN(pPkt->payload)));
    }
  );
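The adtsHdr buffer declared above suggests each depacketized AAC access unit gets re-wrapped in an ADTS header before storage. The sketch below packs the standard 7-byte ADTS header (ISO/IEC 14496-3 framing, no CRC); it illustrates the format generally and is not the openvcx implementation. frameLen counts the 7 header bytes plus the AU.

#include <stdint.h>
#include <stdio.h>

// Standard 7-byte ADTS header layout (hypothetical helper, not openvcx code).
// objTypeMinus1: audio object type - 1 (AAC-LC = 2 -> pass 1);
// freqIdx: sampling frequency index; chanCfg: channel configuration.
static void write_adts_hdr(uint8_t hdr[7], unsigned int objTypeMinus1,
                           unsigned int freqIdx, unsigned int chanCfg,
                           unsigned int frameLen) {
  hdr[0] = 0xff;                                    // syncword 0xFFF ...
  hdr[1] = 0xf1;                                    // ... MPEG-4, layer 0, no CRC
  hdr[2] = (uint8_t) ((objTypeMinus1 << 6) | (freqIdx << 2) | (chanCfg >> 2));
  hdr[3] = (uint8_t) (((chanCfg & 0x3) << 6) | ((frameLen >> 11) & 0x3));
  hdr[4] = (uint8_t) ((frameLen >> 3) & 0xff);      // frame length, middle bits
  hdr[5] = (uint8_t) (((frameLen & 0x7) << 5) | 0x1f);  // len low bits + fullness
  hdr[6] = 0xfc;                                    // fullness low bits, 1 AU
}

int main(void) {
  uint8_t hdr[7];
  // AAC-LC, 44.1 kHz (index 4), stereo, 100-byte AU plus the 7-byte header
  write_adts_hdr(hdr, 1, 4, 2, 100 + 7);
  for (int i = 0; i < 7; i++) printf("%02x ", hdr[i]);
  printf("\n");
  return 0;
}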
Code example #5
File: playfile.c Project: GregorR/midifile
int main(int argc, char **argv)
{
    FILE *f;
    PmError perr;
    PtError pterr;
    MfFile *pf;
    int argi, i;
    char *arg, *nextarg, *file;

    PmDeviceID dev = -1;
    int list = 0;
    file = NULL;

    for (argi = 1; argi < argc; argi++) {
        arg = argv[argi];
        nextarg = argv[argi+1];
        if (arg[0] == '-') {
            if (!strcmp(arg, "-l")) {
                list = 1;
            } else if (!strcmp(arg, "-o") && nextarg) {
                dev = atoi(nextarg);
                argi++;
            } else {
                fprintf(stderr, "Invalid invocation.\n");
                exit(1);
            }
        } else {
            file = arg;
        }
    }

    PSF(perr, Pm_Initialize, ());
    PSF(perr, Mf_Initialize, ());
    PTSF(pterr, Pt_Start, (1, play, NULL));

    /* list devices */
    if (list) {
        int ct = Pm_CountDevices();
        PmDeviceID def = Pm_GetDefaultOutputDeviceID();
        const PmDeviceInfo *devinf;

        for (i = 0; i < ct; i++) {
            devinf = Pm_GetDeviceInfo(i);
            printf("%d%s: %s%s %s\n", i, (def == i) ? "*" : "",
                (devinf->input) ? "I" : "",
                (devinf->output) ? "O" : "",
                devinf->name);
        }
    }

    /* choose device */
    if (dev == -1) {
        fprintf(stderr, "No device selected.\n");
        exit(1);
    }

    /* open it for output */
    PSF(perr, Pm_OpenOutput, (&ostream, dev, NULL, 1024, NULL, NULL, 0));

    /* open the MIDI file */
    f = fopen(file, "rb");
    if (f == NULL) {
        perror(file);
        exit(1);
    }

    /* and read it */
    PSF(perr, Mf_ReadMidiFile, (&pf, f));
    fclose(f);

    /* now start running */
    stream = Mf_OpenStream(pf);
    Mf_StartStream(stream, Pt_Time());

    /* FIXME: I sure hope this doesn't get reordered >_> */
    ready = 1;

    while (1) Pt_Sleep(1<<30);

    return 0;
}
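Usage follows from the argument loop: "playfile -l" lists devices, and "playfile -o 2 song.mid" plays song.mid on output device 2; unlike hdumpdev there is no default-device fallback. The FIXME before "ready = 1" is about store reordering: the flag is read by the play() timer callback on another thread, so the store must become visible only after the stream is fully set up. In C11 that concern is expressed with a release store; a minimal sketch, assuming ready were declared atomic:

#include <stdatomic.h>
#include <stdio.h>

static atomic_int ready;   /* shared with the timer-callback thread */

int main(void) {
    /* ... device and stream setup would happen here ... */
    atomic_store_explicit(&ready, 1, memory_order_release);  /* not reordered before setup */
    printf("ready=%d\n", atomic_load(&ready));
    return 0;
}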
Code example #6
File: stream_net_av.c Project: amirsdream/openvcx
static enum STREAM_NET_ADVFR_RC checktiming(STREAM_AV_DATA_T *pData) {
  enum STREAM_NET_ADVFR_RC rc = STREAM_NET_ADVFR_RC_OK;
  STREAM_AV_DATA_T *pDataComplement;
  int64_t ptsdelta;
  uint64_t tm0, tm1;

  //fprintf(stderr, "checktiming numF:%d, haveOffset:%d, %.3f\n", pData->numFrames, pData->pXcodeData->haveFrameTmStartOffset, PTSF(pData->pXcodeData->frameTmStartOffset));

  if(pData->numFrames == 0) {

    if(!pData->pXcodeData->haveFrameTmStartOffset) {

      //
      // Set the start of frame reception time
      //
      pData->pXcodeData->frameTmStartOffset = pData->curPesTm.qtm.dts ?
                 pData->curPesTm.qtm.dts : pData->curPesTm.qtm.pts;
      //fprintf(stderr, "cb pts:%.3f dts:%.3f\n", PTSF(pData->curPesTm.qtm.pts), PTSF(pData->curPesTm.qtm.dts));
      LOG(X_DEBUG("Setting av %s stream "MP2PES_STREAMTYPE_FMT_STR" pts start offset to %.3f"),
         IS_STREAM_PES_DATA_VID(pData) ? "video" : "audio",
         MP2PES_STREAMTYPE_FMT_ARGS(pData->pXcodeData->inStreamType), 
         PTSF(pData->pXcodeData->frameTmStartOffset));

      pData->pXcodeData->haveFrameTmStartOffset = 1;
    }

    if((pDataComplement = getComplement(pData))) {
      pktqueue_setrdr((PKTQUEUE_T *) pDataComplement->pDataSrc->pCbData, 0);

      if((pData->streamflags & VSX_STREAMFLAGS_AV_SAME_START_TM) &&
         !pDataComplement->pXcodeData->haveFrameTmStartOffset) {
        pDataComplement->pXcodeData->frameTmStartOffset = 
                                     pData->pXcodeData->frameTmStartOffset;
        LOG(X_DEBUG("Setting av %s complementary stream "MP2PES_STREAMTYPE_FMT_STR
                    " pts to matching start offset to %.3f"), 
              IS_STREAM_PES_DATA_VID(pData) ? "audio" : "video",
             MP2PES_STREAMTYPE_FMT_ARGS(pDataComplement->pXcodeData->inStreamType), 
              pDataComplement->pXcodeData->frameTmStartOffset);
        pDataComplement->pXcodeData->haveFrameTmStartOffset = 1;
      } 
    }

  } else {

    tm0 = pData->prevPesTm.qtm.dts ? pData->prevPesTm.qtm.dts :
          pData->prevPesTm.qtm.pts;
    tm1 = pData->curPesTm.qtm.dts ? pData->curPesTm.qtm.dts :
          pData->curPesTm.qtm.pts;
    ptsdelta = tm1 - tm0;

    if((ptsdelta > 0 && ptsdelta > 5000 * PTS_HZ_MS) ||
       (ptsdelta < 0 && ptsdelta < -1 * 5000 * PTS_HZ_MS)) {

      LOG(X_WARNING("Large pts %s jump pts:%.3f dts:%.3f -> "
        "pts:%.3f dts:%.3f (%"LL64"d %.3fsec)"), 
         IS_STREAM_PES_DATA_VID(pData) ? "video" : "audio",
          PTSF(pData->prevPesTm.qtm.pts), PTSF(pData->prevPesTm.qtm.dts), 
          PTSF(pData->curPesTm.qtm.pts), PTSF(pData->curPesTm.qtm.dts), 
          ptsdelta, PTSF(ptsdelta));

      rc = STREAM_NET_ADVFR_RC_RESET_TMGAP;

    } else if(pData->curPesTm.qtm.pts < pData->prevPesTm.qtm.pts &&
              ((pData->curPesTm.qtm.dts == 0 && pData->prevPesTm.qtm.dts == 0) ||
              (pData->curPesTm.qtm.dts != 0 && pData->prevPesTm.qtm.dts != 0 &&
              pData->curPesTm.qtm.dts + PTS_HZ_SEC < pData->prevPesTm.qtm.dts))) {

      LOG(X_WARNING("%s program time went backwards pts: %"LL64"uHz %.3f -> %"LL64"uHz %.3f"
                    ", dts: %.3f -> %.3f"), 
         IS_STREAM_PES_DATA_VID(pData) ? "video" : "audio",
         pData->prevPesTm.qtm.pts, PTSF(pData->prevPesTm.qtm.pts),
         pData->curPesTm.qtm.pts, PTSF(pData->curPesTm.qtm.pts),
         PTSF(pData->prevPesTm.qtm.dts), PTSF(pData->curPesTm.qtm.dts));

      rc = STREAM_NET_ADVFR_RC_RESET_TMBKWD;
    }
  }

  pData->numFrames++;
  memcpy(&pData->prevPesTm.qtm, &pData->curPesTm.qtm, sizeof(pData->prevPesTm.qtm));

  return rc;
}
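The two reset decisions inside checktiming() are easy to lose among the logging. The sketch below distills them into a standalone predicate; PTS_HZ_MS and PTS_HZ_SEC are assumed here to be the usual 90 kHz MPEG clock constants (openvcx defines its own values elsewhere).

#include <stdint.h>
#include <stdio.h>

// Assumed 90 kHz MPEG PTS clock (hypothetical stand-ins for the openvcx constants).
#define PTS_HZ_MS  90       // ticks per millisecond
#define PTS_HZ_SEC 90000    // ticks per second

enum tm_rc { TM_OK, TM_RESET_GAP, TM_RESET_BACKWARD };

// Distills checktiming(): prefer dts over pts as the reference clock; a |delta|
// above 5 seconds is a gap; a pts that moves backwards (with both dts absent,
// or dts agreeing by more than a second) is a rewind.
static enum tm_rc classify(uint64_t prevPts, uint64_t prevDts,
                           uint64_t curPts,  uint64_t curDts) {
  uint64_t tm0 = prevDts ? prevDts : prevPts;
  uint64_t tm1 = curDts ? curDts : curPts;
  int64_t ptsdelta = (int64_t) (tm1 - tm0);

  if(ptsdelta > 5000 * PTS_HZ_MS || ptsdelta < -5000 * PTS_HZ_MS)
    return TM_RESET_GAP;

  if(curPts < prevPts &&
     ((curDts == 0 && prevDts == 0) ||
      (curDts != 0 && prevDts != 0 && curDts + PTS_HZ_SEC < prevDts)))
    return TM_RESET_BACKWARD;

  return TM_OK;
}

int main(void) {
  // A 6-second forward jump at 90 kHz triggers the gap reset.
  printf("%d\n", classify(0, 0, 6 * PTS_HZ_SEC, 0) == TM_RESET_GAP);
  return 0;
}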
Code example #7
File: stream_net_av.c Project: amirsdream/openvcx
enum STREAM_NET_ADVFR_RC stream_net_av_advanceFrame(STREAM_NET_ADVFR_DATA_T *pArg) {

  STREAM_AV_DATA_T *pData = (STREAM_AV_DATA_T *) pArg->pArgIn;
  STREAM_PES_T *pPes = (STREAM_PES_T *) pData->pPes;
  enum STREAM_NET_ADVFR_RC rc = STREAM_NET_ADVFR_RC_OK;
  PKTQUEUE_T *pQ;
  const PKTQUEUE_PKT_T *pQPkt;

  if(!pData->pDataSrc || !pData->pDataSrc->pCbData) {
    if(pArg->plen) {
      *pArg->plen = 0;
    }
    return STREAM_NET_ADVFR_RC_NOCONTENT;
  }

  if(pArg->pkeyframeIn) {
    *pArg->pkeyframeIn = 0;
  }

  //fprintf(stderr, "av_advanceFrame called for %s stype:0x%x\n",  pData == &pPes->vid ? "vid" : "aud", pData->pXcodeData->inStreamType);

  waitfordata(pData);

  pQ = (PKTQUEUE_T *) pData->pDataSrc->pCbData;

  if(!pktqueue_havepkt(pQ) || !(pQPkt = pktqueue_readpktdirect(pQ))) {
    //fprintf(stderr, "ad_advanceFrame NOTAVAIL qid:%d haveData:%d wr-1:%d\n", pQ->cfg.id, pQ->haveData, pQ->uniqueWrIdx - 1);
    pktqueue_readpktdirect_done(pQ);
    return STREAM_NET_ADVFR_RC_NOTAVAIL;
  }

  //
  // Avoid memcpy of the frame data
  //
  if(pktqueue_swapreadslots(pQ, &pData->pXcodeData->curFrame.pSwappedSlot) < 0) {
    LOG(X_ERROR("Failed to swap slot in queue id:%d"), pQ->cfg.id);
    pktqueue_readpktdirect_done(pQ);
    return STREAM_NET_ADVFR_RC_ERROR;
  } else {
    pQPkt = pData->pXcodeData->curFrame.pSwappedSlot;
  }

  //fprintf(stderr, "stream_net_av advanceFr pQ[%d] len:%d\n", pQ->cfg.id, pQPkt->len);

  // Note this is just a shallow copy; the frame data contents are not copied
  memcpy(&pData->pXcodeData->curFrame.pkt, pQPkt, sizeof(pData->pXcodeData->curFrame.pkt));

  //fprintf(stderr, "ad_advanceFrame got fr len:%d wrIdx:%d pQ->userDataType:0x%x\n", pQPkt->len,pData->pXcodeData->curFrame.pkt.idx, pQ->cfg.userDataType); 

  pData->pXcodeData->curFrame.idxReadInFrame = 0;
  pData->pXcodeData->curFrame.idxReadFrame = pQ->idxRd;

  //fprintf(stderr, "AVREAD PKT FROM Q pts:%.3f dts:%.3f\n", PTSF(pData->pXcodeData->curFrame.pkt.xtra.tm.pts), PTSF(pData->pXcodeData->curFrame.pkt.xtra.tm.dts));

  pktqueue_readpktdirect_done(pQ);

#if 1
  //
  // If we're reading video & audio fromlive capture, and the input video sequence headers have not yet been set,
  // then just keep advancing the audio queue rdr position, otherwise, we keep on queing audio frames,
  // then when the vid seq start has been detected, the audio frames will be read, but by then it's possible
  // the audio queue has been reset, overwriting some 'to-be-played' audio frames
  //
  //fprintf(stderr, "TRY... vid:%d, xcodevid:%d, vid in-seq:%d, complement:0x%x\n", IS_STREAM_PES_DATA_VID(pData), pData->pXcodeData->piXcode->vid.common.cfgDo_xcode,  ((STREAM_XCODE_VID_UDATA_T *) pData->pXcodeData->piXcode->vid.pUserData)->haveSeqStart, getComplement(pData));
  STREAM_AV_DATA_T *pDataComplement;
  PKTQUEUE_T *pQComplement;
  
  if(IS_STREAM_PES_DATA_VID(pData) && pData->pXcodeData->piXcode->vid.common.cfgDo_xcode &&
     !((STREAM_XCODE_VID_UDATA_T *) pData->pXcodeData->piXcode->vid.pUserData)->haveSeqStart &&
     (pDataComplement = getComplement(pData)) &&
     (pQComplement = (PKTQUEUE_T *) pDataComplement->pDataSrc->pCbData)) {

    // Only retain the last 'x' elements in the queue... to prevent any overwrite / reset

    //fprintf(stderr, "Setting av complement reader to catchup.  haveFrameTmOffset:%d %.3f\n", pDataComplement->pXcodeData->haveFrameTmStartOffset, PTSF(pDataComplement->pXcodeData->frameTmStartOffset));

    pktqueue_setrdr(pQComplement, 1);

    if(!pDataComplement->pXcodeData->haveFrameTmStartOffset) {

      pthread_mutex_lock(&pQComplement->mtx);

      if(pQComplement->idxRd != pQComplement->idxWr) {
        LOG(X_DEBUG("Setting av %s stream "MP2PES_STREAMTYPE_FMT_STR" pts start offset from %.3f to %.3f"),
           IS_STREAM_PES_DATA_VID(pData) ? "audio" : "video",
           MP2PES_STREAMTYPE_FMT_ARGS(pData->pXcodeData->inStreamType), 
           PTSF(pData->pXcodeData->frameTmStartOffset),
           PTSF(pQComplement->pkts[pQComplement->idxRd].xtra.tm.pts));

        LOG(X_DEBUG("Setting av complement to idxRd:%d, idxWr:%d, rd:%.3f, wr:%.3f, wr-1:%.3f"), pQComplement->idxRd, pQComplement->idxWr, PTSF(pQComplement->pkts[pQComplement->idxRd].xtra.tm.pts), PTSF(pQComplement->pkts[pQComplement->idxWr].xtra.tm.pts), PTSF(pQComplement->pkts[pQComplement->idxWr == 0 ? pQComplement->cfg.maxPkts-1 : pQComplement->idxWr -1].xtra.tm.pts));
        pDataComplement->pXcodeData->frameTmStartOffset = pQComplement->pkts[pQComplement->idxRd].xtra.tm.pts;
        pDataComplement->pXcodeData->haveFrameTmStartOffset = 1;
      }

      pthread_mutex_unlock(&pQComplement->mtx);

    }

    //pDataComplement->pXcodeData->frameTmStartOffset = pData->pXcodeData->frameTmStartOffset;
  }
#endif // 1

  VSX_DEBUGLOG3("lastQWrIdx now:%d\n", pData->lastQWrIdx);

  memcpy(&pData->curPesTm.qtm, &pData->pXcodeData->curFrame.pkt.xtra.tm, 
         sizeof(pData->curPesTm.qtm));

  // Do not use PKTQUEUE_T pkt contents directly to allow for any
  // prebuf contents such as SPS / PPS packaged w/ each I-frame    
  pData->pXcodeData->curFrame.pData = pData->pXcodeData->curFrame.pkt.pData;    
  pData->pXcodeData->curFrame.lenData = pData->pXcodeData->curFrame.pkt.len;
  pArg->isvid = (pData == &pPes->vid) ? 1 : 0;

  if(pArg->plen) {
    *pArg->plen = pData->pXcodeData->curFrame.lenData;
  }
  
  //if((rc = checktiming(pData)) == STREAM_NET_ADVFR_RC_RESET_TMGAP) {
  if((rc = checktiming(pData)) == STREAM_NET_ADVFR_RC_RESET_TMGAP || rc == STREAM_NET_ADVFR_RC_RESET_TMBKWD) {
    stream_net_av_reset(pData->pPes);
  }

  if(pArg->pPts) {
    *pArg->pPts = xcode_getFrameTm(pData->pXcodeData, 0, 0);
    //fprintf(stderr, "AV curF: pts:%.3f dts:%.3f start:%.3f, fr:%.3f (%llu)\n", PTSF(pData->pXcodeData->curFrame.pkt.xtra.tm.pts), PTSF(pData->pXcodeData->curFrame.pkt.xtra.tm.dts), PTSF(pData->pXcodeData->frameTmStartOffset), PTSF(*pArg->pPts), *pArg->pPts);
  }


  if(pArg->pkeyframeIn && (pData->pXcodeData->curFrame.pkt.flags & PKTQUEUE_FLAG_KEYFRAME)) {
    //k(pData->pXcodeData->curFrame.pkt.xtra.flags & CAPTURE_SP_FLAG_KEYFRAME)) {
    *pArg->pkeyframeIn = 1;
  } else {
    *pArg->pkeyframeIn = 0;
  }

  pArg->codecType = pQ->cfg.userDataType;

  //LOG(X_DEBUG("av_advanceFrame %s key:%d rc:%d  pts:%.3f (%.3f) (dts:%.3f) start:%.3f len:%u, Q[rd:%d,wr:%d/%d]"), pData == &pPes->vid ? "vid" : "aud", *pArg->pkeyframeIn, rc, PTSF(*pArg->pPts), PTSF(pData->pXcodeData->curFrame.pkt.xtra.tm.pts), PTSF(pData->pXcodeData->curFrame.pkt.xtra.tm.dts), PTSF(pData->pXcodeData->frameTmStartOffset), pData->pXcodeData->curFrame.lenData, pQ->idxRd, pQ->idxWr, pQ->cfg.maxPkts); 


  return rc;
}
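The pktqueue_swapreadslots() call above is what makes the read zero-copy: instead of memcpy'ing the payload out of the queue, the reader trades its spare slot pointer for the queue's filled slot, so only pointers move. A toy illustration of the idea (not the real openvcx pktqueue API):

#include <stdio.h>
#include <string.h>

// Toy slot-swap queue (hypothetical types, for illustration only).
typedef struct {
  unsigned char data[1500];
  unsigned int len;
} SLOT_T;

typedef struct {
  SLOT_T *slots[4];
  unsigned int idxRd;
} QUEUE_T;

// Trade the reader's spare slot for the queue's current read slot: the queue
// reuses the old spare for future writes; the reader now owns the filled slot.
static void swap_read_slot(QUEUE_T *q, SLOT_T **ppSpare) {
  SLOT_T *tmp = q->slots[q->idxRd];
  q->slots[q->idxRd] = *ppSpare;
  *ppSpare = tmp;
}

int main(void) {
  static SLOT_T storage[5];
  QUEUE_T q = { { &storage[0], &storage[1], &storage[2], &storage[3] }, 0 };
  SLOT_T *spare = &storage[4];

  memcpy(q.slots[0]->data, "frame", 6);   // writer fills the slot at idxRd
  q.slots[0]->len = 6;

  swap_read_slot(&q, &spare);             // no payload copy, only pointers move
  printf("reader got %u bytes: %s\n", spare->len, spare->data);
  return 0;
}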