Example #1
0
/*
 * Decide whether the supplied access unit is a sync (key) frame for the
 * codec configured on this ffmpeg instance.  Returns 1 for a sync point,
 * 0 otherwise; unknown codecs are treated as always-sync.
 */
static int ffmpeg_frame_is_sync (codec_data_t *ifptr,
                                 uint8_t *buffer,
                                 uint32_t buflen,
                                 void *userdata)
{
    ffmpeg_codec_t *ffmpeg = (ffmpeg_codec_t *)ifptr;

    switch (ffmpeg->m_codecId) {
    case CODEC_ID_H264: {
        // Walk the NAL units: a sequence parameter set or an IDR slice
        // marks a point where decode can start.
        uint32_t skip;
        do {
            uint8_t nal_type = h264_nal_unit_type(buffer);
            if (nal_type == H264_NAL_TYPE_SEQ_PARAM) return 1;
            //ffmpeg_message(LOG_DEBUG, "ffmpeg", "nal type %u", nal_type);
            if (h264_nal_unit_type_is_slice(nal_type)) {
                // Any slice other than an IDR ends the search: not sync.
                if (nal_type == H264_NAL_TYPE_IDR_SLICE) return 1;
#if 0
                uint8_t slice_type;
                if (h264_find_slice_type(buffer, buflen, &slice_type) >= 0) {
                    return H264_TYPE_IS_I(slice_type) ? 1 : 0;
                }
                return 0;
#else
                return 0;
#endif
            }
            skip = h264_find_next_start_code(buffer, buflen);
            buffer += skip;
            buflen -= skip;
        } while (skip != 0);
        break;
    }
    case CODEC_ID_MPEG2VIDEO: {
        // MPEG-2: a picture header with coding type 1 (I picture) is sync.
        int ftype;
        int ret = MP4AV_Mpeg3FindPictHdr(buffer, buflen, &ftype);
        // NOTE(review): logging every probe at LOG_ERR looks like leftover
        // debugging - confirm whether this should be LOG_DEBUG.
        ffmpeg_message(LOG_ERR, "ffmpeg", "ret %u type %u", ret, ftype);
        if (ret >= 0 && ftype == 1) {
            return 1;
        }
        break;
    }
    case CODEC_ID_MPEG4: {
        // MPEG-4 part 2: locate the VOP header and test for an I-VOP.
        uint8_t *vop = MP4AV_Mpeg4FindVop(buffer, buflen);
        if (vop == NULL) return 0;
        if (MP4AV_Mpeg4GetVopType(vop, buflen - (vop - buffer)) == VOP_TYPE_I)
            return 1;
        break;
    }
    default:
        // for every other, return that it is sync
        return 1;
    }
    return 0;
}
Example #2
0
/*
 * Add RTP hint data for one MPEG-4 video sample, per RFC 3016.
 * The sample is fragmented into packets of at most maxPayloadSize bytes;
 * the first sample is preceded by an ES configuration packet.
 * Returns false as soon as any mp4 library call fails.
 */
extern "C" bool MP4AV_Rfc3016_HintAddSample (
    MP4FileHandle mp4File,
    MP4TrackId hintTrackId,
    MP4SampleId sampleId,
    uint8_t *pSampleBuffer,
    uint32_t sampleSize,
    MP4Duration duration,
    MP4Duration renderingOffset,
    bool isSyncSample,
    uint16_t maxPayloadSize)
{
    // B-VOPs are flagged so the hint can mark their packets droppable.
    bool isBFrame =
        (MP4AV_Mpeg4GetVopType(pSampleBuffer, sampleSize) == VOP_TYPE_B);

    if (!MP4AddRtpVideoHint(mp4File, hintTrackId, isBFrame, renderingOffset))
        return false;

    // Prepend the elementary-stream configuration to the very first sample.
    if (sampleId == 1 &&
        !MP4AddRtpESConfigurationPacket(mp4File, hintTrackId))
        return false;

    u_int32_t offset = 0;
    u_int32_t remaining = sampleSize;

    // TBD should scan for resync markers (if enabled in ES config)
    // and packetize on those boundaries

    while (remaining) {
        u_int32_t length = remaining;
        bool isLastPacket = true;

        // More than one payload left: emit a full-size, non-final packet.
        if (remaining > maxPayloadSize) {
            length = maxPayloadSize;
            isLastPacket = false;
        }

        if (!MP4AddRtpPacket(mp4File, hintTrackId, isLastPacket))
            return false;
        if (!MP4AddRtpSampleData(mp4File, hintTrackId, sampleId,
                                 offset, length))
            return false;

        offset += length;
        remaining -= length;
    }

    return MP4WriteRtpHint(mp4File, hintTrackId, duration, isSyncSample);
}
Example #3
0
// Report whether this buffer holds an MPEG-4 I-VOP (a sync frame).
// Returns 1 for an I-VOP, 0 for anything else.
static int xvid_frame_is_sync (codec_data_t *ptr,
			       uint8_t *buffer, 
			       uint32_t buflen,
			       void *userdata)
{
  // Advance to the VOP start code (00 00 01 <VOP>) when one is present.
  for (;
       buflen > 3 &&
	 (buffer[0] != 0 || buffer[1] != 0 ||
	  buffer[2] != 1 || buffer[3] != MP4AV_MPEG4_VOP_START);
       buffer++, buflen--)
    ;

  // NOTE(review): if no start code was found we still probe the tail;
  // MP4AV_Mpeg4GetVopType is assumed to reject such input - confirm.
  return (MP4AV_Mpeg4GetVopType(buffer, buflen) == VOP_TYPE_I) ? 1 : 0;
}
// Scan an MPEG-4 elementary stream and print a short tag for each start
// code encountered; when dump_off is set, also print each code's byte
// offset from the start of the buffer.
static void ParseMpeg4 (uint8_t *bptr, uint32_t blen, bool dump_off)
{
  uint8_t *base = bptr;

  while (blen > 4) {
    // A start code is the 3-byte prefix 00 00 01 followed by the code byte.
    if (bptr[0] == 0 && bptr[1] == 0 && bptr[2] == 1) {
      uint8_t code = bptr[3];
      // NOTE: the order of these tests matters - several of the ranges
      // below overlap, so keep them exactly as-is.
      if (code > 0 && code < MP4AV_MPEG4_VOL_START) {
	printf(" VDOS");
      } else if (code < 0x2f) {
	printf(" VOL");
      } else if (code == MP4AV_MPEG4_VOSH_START) {
	printf(" VOSH");
      } else if (code == MP4AV_MPEG4_VOSH_END) {
	printf(" VOSHE");
      } else if (code == MP4AV_MPEG4_USER_DATA_START) {
	printf(" UD");
      } else if (code == MP4AV_MPEG4_GOV_START) {
	printf(" GOV");
      } else if (code == 0xB4) {
	printf(" VSE");
      } else if (code == MP4AV_MPEG4_VO_START) {
	printf(" VOS");
      } else if (code == MP4AV_MPEG4_VOP_START) {
	// For VOPs, decode the coding type for a more specific tag.
	switch (MP4AV_Mpeg4GetVopType(bptr, blen)) {
	case VOP_TYPE_I: printf(" VOP-I"); break;
	case VOP_TYPE_P: printf(" VOP-P"); break;
	case VOP_TYPE_B: printf(" VOP-B"); break;
	case VOP_TYPE_S: printf(" VOP-S"); break;
	}
      } else printf(" 0x%x", code);
      if (dump_off) {
	uint32_t off = bptr - base;
	printf("(%u)", off);
      }
    }
    bptr++;
    blen--;
  }
}
Example #5
0
// Return 1 if the buffer contains an MPEG-4 I-VOP (sync frame), else 0.
static int iso_frame_is_sync (codec_data_t *ptr,
			      uint8_t *buffer, 
			      uint32_t buflen,
			      void *userdata)
{
  u_char vop_type;

  // Seek forward to the VOP start code (00 00 01 <VOP>) if it exists.
  for (; buflen > 3; buffer++, buflen--) {
    if (buffer[0] == 0 && buffer[1] == 0 &&
	buffer[2] == 1 && buffer[3] == MP4AV_MPEG4_VOP_START)
      break;
  }

  vop_type = MP4AV_Mpeg4GetVopType(buffer, buflen);
#if 0
  {
  iso_decode_t *iso = (iso_decode_t *)ptr;
  iso_message(LOG_DEBUG, "iso", "return from get vop is %c %d", vop_type, vop_type);
  }
#endif
  return (vop_type == VOP_TYPE_I) ? 1 : 0;
}
Example #6
0
/*
 * Create an MPEG-4 video track in mp4File from the raw elementary stream
 * in inFile, optionally ISMAcryp-encrypting each sample.
 *
 * Parses the VOSH/VO/VOL headers to derive profile, frame rate and frame
 * size, creates the track, then walks the stream writing one sample per
 * VOP.  B-frame rendering offsets (ctts) are generated afterwards when
 * the profile requires them.
 *
 * Returns the new track id, or MP4_INVALID_TRACK_ID on error.
 *
 * NOTE(review): icPp is malloc'd below and never freed on any path,
 * including success - presumably a one-shot leak; confirm intent.
 */
MP4TrackId Mp4vCreator(MP4FileHandle mp4File, FILE* inFile, bool doEncrypt,
		       bool allowVariableFrameRate)
{
    bool rc;

    // Double-size buffer: the front half accumulates the current sample,
    // objects are loaded behind it, so overflow checks use half the size.
    u_int8_t sampleBuffer[256 * 1024 * 2];
    u_int8_t* pCurrentSample = sampleBuffer;
    u_int32_t maxSampleSize = sizeof(sampleBuffer) / 2;
    u_int32_t prevSampleSize = 0;

    // the current syntactical object
    // typically 1:1 with a sample
    // but not always, i.e. non-VOP's
    u_int8_t* pObj = pCurrentSample;
    u_int32_t objSize;
    u_int8_t objType;

    // the current sample
    MP4SampleId sampleId = 1;
    MP4Timestamp currentSampleTime = 0;

    // the last reference VOP
    MP4SampleId refVopId = 1;
    MP4Timestamp refVopTime = 0;

    // track configuration info (defaults used if headers are absent)
    u_int8_t videoProfileLevel = MPEG4_SP_L3;
    u_int8_t timeBits = 15;
    u_int16_t timeTicks = 30000;
    u_int16_t frameDuration = 3000;
    u_int16_t frameWidth = 320;
    u_int16_t frameHeight = 240;
    u_int32_t esConfigSize = 0;
    int vopType = 0;
    int prevVopType = 0;
    bool foundVOSH = false, foundVO = false, foundVOL = false;
    u_int32_t lastVopTimeIncrement = 0;
    bool variableFrameRate = false;
    bool lastFrame = false;
    bool haveBframes = false;
    // List of every written frame's type/timestamp, used later to
    // compute rendering offsets without re-reading samples.
    mpeg4_frame_t *head = NULL, *tail = NULL;

    // start reading objects until we get the first VOP
    while (LoadNextObject(inFile, pObj, &objSize, &objType)) {
        // guard against buffer overflow
        if (pObj + objSize >= pCurrentSample + maxSampleSize) {
            fprintf(stderr,
                    "%s: buffer overflow, invalid video stream?\n", ProgName);
            return MP4_INVALID_TRACK_ID;
        }
#ifdef DEBUG_MP4V
        if (Verbosity & MP4_DETAILS_SAMPLE) {
            printf("MP4V type %x size %u\n",
                    objType, objSize);
        }
#endif

        if (objType == MP4AV_MPEG4_VOSH_START) {
            MP4AV_Mpeg4ParseVosh(pObj, objSize,
                    &videoProfileLevel);
            foundVOSH = true;
        } else if (objType == MP4AV_MPEG4_VO_START) {
            foundVO = true;
        } else if (objType == MP4AV_MPEG4_VOL_START) {
            MP4AV_Mpeg4ParseVol(pObj, objSize,
                    &timeBits, &timeTicks, &frameDuration,
                    &frameWidth, &frameHeight);

            foundVOL = true;
#ifdef DEBUG_MP4V
            printf("ParseVol: timeBits %u timeTicks %u frameDuration %u\n",
                    timeBits, timeTicks, frameDuration);
#endif

        } else if (foundVOL == true || objType == MP4AV_MPEG4_VOP_START) {
            // Everything accumulated so far is the ES configuration.
            esConfigSize = pObj - pCurrentSample;
            // ready to set up mp4 track
            break;
        }
        /* XXX why do we need this if ?
         * It looks like it will remove this object ... XXX */
	// It does.  On Purpose.  wmay 6/2004
        if (objType != MP4AV_MPEG4_USER_DATA_START) {
            pObj += objSize;
        }
    }

    if (foundVOSH == false) {
        fprintf(stderr,
                "%s: no VOSH header found in MPEG-4 video.\n"
                "This can cause problems with players other than mp4player. \n",
                ProgName);
    } else {
        if (VideoProfileLevelSpecified &&
                videoProfileLevel != VideoProfileLevel) {
            fprintf(stderr,
                    "%s: You have specified a different video profile level than was detected in the VOSH header\n"
                    "The level you specified was %d and %d was read from the VOSH\n",
                    ProgName, VideoProfileLevel, videoProfileLevel);
        }
    }
    if (foundVO == false) {
        fprintf(stderr,
                "%s: No VO header found in mpeg-4 video.\n"
                "This can cause problems with players other than mp4player\n",
                ProgName);
    }
    if (foundVOL == false) {
        fprintf(stderr,
                "%s: fatal: No VOL header found in mpeg-4 video stream\n",
                ProgName);
        return MP4_INVALID_TRACK_ID;
    }

    // convert frame duration to canonical time scale
    // note zero value for frame duration signals variable rate video
    if (timeTicks == 0) {
        timeTicks = 1;
    }
    u_int32_t mp4FrameDuration = 0;

    if (VideoFrameRate) {
      // Command-line frame rate overrides what the VOL said.
      mp4FrameDuration = (u_int32_t)(((double)Mp4TimeScale) / VideoFrameRate);    
    } else if (frameDuration) {
	  VideoFrameRate = frameDuration;
	  VideoFrameRate /= timeTicks;
	  mp4FrameDuration = (Mp4TimeScale * frameDuration) / timeTicks;
    } else {
      if (allowVariableFrameRate == false ) {
	fprintf(stderr,
		"%s: variable rate video stream signalled,"
		" please specify average frame rate with -r option\n"
		" or --variable-frame-rate argument\n",
		ProgName);
	return MP4_INVALID_TRACK_ID;
      }

        variableFrameRate = true;
    }

    ismacryp_session_id_t ismaCrypSId;
    mp4v2_ismacrypParams *icPp =  (mp4v2_ismacrypParams *) malloc(sizeof(mp4v2_ismacrypParams));
    memset(icPp, 0, sizeof(mp4v2_ismacrypParams));


    // initialize ismacryp session if encrypting
    if (doEncrypt) {

        if (ismacrypInitSession(&ismaCrypSId,KeyTypeVideo) != 0) {
            fprintf(stderr, "%s: could not initialize the ISMAcryp session\n",
                    ProgName);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetScheme(ismaCrypSId, &(icPp->scheme_type)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp scheme type. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetSchemeVersion(ismaCrypSId, &(icPp->scheme_version)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp scheme ver. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetKMSUri(ismaCrypSId, &(icPp->kms_uri)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp kms uri. sid %d\n",
                    ProgName, ismaCrypSId);
            CHECK_AND_FREE(icPp->kms_uri);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if ( ismacrypGetSelectiveEncryption(ismaCrypSId, &(icPp->selective_enc)) != ismacryp_rc_ok ) {
            fprintf(stderr, "%s: could not get ismacryp selec enc. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetKeyIndicatorLength(ismaCrypSId, &(icPp->key_ind_len)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp key ind len. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
        if (ismacrypGetIVLength(ismaCrypSId, &(icPp->iv_len)) != ismacryp_rc_ok) {
            fprintf(stderr, "%s: could not get ismacryp iv len. sid %d\n",
                    ProgName, ismaCrypSId);
            ismacrypEndSession(ismaCrypSId);
            return MP4_INVALID_TRACK_ID;
        }
    }

    // create the new video track
    MP4TrackId trackId;
    if (doEncrypt) {
        trackId =
            MP4AddEncVideoTrack(
                    mp4File,
                    Mp4TimeScale,
                    mp4FrameDuration,
                    frameWidth,
                    frameHeight,
                    icPp,
                    MP4_MPEG4_VIDEO_TYPE);
    } else {
        trackId =
            MP4AddVideoTrack(
                    mp4File,
                    Mp4TimeScale,
                    mp4FrameDuration,
                    frameWidth,
                    frameHeight,
                    MP4_MPEG4_VIDEO_TYPE);
    }

    if (trackId == MP4_INVALID_TRACK_ID) {
        fprintf(stderr,
                "%s: can't create video track\n", ProgName);
        return MP4_INVALID_TRACK_ID;
    }

    if (VideoProfileLevelSpecified) {
        videoProfileLevel = VideoProfileLevel;
    }
    // Only the first video track sets the file-level profile/level.
    if (MP4GetNumberOfTracks(mp4File, MP4_VIDEO_TRACK_TYPE) == 1) {
        MP4SetVideoProfileLevel(mp4File, videoProfileLevel);
    }
    printf("es config size is %d\n", esConfigSize);
    if (esConfigSize) {
        MP4SetTrackESConfiguration(mp4File, trackId,
                pCurrentSample, esConfigSize);

        // move past ES config, so it doesn't go into first sample
        pCurrentSample += esConfigSize;
    }
    // Move the current frame to the beginning of the
    // buffer
    memmove(sampleBuffer, pCurrentSample, pObj - pCurrentSample + objSize);
    pObj = sampleBuffer + (pObj - pCurrentSample);
    pCurrentSample = sampleBuffer;
    MP4Timestamp prevFrameTimestamp = 0;

    // now process the rest of the video stream
    while ( true ) {
        if ( objType != MP4AV_MPEG4_VOP_START ) {
	  // keep it in the buffer until a VOP comes along
	  // Actually, do nothings, since we only want VOP
	  // headers in the stream - wmay 6/2004
	  //pObj += objSize;

        } else { // we have VOP
            u_int32_t sampleSize = (pObj + objSize) - pCurrentSample;

            vopType = MP4AV_Mpeg4GetVopType(pObj, objSize);

	    // Record this frame's type and timestamp for the ctts pass.
	    mpeg4_frame_t *fr = MALLOC_STRUCTURE(mpeg4_frame_t);
	    if (head == NULL) {
	      head = tail = fr;
	    } else {
	      tail->next = fr;
	      tail = fr;
	    }
	    fr->vopType = vopType;
	    fr->frameTimestamp = currentSampleTime;
	    fr->next = NULL;
            if ( variableFrameRate ) {
                // variable frame rate:  recalculate "mp4FrameDuration"
                if ( lastFrame ) {
                    // last frame
                    mp4FrameDuration = Mp4TimeScale / timeTicks;
                } else {
                    // not the last frame
                    u_int32_t vopTimeIncrement;
                    MP4AV_Mpeg4ParseVop(pObj, objSize, &vopType, timeBits, timeTicks, &vopTimeIncrement);
                    u_int32_t vopTime = vopTimeIncrement - lastVopTimeIncrement;
                    mp4FrameDuration = (Mp4TimeScale * vopTime) / timeTicks;
                    lastVopTimeIncrement = vopTimeIncrement % timeTicks;
                }
	    }
            if ( prevSampleSize > 0 ) { // not the first time
                // fill sample data & length to write
                u_int8_t* sampleData2Write = NULL;
                u_int32_t sampleLen2Write = 0;
                if ( doEncrypt ) {
                    // NOTE(review): this passes sampleSize (the *current*
                    // sample's length) while the plain path below writes
                    // prevSampleSize bytes of the *previous* sample -
                    // verify this asymmetry is intentional.
                    if ( ismacrypEncryptSampleAddHeader(ismaCrypSId,
                                sampleSize,
                                sampleBuffer,
                                &sampleLen2Write,
                                &sampleData2Write) != 0 ) {
                        fprintf(stderr,
                                "%s: can't encrypt video sample and add header %u\n",
                                ProgName, sampleId);
                    }
                } else {
                    sampleData2Write = sampleBuffer;
                    sampleLen2Write = prevSampleSize;
                }

		
            // Fixed frame rate: derive this frame's duration from the
            // ideal timestamp to avoid accumulating rounding error.
            if (variableFrameRate == false) {
	      double now_calc;
	      now_calc = sampleId;
	      now_calc *= Mp4TimeScale;
	      now_calc /= VideoFrameRate;
	      MP4Timestamp now_ts = (MP4Timestamp)now_calc;
	      mp4FrameDuration = now_ts - prevFrameTimestamp;
	      prevFrameTimestamp = now_ts;
	      currentSampleTime = now_ts;
	    }
                // Write the previous sample
                rc = MP4WriteSample(mp4File, trackId,
                        sampleData2Write, sampleLen2Write,
                        mp4FrameDuration, 0, prevVopType == VOP_TYPE_I);

                if ( doEncrypt && sampleData2Write ) {
                    // buffer allocated by encrypt function.
                    // must free it!
                    free(sampleData2Write);
                }

                if ( !rc ) {
                    fprintf(stderr,
                            "%s: can't write video frame %u\n",
                            ProgName, sampleId);
                    MP4DeleteTrack(mp4File, trackId);
                    return MP4_INVALID_TRACK_ID;
                }

                // deal with rendering time offsets
                // that can occur when B frames are being used
                // which is the case for all profiles except Simple Profile
		haveBframes |= (prevVopType == VOP_TYPE_B);

		if ( lastFrame ) {
		  // finish read frames
		  break;
		}
                sampleId++;
            } // not the first time

            currentSampleTime += mp4FrameDuration;

            // Move the current frame to the beginning of the
            // buffer
            memmove(sampleBuffer, pCurrentSample, sampleSize);
            prevSampleSize = sampleSize;
            prevVopType = vopType;
            // reset pointers
            pObj = pCurrentSample = sampleBuffer + sampleSize;
        } // we have VOP

        // load next object from bitstream
        if (!LoadNextObject(inFile, pObj, &objSize, &objType)) {
            // EOF: if a VOP is still pending, loop once more with a
            // zero-size object to flush it as the last frame.
            if (objType != MP4AV_MPEG4_VOP_START)
                break;
            lastFrame = true;
            objSize = 0;
            continue;
        }
        // guard against buffer overflow
        if (pObj + objSize >= pCurrentSample + maxSampleSize) {
            fprintf(stderr,
                    "%s: buffer overflow, invalid video stream?\n", ProgName);
            MP4DeleteTrack(mp4File, trackId);
            return MP4_INVALID_TRACK_ID;
        }
#ifdef DEBUG_MP4V
        if (Verbosity & MP4_DETAILS_SAMPLE) {
            printf("MP4V type %x size %u\n",
                    objType, objSize);
        }
#endif
    }
    // Simple Profile has no B frames, so no ctts is ever needed there.
    bool doRenderingOffset = false;
    switch (videoProfileLevel) {
    case MPEG4_SP_L0:
    case MPEG4_SP_L1:
    case MPEG4_SP_L2:
    case MPEG4_SP_L3:
      break;
    default:
      doRenderingOffset = true;
      break;
    }
   
    if (doRenderingOffset && haveBframes) {
      // only generate ctts (with rendering offset for I, P frames) when
      // we need one.  We saved all the frames types and timestamps above - 
      // we can't use MP4ReadSample, because the end frames might not have
      // been written 
      refVopId = 1;
      refVopTime = 0;
      MP4SampleId maxSamples = MP4GetTrackNumberOfSamples(mp4File, trackId);
      // start with sample 2 - we know the first one is a I frame
      mpeg4_frame_t *fr = head->next; // skip the first one
      for (MP4SampleId ix = 2; ix <= maxSamples; ix++) {
	if (fr->vopType != VOP_TYPE_B) {
#ifdef DEBUG_MP4V_TS
            printf("sample %u %u renderingOffset "U64"\n",
		   refVopId, fr->vopType, fr->frameTimestamp - refVopTime);
#endif
	  MP4SetSampleRenderingOffset(mp4File, trackId, refVopId, 
				      fr->frameTimestamp - refVopTime);
	  refVopId = ix;
	  refVopTime = fr->frameTimestamp;
	}
	fr = fr->next;
      }
      
      // Flush the offset for the final reference VOP.
#ifdef DEBUG_MP4V_TS
      printf("sample %u %u renderingOffset "U64"\n",
	     refVopId, fr->vopType, fr->frameTimestamp - refVopTime);
#endif
      MP4SetSampleRenderingOffset(mp4File, trackId, refVopId, 
				  fr->frameTimestamp - refVopTime);
    }

    // Release the per-frame bookkeeping list.
    while (head != NULL) {
      tail = head->next;
      free(head);
      head = tail;
    }
    // terminate session if encrypting
    if (doEncrypt) {
        if (ismacrypEndSession(ismaCrypSId) != 0) {
            fprintf(stderr,
                    "%s: could not end the ISMAcryp session\n",
                    ProgName);
        }
    }

    return trackId;
}
Example #7
0
/*
 * Decode one buffer of compressed video through libavcodec and hand the
 * resulting picture to the video output interface.
 *
 * ptr        - codec instance (ffmpeg_codec_t)
 * pts        - timestamp for this buffer (msec); may be a PTS that must
 *              be converted to a DTS depending on the codec
 * from_rtp   - unused in this function
 * sync_frame - unused in this function
 * buffer     - compressed data, buflen bytes
 *
 * Always returns buflen (the whole buffer is considered consumed, even
 * on early-out paths where nothing was decoded).
 */
static int ffmpeg_decode (codec_data_t *ptr,
                          frame_timestamp_t *pts,
                          int from_rtp,
                          int *sync_frame,
                          uint8_t *buffer,
                          uint32_t buflen,
                          void *ud)
{
    ffmpeg_codec_t *ffmpeg = (ffmpeg_codec_t *)ptr;
    uint32_t bytes_used = 0;
    int got_picture = 0;
    uint64_t ts = pts->msec_timestamp;

    //ffmpeg_message(LOG_ERR, "ffmpeg", "%u timestamp "U64, buflen, ts);
    // Lazily open the codec: for H.264 we first need the frame size
    // from the bitstream before avcodec_open can be called.
    if (ffmpeg->m_codec_opened == false) {
        // look for header, like above, and open it
        bool open_codec = true;
        switch (ffmpeg->m_codecId) {
        case CODEC_ID_H264:
            open_codec = ffmpeg_find_h264_size(ffmpeg, buffer, buflen);
            break;
        default:
            break;
        }
        if (open_codec) {
            if (avcodec_open(ffmpeg->m_c, ffmpeg->m_codec) < 0) {
                ffmpeg_message(LOG_CRIT, "ffmpeg", "failed to open codec");
                return buflen;
            }
            ffmpeg->m_codec_opened = true;
            ffmpeg_message(LOG_ERR, "ffmpeg", "opened codec");
        } else {
            ffmpeg_message(LOG_ERR, "ffmpeg", "no open %u "U64, buflen, ts);
            return buflen;
        }
    }

    // look and see if we have read the I frame.
    // Until the first sync frame arrives, discard everything.
    if (ffmpeg->m_got_i == false) {
        if (ffmpeg_frame_is_sync(ptr, buffer, buflen, NULL) == 0) {
            return buflen;
        }
        ffmpeg->m_got_i = true;
    }

    // Feed the whole buffer to the decoder, possibly across several
    // calls; remember if any call produced a picture.
    int ret;
    do {
        int local_got_picture;
        ret = avcodec_decode_video(ffmpeg->m_c,
                                   ffmpeg->m_picture,
                                   &local_got_picture,
                                   buffer + bytes_used,
                                   buflen - bytes_used);
        // NOTE(review): when ret is -1 this decrements bytes_used by one
        // before the loop exits; bytes_used is only used for the debug
        // log below, so presumably harmless - confirm.
        bytes_used += ret;
        //ffmpeg_message(LOG_CRIT, "ffmpeg", "used %d %d", ret, local_got_picture);
        got_picture |= local_got_picture;
    } while (ret != -1 && bytes_used < buflen);

    // If we were handed a PTS, convert it to a DTS using the codec's
    // frame-type information (B frames make PTS != DTS).
    if (pts->timestamp_is_pts) {
        //ffmpeg_message(LOG_ERR, "ffmpeg", "pts timestamp "U64, ts);
        if (ffmpeg->m_codecId == CODEC_ID_MPEG2VIDEO) {
            // First time through, pull the frame rate from the sequence
            // header so the converter can work.
            if (ffmpeg->pts_convert.frame_rate == 0.0) {
                int have_mpeg2;
                uint32_t h, w;
                double bitrate, aspect_ratio;
                uint8_t profile;
                MP4AV_Mpeg3ParseSeqHdr(buffer, buflen,
                                       &have_mpeg2,
                                       &h, &w,
                                       &ffmpeg->pts_convert.frame_rate,
                                       &bitrate, &aspect_ratio,
                                       &profile);
            }

            int ftype;
            int header = MP4AV_Mpeg3FindPictHdr(buffer, buflen, &ftype);
            if (header >= 0) {
                uint16_t temp_ref = MP4AV_Mpeg3PictHdrTempRef(buffer + header);
                uint64_t ret;
                if (got_picture == 0 ||
                        mpeg3_find_dts_from_pts(&ffmpeg->pts_convert,
                                                ts,
                                                ftype,
                                                temp_ref,
                                                &ret) < 0) {
                    // Conversion failed - drop any cached timestamp and
                    // consume the buffer without emitting a frame.
                    ffmpeg->have_cached_ts = false;
                    return buflen;
                }
#if 0
                ffmpeg->m_vft->log_msg(LOG_DEBUG, "ffmpeg", "pts "U64" dts "U64" temp %u type %u %u",
                                       ts, ret,
                                       temp_ref, ftype, got_picture);
#endif
                ts = ret;
                //	ffmpeg_message(LOG_ERR, "ffmpeg", "type %d ref %u "U64, ftype, temp_ref, ret);
            }
        } else if (ffmpeg->m_codecId == CODEC_ID_MPEG4) {
            uint8_t *vopstart = MP4AV_Mpeg4FindVop(buffer, buflen);
            if (vopstart) {
                int ftype = MP4AV_Mpeg4GetVopType(vopstart, buflen);
                uint64_t dts;
                if (MP4AV_calculate_dts_from_pts(&ffmpeg->pts_to_dts,
                                                 ts,
                                                 ftype,
                                                 &dts) < 0) {
                    ffmpeg->have_cached_ts = false;
#ifdef DEBUG_FFMPEG_PTS
                    ffmpeg_message(LOG_DEBUG, "ffmpeg", "type %d %d pts "U64" failed to calc",
                                   ftype, got_picture, ts);
#endif
                    return buflen;
                }
#ifdef DEBUG_FFMPEG_PTS
                ffmpeg_message(LOG_DEBUG, "ffmpeg", "type %d %d pts "U64" dts "U64,
                               ftype, got_picture, ts, dts);
#endif
                ts = dts;
            }
        } else if (ffmpeg->m_codecId == CODEC_ID_H264) {
            // Scan the NAL units for a B slice; the converter only needs
            // to know B vs non-B to reorder timestamps.
            uint8_t *nal_ptr = buffer;
            uint32_t len = buflen;
            bool have_b_nal = false;
            do {
                if (h264_nal_unit_type_is_slice(h264_nal_unit_type(nal_ptr))) {
                    uint8_t slice_type;
                    if (h264_find_slice_type(nal_ptr, len, &slice_type, false) >= 0) {
                        have_b_nal = H264_TYPE_IS_B(slice_type);
                    }
                }
                uint32_t offset = h264_find_next_start_code(nal_ptr, len);
                if (offset == 0) {
                    len = 0;
                } else {
                    nal_ptr += offset;
                    len -= offset;
                }
            } while (len > 0 && have_b_nal == false);
            uint64_t dts;
            if (MP4AV_calculate_dts_from_pts(&ffmpeg->pts_to_dts,
                                             ts,
                                             have_b_nal ? VOP_TYPE_B : VOP_TYPE_P,
                                             &dts) < 0) {
                ffmpeg->have_cached_ts = false;
#ifdef DEBUG_FFMPEG_PTS
                ffmpeg_message(LOG_DEBUG, "ffmpeg", "pts "U64" failed to calc",
                               ts);
#endif
                return buflen;
            }
            ts = dts;
        }
    }
    if (got_picture != 0) {
        // One-time output configuration once the decoder knows the
        // picture dimensions and aspect ratio.
        if (ffmpeg->m_video_initialized == false) {
            double aspect;
            if (ffmpeg->m_c->sample_aspect_ratio.den == 0) {
                aspect = 0.0; // don't have one
            } else {
                aspect = av_q2d(ffmpeg->m_c->sample_aspect_ratio);
            }
            if (ffmpeg->m_c->width == 0) {
                return buflen;
            }
            ffmpeg->m_vft->video_configure(ffmpeg->m_ifptr,
                                           ffmpeg->m_c->width,
                                           ffmpeg->m_c->height,
                                           VIDEO_FORMAT_YUV,
                                           aspect);
            ffmpeg->m_video_initialized = true;
        }

        if (ffmpeg->m_c->pix_fmt != PIX_FMT_YUV420P) {
            // convert the image from whatever it is to YUV 4:2:0
            AVPicture from, to;
            int ret;
            // get the buffer to copy into (put it right into the ring buffer)
            ret = ffmpeg->m_vft->video_get_buffer(ffmpeg->m_ifptr,
                                                  &to.data[0],
                                                  &to.data[1],
                                                  &to.data[2]);
            if (ret == 0) {
                return buflen;
            }
            // set up the AVPicture structures
            to.linesize[0] = ffmpeg->m_c->width;
            to.linesize[1] = ffmpeg->m_c->width / 2;
            to.linesize[2] = ffmpeg->m_c->width / 2;
            for (int ix = 0; ix < 4; ix++) {
                from.data[ix] = ffmpeg->m_picture->data[ix];
                from.linesize[ix] = ffmpeg->m_picture->linesize[ix];
            }

            img_convert(&to, PIX_FMT_YUV420P,
                        &from, ffmpeg->m_c->pix_fmt,
                        ffmpeg->m_c->width, ffmpeg->m_c->height);
            // Emit with the cached timestamp if an earlier buffer's ts
            // is still pending, else with this buffer's ts.
            ffmpeg->m_vft->video_filled_buffer(ffmpeg->m_ifptr,
                                               ffmpeg->have_cached_ts ?
                                               ffmpeg->cached_ts : ts);
        } else {
            ffmpeg->m_vft->video_have_frame(ffmpeg->m_ifptr,
                                            ffmpeg->m_picture->data[0],
                                            ffmpeg->m_picture->data[1],
                                            ffmpeg->m_picture->data[2],
                                            ffmpeg->m_picture->linesize[0],
                                            ffmpeg->m_picture->linesize[1],
                                            ffmpeg->have_cached_ts ?
                                            ffmpeg->cached_ts : ts);
        }
        ffmpeg->cached_ts = ts;
    } else {
        // No picture this time: remember the timestamp so the frame that
        // eventually completes is stamped with it.
        ffmpeg->cached_ts = ts;
        ffmpeg->have_cached_ts = true;
    }
#ifdef DEBUG_FFMPEG_FRAME
    ffmpeg_message(LOG_DEBUG, "ffmpeg", "used %u of %u", bytes_used, buflen);
#endif
    return (buflen);
}
Example #8
0
/*
 * Decode one buffer of MPEG-4 video through the xvid decoder core and
 * hand any completed picture to the video output interface.
 *
 * ptr    - codec instance (xvid_codec_t)
 * pts    - presentation timestamp for this buffer (msec)
 * buffer - compressed bitstream, blen bytes
 *
 * Returns the number of bytes consumed from buffer.
 */
static int xvid_decode (codec_data_t *ptr,
			frame_timestamp_t *pts, 
			int from_rtp,
			int *sync_frame,
			uint8_t *buffer, 
			uint32_t blen,
			void *ud)
{
  int ret;
  xvid_codec_t *xvid = (xvid_codec_t *)ptr;
  uint64_t ts = pts->msec_timestamp;
  int buflen = blen, used = 0;

  // Convert the PTS to a DTS based on the VOP coding type
  // (B-VOPs reorder presentation vs. decode time).
  uint8_t *vop = MP4AV_Mpeg4FindVop(buffer, blen);
  int type = 0;
  if (vop != NULL) {
    type = MP4AV_Mpeg4GetVopType(vop, blen);
    uint64_t dts;
    if (MP4AV_calculate_dts_from_pts(&xvid->pts_to_dts,
				     ts,
				     type,
				     &dts) < 0) {
      // Not enough history to compute a DTS yet - skip this buffer.
      return buflen;
    }
    ts = dts;
  }
#if 0
  xvid_message(LOG_DEBUG, "xvidif", "%u at %llu %d", 
	       blen, ts, type);
#endif

  // Don't decode anything until the VOL header has been seen.
  if (xvid->m_decodeState == XVID_STATE_VO_SEARCH) {
    ret = look_for_vol(xvid, buffer, buflen);
    if (ret < 0) {
      return buflen;
    }
    xvid->m_decodeState = XVID_STATE_NORMAL;
  }
  xvid_dec_frame_t dec;
  xvid_dec_stats_t stats;
  do {
    memset(&dec, 0, sizeof(dec));
    // BUG FIX: was memset(&stats, 0, sizeof(dec)) - clearing the stats
    // struct with the *frame* struct's size, which over- or under-writes
    // stats depending on the two structs' relative sizes.
    memset(&stats, 0, sizeof(stats));

    dec.version = XVID_VERSION;
    dec.bitstream = buffer;
    dec.length = buflen;
    dec.general = 0;
    dec.output.csp = XVID_CSP_INTERNAL;

    stats.version = XVID_VERSION;

    ret = xvid_decore(xvid->m_xvid_handle, 
		      XVID_DEC_DECODE, 
		      &dec, 
		      &stats);
#if 0
    xvid_message(LOG_DEBUG, "xvidif", "ret %d type %d blen %d of %u",
		 ret, stats.type, buflen, blen);
#endif
    if (ret < 0 || ret > buflen) {
      // Decoder error or an implausible consumed-byte count:
      // abandon the rest of this buffer.
      buflen = 0;
      used = blen;
    } else {
      buflen -= ret;
      buffer += ret;
      used += ret;
    }
    // we could check for vol changes, etc here, if we wanted.
  } while (buflen > 4 && stats.type <= 0);

  // stats.type > 0 indicates a complete decoded picture.
  if (stats.type > 0) {
    xvid->m_vft->video_have_frame(xvid->m_ifptr,
				  (const uint8_t *)dec.output.plane[0],
				  (const uint8_t *)dec.output.plane[1],
				  (const uint8_t *)dec.output.plane[2],
				  dec.output.stride[0],
				  dec.output.stride[1],
				  ts);
  }
  xvid->m_total_frames++;
  return (used);
}