static void performSeekTest(const sp<MediaSource> &source) {
    CHECK_EQ((status_t)OK, source->start());

    int64_t durationUs;
    CHECK(source->getFormat()->findInt64(kKeyDuration, &durationUs));

    for (int64_t seekTimeUs = 0; seekTimeUs <= durationUs;
            seekTimeUs += 60000ll) {
        MediaSource::ReadOptions options;
        options.setSeekTo(
                seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);

        MediaBuffer *buffer;
        status_t err;
        for (;;) {
            err = source->read(&buffer, &options);
            options.clearSeekTo();

            if (err == INFO_FORMAT_CHANGED) {
                CHECK(buffer == NULL);
                continue;
            }

            if (err != OK) {
                CHECK(buffer == NULL);
                break;
            }

            if (buffer->range_length() > 0) {
                break;
            }

            CHECK(buffer != NULL);

            buffer->release();
            buffer = NULL;
        }

        if (err == OK) {
            int64_t timeUs;
            CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));

            printf("%lld\t%lld\t%lld\n", seekTimeUs, timeUs, seekTimeUs - timeUs);

            buffer->release();
            buffer = NULL;
        } else {
            printf("ERROR\n");
            break;
        }
    }

    CHECK_EQ((status_t)OK, source->stop());
}
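// The seek/read/retry loop above (clear the seek request after the first
// read, skip INFO_FORMAT_CHANGED notifications and zero-length buffers,
// stop on any other error) recurs in most of the functions below. A minimal
// sketch of a helper that factors it out could look like the following; it
// assumes only the MediaSource/MediaBuffer API already used above, and the
// helper itself (readNonEmptyFrame) is illustrative, not part of the
// original sources.
static status_t readNonEmptyFrame(
        const sp<MediaSource> &source,
        MediaSource::ReadOptions *options,
        MediaBuffer **out) {
    *out = NULL;
    for (;;) {
        status_t err = source->read(out, options);
        options->clearSeekTo();  // only the first read should seek

        if (err == INFO_FORMAT_CHANGED) {
            continue;  // output format changed, retry the read
        }
        if (err != OK) {
            return err;  // EOS or a real error, no buffer returned
        }
        if ((*out)->range_length() > 0) {
            return OK;  // got a frame that actually carries data
        }
        // Spurious empty buffer: release it and keep reading.
        (*out)->release();
        *out = NULL;
    }
}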
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client,
        const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source,
        uint32_t flags,
        int64_t frameTimeUs,
        int seekMode) {
    sp<MetaData> format = source->getFormat();

#ifndef MTK_HARDWARE
    // XXX:
    // Once all vendors support OMX_COLOR_FormatYUV420Planar, we can
    // remove this check and always set the decoder output color format
    // skip this check for software decoders
#ifndef QCOM_HARDWARE
    if (isYUV420PlanarSupported(client, trackMeta)) {
        format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
#else
    if (!(flags & OMXCodec::kSoftwareCodecsOnly)) {
        if (isYUV420PlanarSupported(client, trackMeta)) {
            format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
        }
#endif
    }
#endif

    sp<MediaSource> decoder = OMXCodec::Create(
            client->interface(), format, false, source,
            NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        ALOGV("unable to instantiate video decoder.");
        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        ALOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.

    MediaSource::ReadOptions options;
    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {
        ALOGE("Unknown seek mode: %d", seekMode);
        return NULL;
    }

    MediaSource::ReadOptions::SeekMode mode =
            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);

    int64_t thumbNailTime;
    if (frameTimeUs < 0) {
        if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options.setSeekTo(thumbNailTime, mode);
    } else {
        thumbNailTime = -1;
        options.setSeekTo(frameTimeUs, mode);
    }

    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            buffer->release();
            buffer = NULL;
        }
        err = decoder->read(&buffer, &options);
        options.clearSeekTo();
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        CHECK(buffer == NULL);
        ALOGV("decoding frame failed.");
        decoder->stop();
        return NULL;
    }

    ALOGV("successfully decoded video frame.");

    int32_t unreadable;
    if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        ALOGV("video frame is unreadable, decoder does not give us access "
              "to the video data.");
        buffer->release();
        buffer = NULL;
        decoder->stop();
        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
            ALOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
                  thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!meta->findRect(
                kKeyCropRect,
                &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    VideoFrame *frame = new VideoFrame;
    frame->mWidth = crop_right - crop_left + 1;
    frame->mHeight = crop_bottom - crop_top + 1;
    frame->mDisplayWidth = frame->mWidth;
    frame->mDisplayHeight = frame->mHeight;
    frame->mSize = frame->mWidth * frame->mHeight * 2;
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    int32_t displayWidth, displayHeight;
    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
        frame->mDisplayWidth = displayWidth;
    }
    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
        frame->mDisplayHeight = displayHeight;
    }

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

#ifdef MTK_HARDWARE
    {
        int32_t Stridewidth, SliceHeight;
        CHECK(meta->findInt32(kKeyStride, &Stridewidth));
        CHECK(meta->findInt32(kKeySliceHeight, &SliceHeight));
        ALOGD("kKeyWidth=%d,kKeyHeight=%d", width, height);
        ALOGD("Stridewidth=%d,SliceHeight=%d", Stridewidth, SliceHeight);
        width = Stridewidth;
        height = SliceHeight;
    }
#endif

    ColorConverter converter(
            (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);

    if (converter.isValid()) {
        err = converter.convert(
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                frame->mData,
                frame->mWidth,
                frame->mHeight,
                0, 0, frame->mWidth - 1, frame->mHeight - 1);
    } else {
        ALOGE("Unable to instantiate color conversion from format 0x%08x to "
              "RGB565", srcFormat);
        err = ERROR_UNSUPPORTED;
    }

    buffer->release();
    buffer = NULL;

    decoder->stop();

    if (err != OK) {
        ALOGE("Colorconverter failed to convert frame.");
        delete frame;
        frame = NULL;
    }

    return frame;
}

VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
        int64_t timeUs, int option) {
    ALOGV("getFrameAtTime: %lld us option: %d", timeUs, option);

    if (mExtractor.get() == NULL) {
        ALOGV("no extractor.");
        return NULL;
    }

    sp<MetaData> fileMeta = mExtractor->getMetaData();

    if (fileMeta == NULL) {
        ALOGV("extractor doesn't publish metadata, failed to initialize?");
        return NULL;
    }

    int32_t drm = 0;
    if (fileMeta->findInt32(kKeyIsDRM, &drm) && drm != 0) {
        ALOGE("frame grab not allowed.");
        return NULL;
    }

    size_t n = mExtractor->countTracks();
    size_t i;
    for (i = 0; i < n; ++i) {
        sp<MetaData> meta = mExtractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp(mime, "video/", 6)) {
            break;
        }
    }

    if (i == n) {
        ALOGV("no video track found.");
        return NULL;
    }

    sp<MetaData> trackMeta = mExtractor->getTrackMetaData(
            i, MediaExtractor::kIncludeExtensiveMetaData);

    sp<MediaSource> source = mExtractor->getTrack(i);

    if (source.get() == NULL) {
        ALOGV("unable to instantiate video track.");
        return NULL;
    }

    const void *data;
    uint32_t type;
    size_t dataSize;
    if (fileMeta->findData(kKeyAlbumArt, &type, &data, &dataSize)
            && mAlbumArt == NULL) {
        mAlbumArt = new MediaAlbumArt;
        mAlbumArt->mSize = dataSize;
        mAlbumArt->mData = new uint8_t[dataSize];
        memcpy(mAlbumArt->mData, data, dataSize);
    }

    VideoFrame *frame = extractVideoFrameWithCodecFlags(
#ifndef QCOM_HARDWARE
            &mClient, trackMeta, source, OMXCodec::kPreferSoftwareCodecs,
#else
            &mClient, trackMeta, source, OMXCodec::kSoftwareCodecsOnly,
#endif
            timeUs, option);

    if (frame == NULL) {
        ALOGV("Software decoder failed to extract thumbnail, "
              "trying hardware decoder.");

        frame = extractVideoFrameWithCodecFlags(
                &mClient, trackMeta, source, 0, timeUs, option);
    }

    return frame;
}
/*!
 * \brief The decode thread function.
 *        The main loop reads decoded frames from the video decoder and
 *        hands them to the renderer.
 */
void UMMediaPlayer::videoEntry(void)
{
    bool eof = false;
    MediaBuffer *lastBuffer = NULL;
    MediaBuffer *buffer;
    MediaSource::ReadOptions options;
    mVideoStartTime = 0;
    dcount = 0;

    UMLOG_ERR("videoEntry() ---- mVideoDecoder->start() begin");
    status_t err = mVideoDecoder->start();
    if (err != OK) {
        UMLOG_ERR("videoEntry() ---- mVideoDecoder->start() end failed err=%d", err);
        mHasDspError = 1;
        mPlaying = false;
        if (mVideoSource->HasAnyDataSource()) {
            mVideoSource->read(&buffer, &options);
            releaseBufferIfNonNULL(&buffer);
        }
    }

    while (mPlaying && !mVideoSource->HasAnyDataSource()) {
        usleep(50 * 1000);
    }

    while (mPlaying) {
        status_t err = mVideoDecoder->read(&buffer, &options);
        options.clearSeekTo();

        if (err == INFO_FORMAT_CHANGED) {
            UMLOG_ERR("VideoSource signalled format change.");
            if (mVideoRenderer != NULL) {
                initRenderer();
            }
            continue;
        }

        if (err != OK && buffer == NULL) {
            mHasDspError = 1;
            mPlaying = false;
            continue;
        }

        // Debug aid: dump the first few decoded buffers to a file, then
        // close it. Only dereference the buffer if the read produced one.
        dcount++;
        if (buffer != NULL) {
            printPDecodeDateToFile((char *)buffer->data());
        }
        if (dcount == 10) {
            if (fd != NULL) {
                fclose(fd);
            }
        }

        if (buffer == NULL) {
            usleep(3000);
            eof = true;
            continue;
        }

        CHECK((err == OK && buffer != NULL) || (err != OK && buffer == NULL));

        if (err != OK) {
            eof = true;
            mPlaying = false;
            continue;
        }

        if (buffer->range_length() == 0) {
            buffer->release();
            buffer = NULL;
            continue;
        }

        int64_t timeUs;
        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));

        if (0 == timeUs) {
            unsigned int currTS = mVideoSource->um_util_getCurrentTick();
            if (currTS <= lastRenderTS + UM_RENDER_MAX_INTERVAL) {
                // A frame was already pushed to the renderer within the last
                // UM_RENDER_MAX_INTERVAL ms, so skip this compensation frame.
                buffer->release();
                buffer = NULL;
                continue;
            }
            /* push compensation frame to render */
        }
        displayOrDiscardFrame(&lastBuffer, buffer, 0);
    }

    releaseBufferIfNonNULL(&lastBuffer);
    shutdownVideoDecoder();
}
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client,
        const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source,
        uint32_t flags,
        int64_t frameTimeUs,
        int seekMode) {
    sp<MetaData> format = source->getFormat();

    // XXX:
    // Once all vendors support OMX_COLOR_FormatYUV420Planar, we can
    // remove this check and always set the decoder output color format
    if (isYUV420PlanarSupported(client, trackMeta)) {
        format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
    }

    sp<MediaSource> decoder = OMXCodec::Create(
            client->interface(), format, false, source,
            NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        ALOGV("unable to instantiate video decoder.");
        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        ALOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.

    MediaSource::ReadOptions options;
    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {
        ALOGE("Unknown seek mode: %d", seekMode);
        return NULL;
    }

    MediaSource::ReadOptions::SeekMode mode =
            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);

    int64_t thumbNailTime;
    if (frameTimeUs < 0) {
        if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options.setSeekTo(thumbNailTime, mode);
    } else {
        thumbNailTime = -1;
        options.setSeekTo(frameTimeUs, mode);
    }

    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            buffer->release();
            buffer = NULL;
        }
        err = decoder->read(&buffer, &options);
        options.clearSeekTo();
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        CHECK(buffer == NULL);
        ALOGV("decoding frame failed.");
        decoder->stop();
        return NULL;
    }

    ALOGV("successfully decoded video frame.");

    int32_t unreadable;
    if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        ALOGV("video frame is unreadable, decoder does not give us access "
              "to the video data.");
        buffer->release();
        buffer = NULL;
        decoder->stop();
        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
            ALOGV("thumbNailTime = %" PRId64 " us, timeUs = %" PRId64 " us, mime = %s",
                  thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!meta->findRect(
                kKeyCropRect,
                &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    VideoFrame *frame = new VideoFrame;
    frame->mWidth = crop_right - crop_left + 1;
    frame->mHeight = crop_bottom - crop_top + 1;
    frame->mDisplayWidth = frame->mWidth;
    frame->mDisplayHeight = frame->mHeight;
    frame->mSize = frame->mWidth * frame->mHeight * 2;
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    int32_t displayWidth, displayHeight;
    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
        frame->mDisplayWidth = displayWidth;
    }
    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
        frame->mDisplayHeight = displayHeight;
    }

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

    ColorConverter converter(
            (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);

    if (converter.isValid()) {
        err = converter.convert(
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                frame->mData,
                frame->mWidth,
                frame->mHeight,
                0, 0, frame->mWidth - 1, frame->mHeight - 1);
    } else {
        ALOGE("Unable to instantiate color conversion from format 0x%08x to "
              "RGB565", srcFormat);
        err = ERROR_UNSUPPORTED;
    }

    buffer->release();
    buffer = NULL;

    decoder->stop();

    if (err != OK) {
        ALOGE("Colorconverter failed to convert frame.");
        delete frame;
        frame = NULL;
    }

    return frame;
}
static VideoFrame *extractVideoFrameWithCodecFlags( OMXClient *client, const sp<MetaData> &trackMeta, const sp<MediaSource> &source, uint32_t flags) { sp<MediaSource> decoder = OMXCodec::Create( client->interface(), source->getFormat(), false, source, NULL, flags | OMXCodec::kClientNeedsFramebuffer); if (decoder.get() == NULL) { LOGV("unable to instantiate video decoder."); return NULL; } status_t err = decoder->start(); if (err != OK) { LOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err); return NULL; } // Read one output buffer, ignore format change notifications // and spurious empty buffers. MediaSource::ReadOptions options; int64_t thumbNailTime; if (trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) { options.setSeekTo(thumbNailTime); } else { thumbNailTime = -1; } MediaBuffer *buffer = NULL; do { if (buffer != NULL) { buffer->release(); buffer = NULL; } err = decoder->read(&buffer, &options); options.clearSeekTo(); } while (err == INFO_FORMAT_CHANGED || (buffer != NULL && buffer->range_length() == 0)); if (err != OK) { CHECK_EQ(buffer, NULL); LOGV("decoding frame failed."); decoder->stop(); return NULL; } LOGV("successfully decoded video frame."); int32_t unreadable; if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable) && unreadable != 0) { LOGV("video frame is unreadable, decoder does not give us access " "to the video data."); buffer->release(); buffer = NULL; decoder->stop(); return NULL; } int64_t timeUs; CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs)); if (thumbNailTime >= 0) { if (timeUs != thumbNailTime) { const char *mime; CHECK(trackMeta->findCString(kKeyMIMEType, &mime)); LOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s", thumbNailTime, timeUs, mime); } } sp<MetaData> meta = decoder->getFormat(); int32_t width, height; CHECK(meta->findInt32(kKeyWidth, &width)); CHECK(meta->findInt32(kKeyHeight, &height)); int32_t rotationAngle; if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) { rotationAngle = 0; // By default, no rotation } VideoFrame *frame = new VideoFrame; frame->mWidth = width; frame->mHeight = height; frame->mDisplayWidth = width; frame->mDisplayHeight = height; frame->mSize = width * height * 2; frame->mData = new uint8_t[frame->mSize]; frame->mRotationAngle = rotationAngle; int32_t srcFormat; CHECK(meta->findInt32(kKeyColorFormat, &srcFormat)); ColorConverter converter( (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565); CHECK(converter.isValid()); converter.convert( width, height, (const uint8_t *)buffer->data() + buffer->range_offset(), 0, frame->mData, width * 2); buffer->release(); buffer = NULL; decoder->stop(); return frame; }
status_t Harness::testSeek( const char *componentName, const char *componentRole) { bool isEncoder = !strncmp(componentRole, "audio_encoder.", 14) || !strncmp(componentRole, "video_encoder.", 14); if (isEncoder) { // Not testing seek behaviour for encoders. printf(" * Not testing seek functionality for encoders.\n"); return OK; } const char *mime = GetMimeFromComponentRole(componentRole); if (!mime) { LOGI("Cannot perform seek test with this componentRole (%s)", componentRole); return OK; } sp<MediaSource> source = CreateSourceForMime(mime); sp<MediaSource> seekSource = CreateSourceForMime(mime); if (source == NULL || seekSource == NULL) { return UNKNOWN_ERROR; } CHECK_EQ(seekSource->start(), OK); sp<MediaSource> codec = OMXCodec::Create( mOMX, source->getFormat(), false /* createEncoder */, source, componentName); CHECK(codec != NULL); CHECK_EQ(codec->start(), OK); int64_t durationUs; CHECK(source->getFormat()->findInt64(kKeyDuration, &durationUs)); LOGI("stream duration is %lld us (%.2f secs)", durationUs, durationUs / 1E6); static const int32_t kNumIterations = 5000; // We are always going to seek beyond EOS in the first iteration (i == 0) // followed by a linear read for the second iteration (i == 1). // After that it's all random. for (int32_t i = 0; i < kNumIterations; ++i) { int64_t requestedSeekTimeUs; int64_t actualSeekTimeUs; MediaSource::ReadOptions options; double r = uniform_rand(); if ((i == 1) || (i > 0 && r < 0.5)) { // 50% chance of just continuing to decode from last position. requestedSeekTimeUs = -1; LOGI("requesting linear read"); } else { if (i == 0 || r < 0.55) { // 5% chance of seeking beyond end of stream. requestedSeekTimeUs = durationUs; LOGI("requesting seek beyond EOF"); } else { requestedSeekTimeUs = (int64_t)(uniform_rand() * durationUs); LOGI("requesting seek to %lld us (%.2f secs)", requestedSeekTimeUs, requestedSeekTimeUs / 1E6); } MediaBuffer *buffer = NULL; options.setSeekTo( requestedSeekTimeUs, MediaSource::ReadOptions::SEEK_NEXT_SYNC); if (seekSource->read(&buffer, &options) != OK) { CHECK_EQ(buffer, NULL); actualSeekTimeUs = -1; } else { CHECK(buffer != NULL); CHECK(buffer->meta_data()->findInt64(kKeyTime, &actualSeekTimeUs)); CHECK(actualSeekTimeUs >= 0); buffer->release(); buffer = NULL; } LOGI("nearest keyframe is at %lld us (%.2f secs)", actualSeekTimeUs, actualSeekTimeUs / 1E6); } status_t err; MediaBuffer *buffer; for (;;) { err = codec->read(&buffer, &options); options.clearSeekTo(); if (err == INFO_FORMAT_CHANGED) { CHECK_EQ(buffer, NULL); continue; } if (err == OK) { CHECK(buffer != NULL); if (buffer->range_length() == 0) { buffer->release(); buffer = NULL; continue; } } else { CHECK_EQ(buffer, NULL); } break; } if (requestedSeekTimeUs < 0) { // Linear read. 
if (err != OK) { CHECK_EQ(buffer, NULL); } else { CHECK(buffer != NULL); buffer->release(); buffer = NULL; } } else if (actualSeekTimeUs < 0) { EXPECT(err != OK, "We attempted to seek beyond EOS and expected " "ERROR_END_OF_STREAM to be returned, but instead " "we got a valid buffer."); EXPECT(err == ERROR_END_OF_STREAM, "We attempted to seek beyond EOS and expected " "ERROR_END_OF_STREAM to be returned, but instead " "we found some other error."); CHECK_EQ(err, ERROR_END_OF_STREAM); CHECK_EQ(buffer, NULL); } else { EXPECT(err == OK, "Expected a valid buffer to be returned from " "OMXCodec::read."); CHECK(buffer != NULL); int64_t bufferTimeUs; CHECK(buffer->meta_data()->findInt64(kKeyTime, &bufferTimeUs)); if (!CloseEnough(bufferTimeUs, actualSeekTimeUs)) { printf("\n * Attempted seeking to %lld us (%.2f secs)", requestedSeekTimeUs, requestedSeekTimeUs / 1E6); printf("\n * Nearest keyframe is at %lld us (%.2f secs)", actualSeekTimeUs, actualSeekTimeUs / 1E6); printf("\n * Returned buffer was at %lld us (%.2f secs)\n\n", bufferTimeUs, bufferTimeUs / 1E6); buffer->release(); buffer = NULL; CHECK_EQ(codec->stop(), OK); return UNKNOWN_ERROR; } buffer->release(); buffer = NULL; } } CHECK_EQ(codec->stop(), OK); return OK; }
static VideoFrame *extractVideoFrameWithCodecFlags( OMXClient *client, const sp<MetaData> &trackMeta, const sp<MediaSource> &source, uint32_t flags, int64_t frameTimeUs, int seekMode) { #ifdef OMAP_ENHANCEMENT flags |= OMXCodec::kPreferThumbnailMode; #ifdef TARGET_OMAP4 int32_t isInterlaced = false; //Call config parser to update profile,level,interlaced,reference frame data updateMetaData(trackMeta); trackMeta->findInt32(kKeyVideoInterlaced, &isInterlaced); if(isInterlaced) { flags |= OMXCodec::kPreferInterlacedOutputContent; } #endif #endif sp<MediaSource> decoder = OMXCodec::Create( client->interface(), source->getFormat(), false, source, NULL, flags | OMXCodec::kClientNeedsFramebuffer); if (decoder.get() == NULL) { LOGV("unable to instantiate video decoder."); return NULL; } status_t err = decoder->start(); if (err != OK) { LOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err); return NULL; } // Read one output buffer, ignore format change notifications // and spurious empty buffers. MediaSource::ReadOptions options; if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC || seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) { LOGE("Unknown seek mode: %d", seekMode); return NULL; } MediaSource::ReadOptions::SeekMode mode = static_cast<MediaSource::ReadOptions::SeekMode>(seekMode); int64_t thumbNailTime; if (frameTimeUs < 0 && trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) { options.setSeekTo(thumbNailTime, mode); } else { thumbNailTime = -1; options.setSeekTo(frameTimeUs < 0 ? 0 : frameTimeUs, mode); } MediaBuffer *buffer = NULL; do { if (buffer != NULL) { buffer->release(); buffer = NULL; } err = decoder->read(&buffer, &options); #ifdef OMAP_ENHANCEMENT if(err == INFO_FORMAT_CHANGED) { int32_t w1,h1; decoder->getFormat()->findInt32(kKeyWidth, &w1); decoder->getFormat()->findInt32(kKeyHeight, &h1); LOGD("Got portreconfig event. New WxH %dx%d. 
wait 5mS for port to be enabled",w1,h1); usleep(5000); //sleep 5mS for port disable-enable to complete } #endif options.clearSeekTo(); } while (err == INFO_FORMAT_CHANGED || (buffer != NULL && buffer->range_length() == 0)); if (err != OK) { CHECK_EQ(buffer, NULL); LOGV("decoding frame failed."); decoder->stop(); return NULL; } LOGV("successfully decoded video frame."); int32_t unreadable; if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable) && unreadable != 0) { LOGV("video frame is unreadable, decoder does not give us access " "to the video data."); buffer->release(); buffer = NULL; decoder->stop(); return NULL; } int64_t timeUs; CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs)); if (thumbNailTime >= 0) { if (timeUs != thumbNailTime) { const char *mime; CHECK(trackMeta->findCString(kKeyMIMEType, &mime)); LOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s", thumbNailTime, timeUs, mime); } } sp<MetaData> meta = decoder->getFormat(); int32_t width, height; CHECK(meta->findInt32(kKeyWidth, &width)); CHECK(meta->findInt32(kKeyHeight, &height)); int32_t rotationAngle; if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) { rotationAngle = 0; // By default, no rotation } VideoFrame *frame = new VideoFrame; #if defined(OMAP_ENHANCEMENT) && defined(TARGET_OMAP4) int32_t srcFormat; CHECK(meta->findInt32(kKeyColorFormat, &srcFormat)); int32_t format; const char *component; //cache the display width and height int32_t displayWidth, displayHeight; displayWidth = width; displayHeight = height; //update width & height with the buffer width&height if(!(meta->findInt32(kKeyPaddedWidth, &width))) { CHECK(meta->findInt32(kKeyWidth, &width)); } if(!(meta->findInt32(kKeyPaddedHeight, &height))) { CHECK(meta->findInt32(kKeyHeight, &height)); } LOGD("VideoFrame WxH %dx%d", displayWidth, displayHeight); if(((OMX_COLOR_FORMATTYPE)srcFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar) || ((OMX_COLOR_FORMATTYPE)srcFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar_Sequential_TopBottom)){ frame->mWidth = displayWidth; frame->mHeight = displayHeight; frame->mDisplayWidth = displayWidth; frame->mDisplayHeight = displayHeight; frame->mSize = displayWidth * displayHeight * 2; frame->mData = new uint8_t[frame->mSize]; frame->mRotationAngle = rotationAngle; }else { frame->mWidth = width; frame->mHeight = height; frame->mDisplayWidth = width; frame->mDisplayHeight = height; frame->mSize = width * height * 2; frame->mData = new uint8_t[frame->mSize]; frame->mRotationAngle = rotationAngle; } if(((OMX_COLOR_FORMATTYPE)srcFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar) || ((OMX_COLOR_FORMATTYPE)srcFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar_Sequential_TopBottom)){ ColorConverter converter( (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565); CHECK(converter.isValid()); converter.convert( width, height, (const uint8_t *)buffer->data() + buffer->range_offset(), 0, //1D buffer in 1.16 Ducati rls. 
If 2D buffer -> 4096 stride should be used frame->mData, displayWidth * 2, displayWidth,displayHeight,buffer->range_offset(),isInterlaced); } else{ ColorConverter converter( (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565); CHECK(converter.isValid()); converter.convert( width, height, (const uint8_t *)buffer->data() + buffer->range_offset(), 0, frame->mData, width * 2); } #else frame->mWidth = width; frame->mHeight = height; frame->mDisplayWidth = width; frame->mDisplayHeight = height; frame->mSize = width * height * 2; frame->mData = new uint8_t[frame->mSize]; frame->mRotationAngle = rotationAngle; int32_t srcFormat; CHECK(meta->findInt32(kKeyColorFormat, &srcFormat)); ColorConverter converter( (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565); CHECK(converter.isValid()); converter.convert( width, height, (const uint8_t *)buffer->data() + buffer->range_offset(), 0, frame->mData, width * 2); #endif buffer->release(); buffer = NULL; decoder->stop(); return frame; }
static void playSource(OMXClient *client, sp<MediaSource> &source) {
    sp<MetaData> meta = source->getFormat();

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    sp<MediaSource> rawSource;
    if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
        rawSource = source;
    } else {
        int flags = 0;
        if (gPreferSoftwareCodec) {
            flags |= OMXCodec::kPreferSoftwareCodecs;
        }
        if (gForceToUseHardwareCodec) {
            CHECK(!gPreferSoftwareCodec);
            flags |= OMXCodec::kHardwareCodecsOnly;
        }
        rawSource = OMXCodec::Create(
                client->interface(), meta, false /* createEncoder */, source,
                NULL /* matchComponentName */, flags, gSurface);

        if (rawSource == NULL) {
            fprintf(stderr, "Failed to instantiate decoder for '%s'.\n", mime);
            return;
        }
        displayAVCProfileLevelIfPossible(meta);
    }

    source.clear();

    status_t err = rawSource->start();

    if (err != OK) {
        fprintf(stderr, "rawSource returned error %d (0x%08x)\n", err, err);
        return;
    }

    if (gPlaybackAudio) {
        AudioPlayer *player = new AudioPlayer(NULL);
        player->setSource(rawSource);
        rawSource.clear();

        player->start(true /* sourceAlreadyStarted */);

        status_t finalStatus;
        while (!player->reachedEOS(&finalStatus)) {
            usleep(100000ll);
        }

        delete player;
        player = NULL;

        return;
    } else if (gReproduceBug >= 3 && gReproduceBug <= 5) {
        int64_t durationUs;
        CHECK(meta->findInt64(kKeyDuration, &durationUs));

        status_t err;
        MediaBuffer *buffer;
        MediaSource::ReadOptions options;
        int64_t seekTimeUs = -1;
        for (;;) {
            err = rawSource->read(&buffer, &options);
            options.clearSeekTo();

            bool shouldSeek = false;
            if (err == INFO_FORMAT_CHANGED) {
                CHECK(buffer == NULL);

                printf("format changed.\n");
                continue;
            } else if (err != OK) {
                printf("reached EOF.\n");

                shouldSeek = true;
            } else {
                int64_t timestampUs;
                CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));

                bool failed = false;

                if (seekTimeUs >= 0) {
                    int64_t diff = timestampUs - seekTimeUs;

                    if (diff < 0) {
                        diff = -diff;
                    }

                    if ((gReproduceBug == 4 && diff > 500000)
                            || (gReproduceBug == 5 && timestampUs < 0)) {
                        printf("wanted: %.2f secs, got: %.2f secs\n",
                               seekTimeUs / 1E6, timestampUs / 1E6);

                        printf("ERROR: ");
                        failed = true;
                    }
                }

                printf("buffer has timestamp %lld us (%.2f secs)\n",
                       timestampUs, timestampUs / 1E6);

                buffer->release();
                buffer = NULL;

                if (failed) {
                    break;
                }

                shouldSeek = ((double)rand() / RAND_MAX) < 0.1;

                if (gReproduceBug == 3) {
                    shouldSeek = false;
                }
            }

            seekTimeUs = -1;

            if (shouldSeek) {
                seekTimeUs = (rand() * (float)durationUs) / RAND_MAX;
                options.setSeekTo(seekTimeUs);

                printf("seeking to %lld us (%.2f secs)\n",
                       seekTimeUs, seekTimeUs / 1E6);
            }
        }

        rawSource->stop();

        return;
    }

    int n = 0;
    int64_t startTime = getNowUs();

    long numIterationsLeft = gNumRepetitions;
    MediaSource::ReadOptions options;

    int64_t sumDecodeUs = 0;
    int64_t totalBytes = 0;

    Vector<int64_t> decodeTimesUs;

    while (numIterationsLeft-- > 0) {
        long numFrames = 0;

        MediaBuffer *buffer;

        for (;;) {
            int64_t startDecodeUs = getNowUs();
            status_t err = rawSource->read(&buffer, &options);
            int64_t delayDecodeUs = getNowUs() - startDecodeUs;

            options.clearSeekTo();

            if (err != OK) {
                CHECK(buffer == NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    printf("format changed.\n");
                    continue;
                }

                break;
            }

            if (buffer->range_length() > 0) {
                if (gDisplayHistogram && n > 0) {
                    // Ignore the first time since it includes some setup
                    // cost.
                    decodeTimesUs.push(delayDecodeUs);
                }

                if ((n++ % 16) == 0) {
                    printf(".");
                    fflush(stdout);
                }
            }

            sumDecodeUs += delayDecodeUs;
            totalBytes += buffer->range_length();

            buffer->release();
            buffer = NULL;

            ++numFrames;
            if (gMaxNumFrames > 0 && numFrames == gMaxNumFrames) {
                break;
            }

            if (gReproduceBug == 1 && numFrames == 40) {
                printf("seeking past the end now.");
                options.setSeekTo(0x7fffffffL);
            } else if (gReproduceBug == 2 && numFrames == 40) {
                printf("seeking to 5 secs.");
                options.setSeekTo(5000000);
            }
        }

        printf("$");
        fflush(stdout);

        options.setSeekTo(0);
    }

    rawSource->stop();
    printf("\n");

    int64_t delay = getNowUs() - startTime;
    if (!strncasecmp("video/", mime, 6)) {
        printf("avg. %.2f fps\n", n * 1E6 / delay);

        printf("avg. time to decode one buffer %.2f usecs\n",
               (double)sumDecodeUs / n);

        printf("decoded a total of %d frame(s).\n", n);

        if (gDisplayHistogram) {
            displayDecodeHistogram(&decodeTimesUs);
        }
    } else if (!strncasecmp("audio/", mime, 6)) {
        // Frame count makes less sense for audio, as the output buffer
        // sizes may be different across decoders.
        printf("avg. %.2f KB/sec\n", totalBytes / 1024 * 1E6 / delay);

        printf("decoded a total of %lld bytes\n", totalBytes);
    }
}
/**
 *******************************************************************************
 * @brief   Gets an access unit (AU) from the stream handler source.
 * @note    AU is the smallest possible amount of data to be decoded by decoder
 *
 * @param   context         (IN)  Context of the reader
 * @param   pStreamHandler  (IN)  The stream handler of the stream to make jump
 * @param   pAccessUnit     (I/O) Pointer to an access unit to fill with read data
 * @return  M4NO_ERROR        there is no error
 * @return  M4ERR_PARAMETER   at least one parameter is not properly set
 * @returns M4ERR_ALLOC       memory allocation failed
 * @returns M4WAR_NO_MORE_AU  there are no more access unit in the stream
 *******************************************************************************
 */
M4OSA_ERR VideoEditorMp3Reader_getNextAu(M4OSA_Context context,
        M4_StreamHandler *pStreamHandler, M4_AccessUnit *pAccessUnit) {
    VideoEditorMp3Reader_Context *pReaderContext =
            (VideoEditorMp3Reader_Context*)context;
    M4OSA_ERR err = M4NO_ERROR;
    M4SYS_AccessUnit* pAu;
    MediaBuffer *mAudioBuffer;
    MediaSource::ReadOptions options;

    ALOGV("VideoEditorMp3Reader_getNextAu start");
    M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER,
            "VideoEditorMp3Reader_getNextAu: invalid context");
    M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
            "VideoEditorMp3Reader_getNextAu: invalid pointer to M4_StreamHandler");
    M4OSA_DEBUG_IF1((pAccessUnit == 0), M4ERR_PARAMETER,
            "VideoEditorMp3Reader_getNextAu: invalid pointer to M4_AccessUnit");

    if (pStreamHandler ==
            (M4_StreamHandler*)pReaderContext->mAudioStreamHandler) {
        pAu = &pReaderContext->mAudioAu;
    } else {
        ALOGV("VideoEditorMp3Reader_getNextAu: StreamHandler is not known\n");
        return M4ERR_PARAMETER;
    }

    if (pReaderContext->mSeeking) {
        options.setSeekTo(pReaderContext->mSeekTime);
    }

    pReaderContext->mMediaSource->read(&mAudioBuffer, &options);

    if (mAudioBuffer != NULL) {
        if ((pAu->dataAddress == NULL) ||
                (pAu->size < mAudioBuffer->range_length())) {
            if (pAu->dataAddress != NULL) {
                free((M4OSA_Int32*)pAu->dataAddress);
                pAu->dataAddress = NULL;
            }
            pAu->dataAddress = (M4OSA_Int32*)M4OSA_32bitAlignedMalloc(
                    (mAudioBuffer->range_length() + 3) & ~0x3,
                    M4READER_MP3, (M4OSA_Char*)"pAccessUnit->m_dataAddress");

            if (pAu->dataAddress == NULL) {
                ALOGV("VideoEditorMp3Reader_getNextAu malloc failed");
                pReaderContext->mMediaSource->stop();
                pReaderContext->mMediaSource.clear();
                pReaderContext->mDataSource.clear();
                return M4ERR_ALLOC;
            }
        }
        pAu->size = mAudioBuffer->range_length();
        memcpy((M4OSA_MemAddr8)pAu->dataAddress,
                (const char *)mAudioBuffer->data() + mAudioBuffer->range_offset(),
                mAudioBuffer->range_length());

        mAudioBuffer->meta_data()->findInt64(kKeyTime, (int64_t*)&pAu->CTS);

        pAu->CTS = pAu->CTS / 1000; /* converting the microsec to millisec */
        pAu->DTS = pAu->CTS;
        pAu->attribute = M4SYS_kFragAttrOk;
        mAudioBuffer->release();

        ALOGV("VideoEditorMp3Reader_getNextAu AU CTS = %ld", pAu->CTS);

        pAccessUnit->m_dataAddress = (M4OSA_Int8*)pAu->dataAddress;
        pAccessUnit->m_size = pAu->size;
        pAccessUnit->m_CTS = pAu->CTS;
        pAccessUnit->m_DTS = pAu->DTS;
        pAccessUnit->m_attribute = pAu->attribute;
    } else {
        ALOGV("VideoEditorMp3Reader_getNextAu EOS reached.");
        pAccessUnit->m_size = 0;
        err = M4WAR_NO_MORE_AU;
    }
    pAu->nbFrag = 0;

    options.clearSeekTo();
    pReaderContext->mSeeking = M4OSA_FALSE;
    mAudioBuffer = NULL;
    ALOGV("VideoEditorMp3Reader_getNextAu end");

    return err;
}
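// A minimal, hypothetical caller for the reader above: drain access units
// until M4WAR_NO_MORE_AU is returned. The context and stream handler are
// assumed to have been set up by the reader's create/open calls (not shown),
// and drainAllAus itself is illustrative, not part of the original sources.
static M4OSA_ERR drainAllAus(M4OSA_Context context,
        M4_StreamHandler *audioStream, M4_AccessUnit *au) {
    for (;;) {
        M4OSA_ERR err = VideoEditorMp3Reader_getNextAu(context, audioStream, au);
        if (err == M4WAR_NO_MORE_AU) {
            return M4NO_ERROR;   // normal end of stream
        }
        if (err != M4NO_ERROR) {
            return err;          // M4ERR_PARAMETER, M4ERR_ALLOC, ...
        }
        // au->m_dataAddress / au->m_size / au->m_CTS now describe one MP3
        // access unit; hand it to the decoder here.
    }
}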
void Process() { Frame* frame; int32_t w, h; int decode_done = 0; MediaSource::ReadOptions readopt; // GLuint texid; //SetPriority(THREAD_PRIORITY_ABOVE_NORMAL); do { #if defined(DEBUG_VERBOSE) unsigned int time = XbmcThreads::SystemClockMillis(); CLog::Log(LOGDEBUG, "%s: >>> Handling frame\n", CLASSNAME); #endif p->cur_frame = NULL; frame = (Frame*)malloc(sizeof(Frame)); if (!frame) { decode_done = 1; continue; } frame->eglimg = EGL_NO_IMAGE_KHR; frame->medbuf = NULL; if (p->resetting) { readopt.setSeekTo(0); p->resetting = false; } frame->status = p->decoder->read(&frame->medbuf, &readopt); readopt.clearSeekTo(); if (frame->status == OK) { if (!frame->medbuf->graphicBuffer().get()) // hw buffers { if (frame->medbuf->range_length() == 0) { CLog::Log(LOGERROR, "%s - Invalid buffer\n", CLASSNAME); frame->status = VC_ERROR; decode_done = 1; frame->medbuf->release(); frame->medbuf = NULL; } else frame->format = RENDER_FMT_YUV420P; } else frame->format = RENDER_FMT_EGLIMG; } if (frame->status == OK) { sp<MetaData> outFormat = p->decoder->getFormat(); outFormat->findInt32(kKeyWidth , &w); outFormat->findInt32(kKeyHeight, &h); frame->pts = 0; frame->width = w; frame->height = h; frame->medbuf->meta_data()->findInt64(kKeyTime, &(frame->pts)); } else if (frame->status == INFO_FORMAT_CHANGED) { int32_t cropLeft, cropTop, cropRight, cropBottom; sp<MetaData> outFormat = p->decoder->getFormat(); outFormat->findInt32(kKeyWidth , &p->width); outFormat->findInt32(kKeyHeight, &p->height); cropLeft = cropTop = cropRight = cropBottom = 0; if (!outFormat->findRect(kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) { p->x = 0; p->y = 0; } else { p->x = cropLeft; p->y = cropTop; p->width = cropRight - cropLeft + 1; p->height = cropBottom - cropTop + 1; } outFormat->findInt32(kKeyColorFormat, &p->videoColorFormat); if (!outFormat->findInt32(kKeyStride, &p->videoStride)) p->videoStride = p->width; if (!outFormat->findInt32(kKeySliceHeight, &p->videoSliceHeight)) p->videoSliceHeight = p->height; #if defined(DEBUG_VERBOSE) CLog::Log(LOGDEBUG, ">>> new format col:%d, w:%d, h:%d, sw:%d, sh:%d, ctl:%d,%d; cbr:%d,%d\n", p->videoColorFormat, p->width, p->height, p->videoStride, p->videoSliceHeight, cropTop, cropLeft, cropBottom, cropRight); #endif if (frame->medbuf) frame->medbuf->release(); frame->medbuf = NULL; free(frame); continue; } else { CLog::Log(LOGERROR, "%s - decoding error (%d)\n", CLASSNAME,frame->status); if (frame->medbuf) frame->medbuf->release(); frame->medbuf = NULL; free(frame); continue; } if (frame->format == RENDER_FMT_EGLIMG) { if (!p->eglInitialized) { p->InitializeEGL(frame->width, frame->height); } else if (p->texwidth != frame->width || p->texheight != frame->height) { p->ReleaseEGL(); p->InitializeEGL(frame->width, frame->height); } ANativeWindowBuffer* graphicBuffer = frame->medbuf->graphicBuffer()->getNativeBuffer(); native_window_set_buffers_timestamp(p->natwin.get(), frame->pts * 1000); int err = p->natwin.get()->queueBuffer(p->natwin.get(), graphicBuffer); if (err == 0) frame->medbuf->meta_data()->setInt32(kKeyRendered, 1); frame->medbuf->release(); frame->medbuf = NULL; g_xbmcapp->UpdateStagefrightTexture(); // g_xbmcapp->GetSurfaceTexture()->updateTexImage(); if (!p->drop_state) { // static const EGLint eglImgAttrs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE, EGL_NONE }; // EGLImageKHR img = eglCreateImageKHR(p->eglDisplay, EGL_NO_CONTEXT, // EGL_NATIVE_BUFFER_ANDROID, // (EGLClientBuffer)graphicBuffer->getNativeBuffer(), // eglImgAttrs); p->free_mutex.lock(); stSlot* 
cur_slot = p->getFreeSlot(); if (!cur_slot) { CLog::Log(LOGERROR, "STF: No free output buffers\n"); continue; } p->fbo.BindToTexture(GL_TEXTURE_2D, cur_slot->texid); p->fbo.BeginRender(); glDisable(GL_DEPTH_TEST); //glClear(GL_COLOR_BUFFER_BIT); const GLfloat triangleVertices[] = { -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, -1.0f, 1.0f, 1.0f, }; glVertexAttribPointer(p->mPositionHandle, 2, GL_FLOAT, GL_FALSE, 0, triangleVertices); glEnableVertexAttribArray(p->mPositionHandle); glUseProgram(p->mPgm); glUniform1i(p->mTexSamplerHandle, 0); // glGenTextures(1, &texid); // glBindTexture(GL_TEXTURE_EXTERNAL_OES, texid); // glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, img); glBindTexture(GL_TEXTURE_EXTERNAL_OES, g_xbmcapp->GetAndroidTexture()); GLfloat texMatrix[16]; g_xbmcapp->GetStagefrightTransformMatrix(texMatrix); glUniformMatrix4fv(p->mTexMatrixHandle, 1, GL_FALSE, texMatrix); glDrawArrays(GL_TRIANGLE_FAN, 0, 4); glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0); p->fbo.EndRender(); glBindTexture(GL_TEXTURE_2D, 0); frame->eglimg = cur_slot->eglimg; p->free_mutex.unlock(); } } #if defined(DEBUG_VERBOSE) CLog::Log(LOGDEBUG, "%s: >>> pushed OUT frame; w:%d, h:%d, img:%p, tm:%d\n", CLASSNAME, frame->width, frame->height, frame->eglimg, XbmcThreads::SystemClockMillis() - time); #endif p->out_mutex.lock(); p->cur_frame = frame; while (p->cur_frame) p->out_condition.wait(p->out_mutex); p->out_mutex.unlock(); } while (!decode_done && !m_bStop); if (p->eglInitialized) p->ReleaseEGL(); }
void NuPlayer::GenericSource::readBuffer(
        bool audio, int64_t seekTimeUs, int64_t *actualTimeUs) {
    Track *track = audio ? &mAudioTrack : &mVideoTrack;
    CHECK(track->mSource != NULL);

    if (actualTimeUs) {
        *actualTimeUs = seekTimeUs;
    }

    MediaSource::ReadOptions options;

    bool seeking = false;

    if (seekTimeUs >= 0) {
        options.setSeekTo(seekTimeUs);
        seeking = true;
    }

    for (;;) {
        MediaBuffer *mbuf;
        status_t err = track->mSource->read(&mbuf, &options);

        options.clearSeekTo();

        if (err == OK) {
            size_t outLength = mbuf->range_length();

            if (audio && mAudioIsVorbis) {
                outLength += sizeof(int32_t);
            }

            sp<ABuffer> buffer = new ABuffer(outLength);

            memcpy(buffer->data(),
                   (const uint8_t *)mbuf->data() + mbuf->range_offset(),
                   mbuf->range_length());

            if (audio && mAudioIsVorbis) {
                int32_t numPageSamples;
                if (!mbuf->meta_data()->findInt32(
                            kKeyValidSamples, &numPageSamples)) {
                    numPageSamples = -1;
                }

                memcpy(buffer->data() + mbuf->range_length(),
                       &numPageSamples,
                       sizeof(numPageSamples));
            }

            int64_t timeUs;
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));

            buffer->meta()->setInt64("timeUs", timeUs);

            if (actualTimeUs) {
                *actualTimeUs = timeUs;
            }

            mbuf->release();
            mbuf = NULL;

            if (seeking) {
                track->mPackets->queueDiscontinuity(
                        ATSParser::DISCONTINUITY_SEEK, NULL);
            }

            track->mPackets->queueAccessUnit(buffer);
            break;
        } else if (err == INFO_FORMAT_CHANGED) {
#if 0
            track->mPackets->queueDiscontinuity(
                    ATSParser::DISCONTINUITY_FORMATCHANGE, NULL);
#endif
        } else {
            track->mPackets->signalEOS(err);
            break;
        }
    }
}