static void dumpSource(const sp<MediaSource> &source, const String8 &filename) { FILE *out = fopen(filename.string(), "wb"); CHECK_EQ((status_t)OK, source->start()); status_t err; for (;;) { MediaBuffer *mbuf; err = source->read(&mbuf); if (err == INFO_FORMAT_CHANGED) { continue; } else if (err != OK) { break; } CHECK_EQ( fwrite((const uint8_t *)mbuf->data() + mbuf->range_offset(), 1, mbuf->range_length(), out), (ssize_t)mbuf->range_length()); mbuf->release(); mbuf = NULL; } CHECK_EQ((status_t)OK, source->stop()); fclose(out); out = NULL; }
// Reads and validates the three mandatory Vorbis header packets, then
// computes the stream duration from the final page's granule position
// when the source size is known.
// Returns OK on success, or the first error from packet reading /
// header verification.
status_t MyVorbisExtractor::init() {
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS);

    // The three mandatory Vorbis headers, in stream order:
    // identification (1), comment (3), setup (5).
    static const uint8_t kHeaderTypes[] = { 1, 3, 5 };
    for (size_t i = 0; i < sizeof(kHeaderTypes) / sizeof(kHeaderTypes[0]); ++i) {
        MediaBuffer *packet;
        status_t err = readNextPacket(&packet);
        if (err != OK) {
            return err;
        }
        // range_length() is a size_t, so use %zu.
        ALOGV("read packet of size %zu\n", packet->range_length());
        err = verifyHeader(packet, kHeaderTypes[i]);
        packet->release();
        packet = NULL;
        if (err != OK) {
            return err;
        }
    }

    mFirstDataOffset = mOffset + mCurrentPageSize;

    off64_t size;
    uint64_t lastGranulePosition;
    if (mSource->getSize(&size) == OK
            && findPrevGranulePosition(size, &lastGranulePosition) == OK) {
        // Let's assume it's cheap to seek to the end.
        // The granule position of the final page in the stream will
        // give us the exact duration of the content, something that
        // we can only approximate using avg. bitrate if seeking to
        // the end is too expensive or impossible (live streaming).
        int64_t durationUs = lastGranulePosition * 1000000ll / mVi.rate;
        mMeta->setInt64(kKeyDuration, durationUs);

        buildTableOfContents();
    }

    return OK;
}
// Reads one decoded audio frame into |aFrame|, optionally seeking first.
// aSeekTimeUs == -1 means "no seek". Returns true iff a frame was read OK.
// On INFO_FORMAT_CHANGED the cached format is refreshed and the read is
// retried once via recursion (aSeekTimeUs has been reset to -1 by then).
bool OmxDecoder::ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs)
{
  MOZ_ASSERT(aSeekTimeUs >= -1);

  status_t err;
  if (mAudioMetadataRead && aSeekTimeUs == -1) {
    // Use the data read into the buffer during metadata time
    err = OK;
  } else {
    // Drop any previously held buffer before requesting a new one.
    ReleaseAudioBuffer();
    if (aSeekTimeUs != -1) {
      MediaSource::ReadOptions options;
      options.setSeekTo(aSeekTimeUs);
      err = mAudioSource->read(&mAudioBuffer, &options);
    } else {
      err = mAudioSource->read(&mAudioBuffer);
    }
  }
  // The metadata-time buffer is consumed at most once; subsequent calls
  // (and the retry below) always perform a fresh read with no seek.
  mAudioMetadataRead = false;
  aSeekTimeUs = -1;

  if (err == OK && mAudioBuffer->range_length() != 0) {
    int64_t timeUs;
    if (!mAudioBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
      LOG("no frame time");
      return false;
    }
    if (timeUs < 0) {
      LOG("frame time %lld must be nonnegative", timeUs);
      return false;
    }
    return ToAudioFrame(aFrame, timeUs,
                        mAudioBuffer->data(),
                        mAudioBuffer->range_offset(),
                        mAudioBuffer->range_length(),
                        mAudioChannels, mAudioSampleRate);
  } else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    LOG("mAudioSource INFO_FORMAT_CHANGED");
    if (!SetAudioFormat())
      return false;
    else
      return ReadAudio(aFrame, aSeekTimeUs);
  } else if (err == ERROR_END_OF_STREAM) {
    LOG("mAudioSource END_OF_STREAM");
  } else if (err != OK) {
    LOG("mAudioSource ERROR %#x", err);
  }
  // Note: err == OK with an empty buffer also lands here and returns true
  // without filling |aFrame|.
  return err == OK;
}
// Renders up to NUM_COMBINE_BUFFERS chunks of PCM from the EAS synth into a
// freshly acquired MediaBuffer, stamps it with the current playback time and
// returns it. Returns NULL when playback has stopped/errored or when no
// buffer is available.
MediaBuffer* MidiEngine::readBuffer() {
    EAS_STATE state;
    EAS_State(mEasData, mEasHandle, &state);
    if (state == EAS_STATE_STOPPED || state == EAS_STATE_ERROR) {
        return NULL;
    }

    MediaBuffer *outBuffer;
    if (mGroup->acquire_buffer(&outBuffer) != OK) {
        ALOGE("readBuffer: no buffer");
        return NULL;
    }

    // Timestamp the buffer with the synth's current position (ms -> us).
    EAS_I32 timeMs;
    EAS_GetLocation(mEasData, mEasHandle, &timeMs);
    outBuffer->meta_data()->setInt64(kKeyTime, 1000ll * timeMs);

    // Fill the buffer by rendering several mix buffers back to back; stop
    // early if the renderer reports a failure.
    EAS_PCM* writePos = (EAS_PCM*) outBuffer->data();
    int bytesFilled = 0;
    for (int pass = 0; pass < NUM_COMBINE_BUFFERS; pass++) {
        EAS_I32 samplesRendered;
        EAS_RESULT result =
                EAS_Render(mEasData, writePos, mEasConfig->mixBufferSize, &samplesRendered);
        if (result != EAS_SUCCESS) {
            ALOGE("EAS_Render returned %ld", result);
            break;
        }
        writePos += samplesRendered * mEasConfig->numChannels;
        bytesFilled += samplesRendered * mEasConfig->numChannels * sizeof(EAS_PCM);
    }
    outBuffer->set_range(0, bytesFilled);

    ALOGV("readBuffer: returning %zd in buffer %p", outBuffer->range_length(), outBuffer);
    return outBuffer;
}
void VideoCodec::encode_video() { LOGD("VideoCodec::%s", __FUNCTION__); uint32_t framecount = 0; for (; ;) { MediaBuffer *mVideoBuffer; MediaSource::ReadOptions options; LOGD("try read frame"); status_t err = mVideoEncoder->read(&mVideoBuffer, &options); LOGD("encode read ret = 0x%08x", err); if (err == OK) { if (mVideoBuffer->range_length() > 0) { // If video frame availabe, render it to mNativeWindow // sp<MetaData> metaData = mVideoBuffer->meta_data(); // int64_t timeUs = 0; // metaData->findInt64(kKeyTime, &timeUs) // native_window_set_buffers_timestamp(mNativeWindow.get(), timeUs * 1000); // err = mNativeWindow->queueBuffer(mNativeWindow.get(), // mVideoBuffer->graphicBuffer().get()); // if (err == 0) { // metaData->setInt32(kKeyRendered, 1); // } framecount++; LOGD("encode frame success, framecount=%d", framecount); } if(framecount > 300){ break; } mVideoBuffer->release(); } // if (mMp4Write->reachedEOS()) { // LOGD("VideoCodec EOF"); // break; // } } }
// Reads and validates the three mandatory Vorbis header packets
// (identification, comment, setup) and records where the audio data starts.
// Returns OK on success, or the first error encountered.
status_t MyVorbisExtractor::init() {
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS);

    // Header packet types in mandated stream order: 1, 3, 5.
    static const uint8_t kHeaderTypes[] = { 1, 3, 5 };
    for (size_t i = 0; i < sizeof(kHeaderTypes) / sizeof(kHeaderTypes[0]); ++i) {
        MediaBuffer *packet;
        status_t err = readNextPacket(&packet);
        if (err != OK) {
            return err;
        }
        // range_length() is a size_t, so use %zu.
        LOGV("read packet of size %zu\n", packet->range_length());
        err = verifyHeader(packet, kHeaderTypes[i]);
        packet->release();
        packet = NULL;
        if (err != OK) {
            return err;
        }
    }

    // Audio data begins immediately after the page holding the last header.
    mFirstDataOffset = mOffset + mCurrentPageSize;

    return OK;
}
static void performSeekTest(const sp<MediaSource> &source) { CHECK_EQ((status_t)OK, source->start()); int64_t durationUs; CHECK(source->getFormat()->findInt64(kKeyDuration, &durationUs)); for (int64_t seekTimeUs = 0; seekTimeUs <= durationUs; seekTimeUs += 60000ll) { MediaSource::ReadOptions options; options.setSeekTo( seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC); MediaBuffer *buffer; status_t err; for (;;) { err = source->read(&buffer, &options); options.clearSeekTo(); if (err == INFO_FORMAT_CHANGED) { CHECK(buffer == NULL); continue; } if (err != OK) { CHECK(buffer == NULL); break; } if (buffer->range_length() > 0) { break; } CHECK(buffer != NULL); buffer->release(); buffer = NULL; } if (err == OK) { int64_t timeUs; CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs)); printf("%lld\t%lld\t%lld\n", seekTimeUs, timeUs, seekTimeUs - timeUs); buffer->release(); buffer = NULL; } else { printf("ERROR\n"); break; } } CHECK_EQ((status_t)OK, source->stop()); }
// Reads one decoded audio frame into |aFrame|, optionally seeking first
// (aSeekTimeUs == -1 means "no seek"). Returns true iff a frame was
// successfully delivered or the read completed with status OK.
bool OmxDecoder::ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs)
{
  status_t err;
  if (mAudioMetadataRead && aSeekTimeUs == -1) {
    // Use the data read into the buffer during metadata time
    err = OK;
  } else {
    ReleaseAudioBuffer();
    if (aSeekTimeUs != -1) {
      MediaSource::ReadOptions options;
      options.setSeekTo(aSeekTimeUs);
      err = mAudioSource->read(&mAudioBuffer, &options);
    } else {
      err = mAudioSource->read(&mAudioBuffer);
    }
  }
  // Metadata-time buffer is consumed at most once; retries read afresh.
  mAudioMetadataRead = false;
  aSeekTimeUs = -1;

  if (err == OK && mAudioBuffer->range_length() != 0) {
    int64_t timeUs;
    if (!mAudioBuffer->meta_data()->findInt64(kKeyTime, &timeUs))
      return false;
    return ToAudioFrame(aFrame, timeUs,
                        mAudioBuffer->data(),
                        mAudioBuffer->range_offset(),
                        mAudioBuffer->range_length(),
                        mAudioChannels, mAudioSampleRate);
  } else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info and retry the read;
    // previously a successful SetAudioFormat() fell through and returned
    // true without ever delivering a frame.
    if (!SetAudioFormat())
      return false;
    return ReadAudio(aFrame, aSeekTimeUs);
  } else if (err == ERROR_END_OF_STREAM) {
    return false;
  }
  // Report failure for any other non-OK status as well; the old code
  // returned true for arbitrary errors.
  return err == OK;
}
// Returns the next media sample in |*out|, honoring an optional seek.
// For AVC tracks with length-prefixed NAL fragments, the sample is
// repacked so every fragment is preceded by a 4-byte Annex-B start code.
status_t MatroskaSource::read(
        MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;

    int64_t targetSampleTimeUs = -1ll;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    // Seeking is skipped entirely for live streams.
    if (options && options->getSeekTo(&seekTimeUs, &mode)
            && !mExtractor->isLiveStreaming()) {
        clearPendingFrames();

        // The audio we want is located by using the Cues to seek the video
        // stream to find the target Cluster then iterating to finalize for
        // audio.
        int64_t actualFrameTimeUs;
        mBlockIter.seek(seekTimeUs, mIsAudio, &actualFrameTimeUs);

        if (mode == ReadOptions::SEEK_CLOSEST) {
            targetSampleTimeUs = actualFrameTimeUs;
        }
    }

    // Refill the pending-frame queue until at least one frame is available.
    while (mPendingFrames.empty()) {
        status_t err = readBlock();

        if (err != OK) {
            clearPendingFrames();
            return err;
        }
    }

    MediaBuffer *frame = *mPendingFrames.begin();
    mPendingFrames.erase(mPendingFrames.begin());

    // Non-AVC tracks (or mNALSizeLen == 0) are delivered as-is.
    if (mType != AVC || mNALSizeLen == 0) {
        if (targetSampleTimeUs >= 0ll) {
            frame->meta_data()->setInt64(
                    kKeyTargetTime, targetSampleTimeUs);
        }

        *out = frame;

        return OK;
    }

    // Each input frame contains one or more NAL fragments, each fragment
    // is prefixed by mNALSizeLen bytes giving the fragment length,
    // followed by a corresponding number of bytes containing the fragment.
    // We output all these fragments into a single large buffer separated
    // by startcodes (0x00 0x00 0x00 0x01).
    //
    // When mNALSizeLen is 0, we assume the data is already in the format
    // desired.

    const uint8_t *srcPtr =
        (const uint8_t *)frame->data() + frame->range_offset();

    size_t srcSize = frame->range_length();

    size_t dstSize = 0;
    MediaBuffer *buffer = NULL;
    uint8_t *dstPtr = NULL;

    // Two passes: pass 0 measures the output size (and allocates / aliases
    // the destination buffer), pass 1 writes start codes and payloads.
    for (int32_t pass = 0; pass < 2; ++pass) {
        size_t srcOffset = 0;
        size_t dstOffset = 0;
        while (srcOffset + mNALSizeLen <= srcSize) {
            size_t NALsize;
            switch (mNALSizeLen) {
                case 1: NALsize = srcPtr[srcOffset]; break;
                case 2: NALsize = U16_AT(srcPtr + srcOffset); break;
                case 3: NALsize = U24_AT(srcPtr + srcOffset); break;
                case 4: NALsize = U32_AT(srcPtr + srcOffset); break;
                default:
                    TRESPASS();
            }

            // Guard against size_t wrap-around: if adding NALsize does not
            // move the offset forward, the declared size is malformed.
            if (srcOffset + mNALSizeLen + NALsize <= srcOffset + mNALSizeLen) {
                frame->release();
                frame = NULL;

                return ERROR_MALFORMED;
            } else if (srcOffset + mNALSizeLen + NALsize > srcSize) {
                break;
            }

            if (pass == 1) {
                memcpy(&dstPtr[dstOffset], "\x00\x00\x00\x01", 4);

                // When the output aliases the input (frame == buffer) the
                // payload is already in place; only the prefix is rewritten.
                if (frame != buffer) {
                    memcpy(&dstPtr[dstOffset + 4],
                           &srcPtr[srcOffset + mNALSizeLen],
                           NALsize);
                }
            }

            dstOffset += 4;  // 0x00 00 00 01
            dstOffset += NALsize;

            srcOffset += mNALSizeLen + NALsize;
        }

        if (srcOffset < srcSize) {
            // There were trailing bytes or not enough data to complete
            // a fragment.

            frame->release();
            frame = NULL;

            return ERROR_MALFORMED;
        }

        if (pass == 0) {
            dstSize = dstOffset;

            if (dstSize == srcSize && mNALSizeLen == 4) {
                // In this special case we can re-use the input buffer by substituting
                // each 4-byte nal size with a 4-byte start code
                buffer = frame;
            } else {
                buffer = new MediaBuffer(dstSize);
            }

            // Carry the timing / sync-frame metadata over to the output.
            int64_t timeUs;
            CHECK(frame->meta_data()->findInt64(kKeyTime, &timeUs));
            int32_t isSync;
            CHECK(frame->meta_data()->findInt32(kKeyIsSyncFrame, &isSync));

            buffer->meta_data()->setInt64(kKeyTime, timeUs);
            buffer->meta_data()->setInt32(kKeyIsSyncFrame, isSync);

            dstPtr = (uint8_t *)buffer->data();
        }
    }

    // Release the source frame unless the output aliases it.
    if (frame != buffer) {
        frame->release();
        frame = NULL;
    }

    if (targetSampleTimeUs >= 0ll) {
        buffer->meta_data()->setInt64(
                kKeyTargetTime, targetSampleTimeUs);
    }

    *out = buffer;

    return OK;
}
// Writer thread: pulls buffers from mSource and appends them to mFile,
// tracking estimated size/duration, honoring pause/resume, and enforcing
// the configured file size / duration limits. Writes the QCELP or EVRC
// header on completion. Returns OK on normal EOS/timeout, else the error.
status_t ExtendedWriter::threadFunc() {
    mEstimatedDurationUs = 0;
    mEstimatedSizeBytes = 0;
    bool stoppedPrematurely = true;
    int64_t previousPausedDurationUs = 0;
    int64_t maxTimestampUs = 0;
    status_t err = OK;

    pid_t tid  = gettid();
    androidSetThreadPriority(tid, ANDROID_PRIORITY_AUDIO);
    prctl(PR_SET_NAME, (unsigned long)"ExtendedWriter", 0, 0, 0);

    while (!mDone) {
        MediaBuffer *buffer;
        err = mSource->read(&buffer);

        if (err != OK) {
            break;
        }

        if (mPaused) {
            buffer->release();
            buffer = NULL;
            continue;
        }

        mEstimatedSizeBytes += buffer->range_length();
        if (exceedsFileSizeLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
            break;
        }

        int64_t timestampUs;
        // Fixed: "&timestampUs" had been corrupted into the entity-mangled
        // token "×tampUs", which does not compile.
        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));
        if (timestampUs > mEstimatedDurationUs) {
            mEstimatedDurationUs = timestampUs;
        }
        if (mResumed) {
            // Collapse the paused gap (minus one 20 ms frame) out of the
            // timestamp timeline.
            previousPausedDurationUs += (timestampUs - maxTimestampUs - 20000);
            mResumed = false;
        }
        timestampUs -= previousPausedDurationUs;
        ALOGV("time stamp: %lld, previous paused duration: %lld",
                timestampUs, previousPausedDurationUs);
        if (timestampUs > maxTimestampUs) {
            maxTimestampUs = timestampUs;
        }

        if (exceedsFileDurationLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
            break;
        }

        ssize_t n = fwrite(
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                1,
                buffer->range_length(),
                mFile);
        mOffset += n;

        if (n < (ssize_t)buffer->range_length()) {
            buffer->release();
            buffer = NULL;
            break;
        }

        // XXX: How to tell it is stopped prematurely?
        if (stoppedPrematurely) {
            stoppedPrematurely = false;
        }

        buffer->release();
        buffer = NULL;
    }

    if (stoppedPrematurely) {
        notify(MEDIA_RECORDER_EVENT_INFO,
               MEDIA_RECORDER_TRACK_INFO_COMPLETION_STATUS, UNKNOWN_ERROR);
    }

    // Prepend the container-specific header for the recorded format.
    if ( mFormat == AUDIO_FORMAT_QCELP ) {
        writeQCPHeader( );
    }
    else if ( mFormat == AUDIO_FORMAT_EVRC ) {
        writeEVRCHeader( );
    }

    fflush(mFile);
    fclose(mFile);
    mFile = NULL;
    mReachedEOS = true;

    // EOS and timeout both count as successful completion.
    if (err == ERROR_END_OF_STREAM || (err == -ETIMEDOUT)) {
        return OK;
    }
    return err;
}
// Decodes a single video frame (thumbnail or the frame nearest
// |frameTimeUs|) from |source| and converts it to RGB565.
// Returns a heap-allocated VideoFrame on success, NULL on any failure.
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client,
        const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source,
        uint32_t flags,
        int64_t frameTimeUs,
        int seekMode) {

    sp<MetaData> format = source->getFormat();

    // XXX:
    // Once all vendors support OMX_COLOR_FormatYUV420Planar, we can
    // remove this check and always set the decoder output color format
    if (isYUV420PlanarSupported(client, trackMeta)) {
        format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
    }

    sp<MediaSource> decoder =
        OMXCodec::Create(
                client->interface(), format, false, source,
                NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        ALOGV("unable to instantiate video decoder.");

        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        ALOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.

    MediaSource::ReadOptions options;
    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {

        ALOGE("Unknown seek mode: %d", seekMode);
        return NULL;
    }

    MediaSource::ReadOptions::SeekMode mode =
            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);

    int64_t thumbNailTime;
    if (frameTimeUs < 0) {
        // No explicit time requested: use the track's thumbnail time,
        // falling back to time 0.
        if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options.setSeekTo(thumbNailTime, mode);
    } else {
        thumbNailTime = -1;
        options.setSeekTo(frameTimeUs, mode);
    }

    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            buffer->release();
            buffer = NULL;
        }
        err = decoder->read(&buffer, &options);
        // The seek only applies to the first read.
        options.clearSeekTo();
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        CHECK(buffer == NULL);

        ALOGV("decoding frame failed.");
        decoder->stop();

        return NULL;
    }

    ALOGV("successfully decoded video frame.");

    int32_t unreadable;
    if (buffer->meta_data()->findInt32(
                kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        // e.g. secure buffers: content exists but is not CPU-accessible.
        ALOGV("video frame is unreadable, decoder does not give us access "
             "to the video data.");

        buffer->release();
        buffer = NULL;

        decoder->stop();

        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));

            ALOGV("thumbNailTime = %" PRId64 " us, timeUs = %" PRId64 " us, mime = %s",
                 thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    // Default the crop rect to the full frame when absent.
    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!meta->findRect(
                kKeyCropRect,
                &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    VideoFrame *frame = new VideoFrame;
    frame->mWidth = crop_right - crop_left + 1;
    frame->mHeight = crop_bottom - crop_top + 1;
    frame->mDisplayWidth = frame->mWidth;
    frame->mDisplayHeight = frame->mHeight;
    // 2 bytes per pixel for RGB565 output.
    frame->mSize = frame->mWidth * frame->mHeight * 2;
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    int32_t displayWidth, displayHeight;
    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
        frame->mDisplayWidth = displayWidth;
    }
    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
        frame->mDisplayHeight = displayHeight;
    }

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

    ColorConverter converter(
            (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);

    if (converter.isValid()) {
        err = converter.convert(
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                frame->mData,
                frame->mWidth,
                frame->mHeight,
                0, 0, frame->mWidth - 1, frame->mHeight - 1);
    } else {
        ALOGE("Unable to instantiate color conversion from format 0x%08x to "
              "RGB565",
              srcFormat);

        err = ERROR_UNSUPPORTED;
    }

    buffer->release();
    buffer = NULL;

    decoder->stop();

    if (err != OK) {
        ALOGE("Colorconverter failed to convert frame.");

        // NOTE(review): assumes ~VideoFrame releases mData; otherwise this
        // path leaks the pixel buffer — confirm against VideoFrame's dtor.
        delete frame;
        frame = NULL;
    }

    return frame;
}
// Writer thread: pulls buffers from mSource and appends them to mFd,
// tracking estimated size/duration, honoring pause/resume, and enforcing
// file size / duration limits. Returns OK on normal EOS/timeout.
status_t AMRWriter::threadFunc() {
    mEstimatedDurationUs = 0;
    mEstimatedSizeBytes = 0;
    bool stoppedPrematurely = true;
    int64_t previousPausedDurationUs = 0;
    int64_t maxTimestampUs = 0;
    status_t err = OK;

    prctl(PR_SET_NAME, (unsigned long)"AMRWriter", 0, 0, 0);
    while (!mDone) {
        MediaBuffer *buffer;
        err = mSource->read(&buffer);

        if (err != OK) {
            break;
        }

        if (mPaused) {
            buffer->release();
            buffer = NULL;
            continue;
        }

        mEstimatedSizeBytes += buffer->range_length();
        if (exceedsFileSizeLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
            break;
        }

        int64_t timestampUs;
        // Fixed: "&timestampUs" had been corrupted into the entity-mangled
        // token "×tampUs", which does not compile.
        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));
        if (timestampUs > mEstimatedDurationUs) {
            mEstimatedDurationUs = timestampUs;
        }
        if (mResumed) {
            // Collapse the paused gap (minus one 20 ms frame) out of the
            // timestamp timeline.
            previousPausedDurationUs += (timestampUs - maxTimestampUs - 20000);
            mResumed = false;
        }
        timestampUs -= previousPausedDurationUs;
        ALOGV("time stamp: %lld, previous paused duration: %lld",
                timestampUs, previousPausedDurationUs);
        if (timestampUs > maxTimestampUs) {
            maxTimestampUs = timestampUs;
        }

        if (exceedsFileDurationLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
            break;
        }

        ssize_t n = write(mFd,
                        (const uint8_t *)buffer->data() + buffer->range_offset(),
                        buffer->range_length());

        if (n < (ssize_t)buffer->range_length()) {
            buffer->release();
            buffer = NULL;
            break;
        }

        // XXX: How to tell it is stopped prematurely?
        if (stoppedPrematurely) {
            stoppedPrematurely = false;
        }

        buffer->release();
        buffer = NULL;
    }

    if (stoppedPrematurely) {
        notify(MEDIA_RECORDER_EVENT_INFO,
               MEDIA_RECORDER_TRACK_INFO_COMPLETION_STATUS, UNKNOWN_ERROR);
    }

    close(mFd);
    mFd = -1;
    mReachedEOS = true;

    // Fixed: the original used "(err = -ETIMEDOUT)" — an assignment, which
    // made this condition always true and clobbered err. Both EOS and a
    // genuine timeout count as successful completion.
    if ((err == ERROR_END_OF_STREAM) || (err == -ETIMEDOUT)) {
        return OK;
    }
    return err;
}
// Reads one decoded video frame into |aFrame|, optionally seeking first
// (aSeekTimeUs == -1 means "no seek"). Returns false only on hard
// failures (missing timestamp, conversion failure, EOS); note that an
// empty-but-OK read, a format change, and even other read errors return
// true without filling |aFrame|.
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs)
{
  if (!mVideoSource.get())
    return false;

  // Drop the previously held buffer before requesting a new one.
  ReleaseVideoBuffer();

  status_t err;

  if (aSeekTimeUs != -1) {
    MediaSource::ReadOptions options;
    options.setSeekTo(aSeekTimeUs);
    err = mVideoSource->read(&mVideoBuffer, &options);
  } else {
    err = mVideoSource->read(&mVideoBuffer);
  }

  if (err == OK && mVideoBuffer->range_length() > 0) {
    int64_t timeUs;
    int32_t unreadable;
    int32_t keyFrame;

    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs) ) {
      LOG("no key time");
      return false;
    }

    // Sync-frame and unreadable flags are optional metadata; default to 0.
    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
       keyFrame = 0;
    }

    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)) {
      unreadable = 0;
    }

    LOG("data: %p size: %u offset: %u length: %u unreadable: %d",
        mVideoBuffer->data(),
        mVideoBuffer->size(),
        mVideoBuffer->range_offset(),
        mVideoBuffer->range_length(),
        unreadable);

    char *data = reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
    size_t length = mVideoBuffer->range_length();

    if (unreadable) {
      LOG("video frame is unreadable");
    }

    if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) {
      return false;
    }
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    return SetVideoFormat();
  }
  else if (err == ERROR_END_OF_STREAM) {
    return false;
  }

  // NOTE(review): any other non-OK error falls through to true here —
  // confirm callers rely on that before tightening it.
  return true;
}
// Writer thread (MTK variant): pulls Ogg pages from mSource, patches their
// granule-position field to remove paused samples, and appends them to
// mFile, enforcing file size / duration limits. Returns OK on normal EOS.
status_t OggWriter::threadFunc() {
    mEstimatedDurationUs = 0;
    mEstimatedSizeBytes = 0;
    bool stoppedPrematurely = true;
    int64_t previousPausedDurationUs = 0;
    int64_t maxTimestampUs = 0;
    status_t err = OK;
    int64_t ltotalSize = 0;
    int64_t timestampUs = 0;
    //
    int64_t tsUsPauseBeg = 0;
    int64_t tsUsPauseEnd = 0;
    //paused sample count
    int64_t smpPaused = 0;
    //
    prctl(PR_SET_NAME, (unsigned long)"OggWriter", 0, 0, 0);

    //struct sched_param param;
    //param.sched_priority = RTPM_PRIO_OMX_AUDIO;
    // Fixed: the pointer argument below had been entity-mangled to "¶m".
    //sched_setscheduler(0, SCHED_RR, &param);
    //androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO);

    //while (!mDone) {
    while (1 == 1) {
        MediaBuffer *buffer = NULL;
        MediaBuffer *buffer1 = NULL;
        // Once stopping, feed an empty buffer so the encoder can flush.
        if (mDone) {
            buffer = new MediaBuffer(0);
            buffer1 = buffer;
        }
        LOGV("OggWriter::threadFunc:mSource->read+:buffer=%p,buffer1=%p", buffer, buffer1);
        err = mSource->read(&buffer);
        LOGV("OggWriter::threadFunc:mSource->read-,err=%d,buffer=%p", err, buffer);

        if (err != OK) {
            break;
        }
        LOGV("OggWriter::threadFunc:buffer->range_length()=%d", buffer->range_length());
        //buffer->range_length() == 0, ogg encoder SWIP caching data
        if (mPaused || buffer->range_length() == 0) {
            //mtk80721 deal pause time error+
            if (mPaused && mPausedflag) {
                buffer->meta_data()->findInt64(kKeyTime, &tsUsPauseBeg);
                LOGD("OggWriter::threadFunc,pausetime=%d,tsUsPauseBeg=%lld",
                        iPausedTime, tsUsPauseBeg);
                mPausedflag = false;
            }
            //-
            if (buffer->range_length() > 0) {
                //not vorbis header data should be released
                if (memcmp(buffer->data() + 29, "vorbis", 6) != 0) {
                    buffer->release();
                    buffer = NULL;
                    continue;
                } else {
                    LOGD("ogg header:buffer=%p,size=%d", buffer, buffer->range_length());
                }
            } else {
                buffer->release();
                buffer = NULL;
                continue;
            }
        }

        mEstimatedSizeBytes += buffer->range_length();
        if (exceedsFileSizeLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
            break;
        }

        // Fixed: "&timestampUs" had been entity-mangled into "×tampUs".
        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));
        if (timestampUs > 0) {
            if (timestampUs > mEstimatedDurationUs) {
                mEstimatedDurationUs = timestampUs;
            }
            if (mResumed) {
                //mtk80721 deal pause time error+
                buffer->meta_data()->findInt64(kKeyTime, &tsUsPauseEnd);
                LOGD("previousPausedDurationUs =%lld,pausetime=%d,tsUsPauseBeg=%lld,tsUsPauseEnd=%lld",
                        previousPausedDurationUs, iPausedTime, tsUsPauseBeg, tsUsPauseEnd);
                previousPausedDurationUs =
                        previousPausedDurationUs + (tsUsPauseEnd - tsUsPauseBeg);
                smpPaused = previousPausedDurationUs * mSampleRate / 1000000ll;
                LOGD("previousPausedDurationUs =%lld,samplecount=%lld",
                        previousPausedDurationUs, smpPaused);
                //previousPausedDurationUs += (timestampUs - maxTimestampUs - 20000);
                //-
                mResumed = false;
            }

            timestampUs -= previousPausedDurationUs;
            LOGV("time stamp: %lld, previous paused duration: %lld",
                    timestampUs, previousPausedDurationUs);
            if (timestampUs > maxTimestampUs) {
                maxTimestampUs = timestampUs;
            }
        }

        if (exceedsFileDurationLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
            break;
        }
        LOGV("OggWriter::threadFunc:fwrite");

        //write timestamp
        // The Ogg page header stores the granule position at byte offset 6;
        // subtract the samples spent paused so playback timing stays correct.
        uint8_t *ptimestamp = (uint8_t *)buffer->data() + buffer->range_offset() + 6;
        uint64_t ts = U64LE_AT(ptimestamp);
        if (smpPaused > 0) {
            ts -= smpPaused;
            memcpy(ptimestamp, &ts, sizeof(int64_t));
        }

        ssize_t n = fwrite(
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                1,
                buffer->range_length(),
                mFile);
        ltotalSize += n;

        if (n < (ssize_t)buffer->range_length()) {
            buffer->release();
            buffer = NULL;
            break;
        }

        // XXX: How to tell it is stopped prematurely?
        if (stoppedPrematurely) {
            stoppedPrematurely = false;
        }
        LOGV("OggWriter::threadFunc:buffer->release:buffer=%p", buffer);
        buffer->release();
        buffer = NULL;
    }

    if (stoppedPrematurely) {
        notify(MEDIA_RECORDER_EVENT_INFO,
               MEDIA_RECORDER_TRACK_INFO_COMPLETION_STATUS, UNKNOWN_ERROR);
    }

    /*
    //
    int bitrate = ltotalSize / (timestampUs/1E6) * 8;
    LOGV("ltotalSize=%lld, timestampUs=%lld, bitrate=%d",ltotalSize, timestampUs, bitrate);
    //seek to the bitrate field
    fseek(mFile, 44, SEEK_SET);
    // max bitrate
    fwrite(&bitrate, 1, sizeof(int), mFile);
    // nominal bitrate
    fwrite(&bitrate, 1, sizeof(int), mFile);
    // min bitrate
    fwrite(&bitrate, 1, sizeof(int), mFile);
    */

    fflush(mFile);
    fclose(mFile);
    mFile = NULL;
    mReachedEOS = true;
    if (err == ERROR_END_OF_STREAM) {
        return OK;
    }
    return err;
}
// Writer thread: pulls AAC frames from mSource and writes each to mFd as
// an ADTS header followed by the compressed payload, honoring pause/resume
// and file size / duration limits. Returns OK on normal EOS.
status_t AACWriter::threadFunc() {
    mEstimatedDurationUs = 0;
    mEstimatedSizeBytes = 0;
    int64_t previousPausedDurationUs = 0;
    int64_t maxTimestampUs = 0;
    status_t err = OK;

    prctl(PR_SET_NAME, (unsigned long)"AACWriterThread", 0, 0, 0);

    while (!mDone && err == OK) {
        MediaBuffer *buffer;
        err = mSource->read(&buffer);

        if (err != OK) {
            break;
        }

        if (mPaused) {
            buffer->release();
            buffer = NULL;
            continue;
        }

        mEstimatedSizeBytes += kAdtsHeaderLength + buffer->range_length();
        if (exceedsFileSizeLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
            break;
        }

        // Codec-config (CSD) buffers carry no audio payload; skip them.
        int32_t isCodecSpecific = 0;
        if (buffer->meta_data()->findInt32(kKeyIsCodecConfig, &isCodecSpecific)
                && isCodecSpecific) {
            ALOGV("Drop codec specific info buffer");
            buffer->release();
            buffer = NULL;
            continue;
        }

        int64_t timestampUs;
        // Fixed: "&timestampUs" had been corrupted into the entity-mangled
        // token "×tampUs", which does not compile.
        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));
        if (timestampUs > mEstimatedDurationUs) {
            mEstimatedDurationUs = timestampUs;
        }
        if (mResumed) {
            // Collapse the paused gap (minus one frame) out of the timeline.
            previousPausedDurationUs += (timestampUs - maxTimestampUs - mFrameDurationUs);
            mResumed = false;
        }
        timestampUs -= previousPausedDurationUs;
        ALOGV("time stamp: %lld, previous paused duration: %lld",
                timestampUs, previousPausedDurationUs);
        if (timestampUs > maxTimestampUs) {
            maxTimestampUs = timestampUs;
        }

        if (exceedsFileDurationLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
            break;
        }

        // Each output AAC audio frame to the file contains
        // 1. an ADTS header, followed by
        // 2. the compressed audio data.
        ssize_t dataLength = buffer->range_length();
        uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset();
        if (writeAdtsHeader(kAdtsHeaderLength + dataLength) != OK
                || dataLength != write(mFd, data, dataLength)) {
            err = ERROR_IO;
        }

        buffer->release();
        buffer = NULL;
    }

    close(mFd);
    mFd = -1;
    mReachedEOS = true;

    if (err == ERROR_END_OF_STREAM) {
        return OK;
    }
    return err;
}
/*! * \brief The decode thread function. * The main Loop is used to read data from decoder, and put them to render. */ void UMMediaPlayer::videoEntry(void) { bool eof = false; MediaBuffer *lastBuffer = NULL; MediaBuffer *buffer; MediaSource::ReadOptions options; mVideoStartTime = 0; dcount =0; UMLOG_ERR("videoEntry() ---- mVideoDecoder->start() begin"); status_t err = mVideoDecoder->start(); if(err != OK) { UMLOG_ERR("videoEntry() ---- mVideoDecoder->start() end failed err=%d", err); mHasDspError = 1; mPlaying = false; if(mVideoSource->HasAnyDataSource()) { mVideoSource->read(&buffer, &options); releaseBufferIfNonNULL(&buffer); } } while(mPlaying && !mVideoSource->HasAnyDataSource()) { usleep(50 * 1000); } while(mPlaying) { status_t err = mVideoDecoder->read(&buffer, &options); options.clearSeekTo(); if(err == INFO_FORMAT_CHANGED) { UMLOG_ERR("VideoSource signalled format change."); if(mVideoRenderer != NULL) { initRenderer(); } continue; } if(err != OK && buffer == NULL) { mHasDspError = 1; mPlaying = false; continue; } dcount++; printPDecodeDateToFile((char*)buffer->data()); if(dcount == 10){ if(fd != NULL){ fclose(fd); } } if(buffer == NULL) { usleep(3000); eof = true; continue; } CHECK((err == OK && buffer != NULL) || (err != OK && buffer == NULL)); if(err != OK) { eof = true; mPlaying = false; continue; } if(buffer->range_length() == 0) { buffer->release(); buffer = NULL; continue; } int64_t timeUs; CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs)); if(0 == timeUs) { unsigned int currTS = mVideoSource->um_util_getCurrentTick(); if(currTS <= lastRenderTS + UM_RENDER_MAX_INTERVAL) { //There is a frame has been pushed into render in last UM_RENDER_MAX_INTERVAL ms //skip the compensate frame then buffer->release(); buffer = NULL; continue; } /* push componsate frame to render */ } displayOrDiscardFrame(&lastBuffer, buffer, 0); } releaseBufferIfNonNULL(&lastBuffer); shutdownVideoDecoder(); }
// Decode a single video frame from |source| for thumbnail/metadata
// extraction: create an OMX decoder with kClientNeedsFramebuffer, seek to
// |frameTimeUs| (or the track's kKeyThumbnailTime when frameTimeUs < 0),
// read one non-empty output buffer while skipping format-change
// notifications, and convert it to an RGB565 VideoFrame.
// Returns NULL on any failure; the caller owns the returned frame.
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client, const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source, uint32_t flags,
        int64_t frameTimeUs, int seekMode) {
#ifdef OMAP_ENHANCEMENT
    flags |= OMXCodec::kPreferThumbnailMode;
#ifdef TARGET_OMAP4
    int32_t isInterlaced = false;
    //Call config parser to update profile,level,interlaced,reference frame data
    updateMetaData(trackMeta);
    trackMeta->findInt32(kKeyVideoInterlaced, &isInterlaced);
    if(isInterlaced) {
        flags |= OMXCodec::kPreferInterlacedOutputContent;
    }
#endif
#endif
    sp<MediaSource> decoder = OMXCodec::Create(
            client->interface(), source->getFormat(), false, source,
            NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        LOGV("unable to instantiate video decoder.");
        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        LOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.

    MediaSource::ReadOptions options;
    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {
        LOGE("Unknown seek mode: %d", seekMode);
        return NULL;
    }

    MediaSource::ReadOptions::SeekMode mode =
            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);

    int64_t thumbNailTime;
    if (frameTimeUs < 0
            && trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) {
        // No explicit time requested: use the container's thumbnail time.
        options.setSeekTo(thumbNailTime, mode);
    } else {
        thumbNailTime = -1;
        options.setSeekTo(frameTimeUs < 0 ? 0 : frameTimeUs, mode);
    }

    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            // Previous iteration produced an empty buffer; recycle it.
            buffer->release();
            buffer = NULL;
        }
        err = decoder->read(&buffer, &options);
#ifdef OMAP_ENHANCEMENT
        if(err == INFO_FORMAT_CHANGED) {
            int32_t w1,h1;
            decoder->getFormat()->findInt32(kKeyWidth, &w1);
            decoder->getFormat()->findInt32(kKeyHeight, &h1);
            LOGD("Got portreconfig event. New WxH %dx%d. wait 5mS for port to be enabled",w1,h1);
            usleep(5000); //sleep 5mS for port disable-enable to complete
        }
#endif
        // Only seek on the first read.
        options.clearSeekTo();
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        CHECK_EQ(buffer, NULL);
        LOGV("decoding frame failed.");
        decoder->stop();
        return NULL;
    }

    LOGV("successfully decoded video frame.");

    int32_t unreadable;
    if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        // Secure/protected buffers: the CPU cannot read the pixel data.
        LOGV("video frame is unreadable, decoder does not give us access "
             "to the video data.");
        buffer->release();
        buffer = NULL;
        decoder->stop();
        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            // Informational only: decoder returned a nearby frame.
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
            LOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
                 thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    VideoFrame *frame = new VideoFrame;
#if defined(OMAP_ENHANCEMENT) && defined(TARGET_OMAP4)
    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));
    int32_t format;
    const char *component;

    //cache the display width and height
    int32_t displayWidth, displayHeight;
    displayWidth = width;
    displayHeight = height;

    // The decoder may pad the buffer; use padded dims for conversion and
    // display dims for the output frame where the format allows it.
    //update width & height with the buffer width&height
    if(!(meta->findInt32(kKeyPaddedWidth, &width))) {
        CHECK(meta->findInt32(kKeyWidth, &width));
    }
    if(!(meta->findInt32(kKeyPaddedHeight, &height))) {
        CHECK(meta->findInt32(kKeyHeight, &height));
    }
    LOGD("VideoFrame WxH %dx%d", displayWidth, displayHeight);
    if(((OMX_COLOR_FORMATTYPE)srcFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar) ||
       ((OMX_COLOR_FORMATTYPE)srcFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar_Sequential_TopBottom)){
        frame->mWidth = displayWidth;
        frame->mHeight = displayHeight;
        frame->mDisplayWidth = displayWidth;
        frame->mDisplayHeight = displayHeight;
        frame->mSize = displayWidth * displayHeight * 2;   // RGB565: 2 bytes/pixel
        frame->mData = new uint8_t[frame->mSize];
        frame->mRotationAngle = rotationAngle;
    }else {
        frame->mWidth = width;
        frame->mHeight = height;
        frame->mDisplayWidth = width;
        frame->mDisplayHeight = height;
        frame->mSize = width * height * 2;
        frame->mData = new uint8_t[frame->mSize];
        frame->mRotationAngle = rotationAngle;
    }

    if(((OMX_COLOR_FORMATTYPE)srcFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar) ||
       ((OMX_COLOR_FORMATTYPE)srcFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar_Sequential_TopBottom)){
        ColorConverter converter(
                (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);
        CHECK(converter.isValid());
        converter.convert(
                width, height,
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                0, //1D buffer in 1.16 Ducati rls. If 2D buffer -> 4096 stride should be used
                frame->mData, displayWidth * 2,
                displayWidth,displayHeight,buffer->range_offset(),isInterlaced);
    }
    else{
        ColorConverter converter(
                (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);
        CHECK(converter.isValid());
        converter.convert(
                width, height,
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                0,
                frame->mData, width * 2);
    }
#else
    frame->mWidth = width;
    frame->mHeight = height;
    frame->mDisplayWidth = width;
    frame->mDisplayHeight = height;
    frame->mSize = width * height * 2;   // RGB565: 2 bytes/pixel
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

    ColorConverter converter(
            (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);
    CHECK(converter.isValid());

    converter.convert(
            width, height,
            (const uint8_t *)buffer->data() + buffer->range_offset(),
            0,
            frame->mData, width * 2);
#endif

    buffer->release();
    buffer = NULL;

    decoder->stop();

    return frame;
}
static void playSource(OMXClient *client, sp<MediaSource> &source) { sp<MetaData> meta = source->getFormat(); const char *mime; CHECK(meta->findCString(kKeyMIMEType, &mime)); sp<MediaSource> rawSource; if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) { rawSource = source; } else { int flags = 0; if (gPreferSoftwareCodec) { flags |= OMXCodec::kPreferSoftwareCodecs; } if (gForceToUseHardwareCodec) { CHECK(!gPreferSoftwareCodec); flags |= OMXCodec::kHardwareCodecsOnly; } rawSource = OMXCodec::Create( client->interface(), meta, false /* createEncoder */, source, NULL /* matchComponentName */, flags, gSurface); if (rawSource == NULL) { fprintf(stderr, "Failed to instantiate decoder for '%s'.\n", mime); return; } displayAVCProfileLevelIfPossible(meta); } source.clear(); status_t err = rawSource->start(); if (err != OK) { fprintf(stderr, "rawSource returned error %d (0x%08x)\n", err, err); return; } if (gPlaybackAudio) { AudioPlayer *player = new AudioPlayer(NULL); player->setSource(rawSource); rawSource.clear(); player->start(true /* sourceAlreadyStarted */); status_t finalStatus; while (!player->reachedEOS(&finalStatus)) { usleep(100000ll); } delete player; player = NULL; return; } else if (gReproduceBug >= 3 && gReproduceBug <= 5) { int64_t durationUs; CHECK(meta->findInt64(kKeyDuration, &durationUs)); status_t err; MediaBuffer *buffer; MediaSource::ReadOptions options; int64_t seekTimeUs = -1; for (;;) { err = rawSource->read(&buffer, &options); options.clearSeekTo(); bool shouldSeek = false; if (err == INFO_FORMAT_CHANGED) { CHECK(buffer == NULL); printf("format changed.\n"); continue; } else if (err != OK) { printf("reached EOF.\n"); shouldSeek = true; } else { int64_t timestampUs; CHECK(buffer->meta_data()->findInt64(kKeyTime, ×tampUs)); bool failed = false; if (seekTimeUs >= 0) { int64_t diff = timestampUs - seekTimeUs; if (diff < 0) { diff = -diff; } if ((gReproduceBug == 4 && diff > 500000) || (gReproduceBug == 5 && timestampUs < 0)) { printf("wanted: %.2f secs, 
got: %.2f secs\n", seekTimeUs / 1E6, timestampUs / 1E6); printf("ERROR: "); failed = true; } } printf("buffer has timestamp %lld us (%.2f secs)\n", timestampUs, timestampUs / 1E6); buffer->release(); buffer = NULL; if (failed) { break; } shouldSeek = ((double)rand() / RAND_MAX) < 0.1; if (gReproduceBug == 3) { shouldSeek = false; } } seekTimeUs = -1; if (shouldSeek) { seekTimeUs = (rand() * (float)durationUs) / RAND_MAX; options.setSeekTo(seekTimeUs); printf("seeking to %lld us (%.2f secs)\n", seekTimeUs, seekTimeUs / 1E6); } } rawSource->stop(); return; } int n = 0; int64_t startTime = getNowUs(); long numIterationsLeft = gNumRepetitions; MediaSource::ReadOptions options; int64_t sumDecodeUs = 0; int64_t totalBytes = 0; Vector<int64_t> decodeTimesUs; while (numIterationsLeft-- > 0) { long numFrames = 0; MediaBuffer *buffer; for (;;) { int64_t startDecodeUs = getNowUs(); status_t err = rawSource->read(&buffer, &options); int64_t delayDecodeUs = getNowUs() - startDecodeUs; options.clearSeekTo(); if (err != OK) { CHECK(buffer == NULL); if (err == INFO_FORMAT_CHANGED) { printf("format changed.\n"); continue; } break; } if (buffer->range_length() > 0) { if (gDisplayHistogram && n > 0) { // Ignore the first time since it includes some setup // cost. decodeTimesUs.push(delayDecodeUs); } if ((n++ % 16) == 0) { printf("."); fflush(stdout); } } sumDecodeUs += delayDecodeUs; totalBytes += buffer->range_length(); buffer->release(); buffer = NULL; ++numFrames; if (gMaxNumFrames > 0 && numFrames == gMaxNumFrames) { break; } if (gReproduceBug == 1 && numFrames == 40) { printf("seeking past the end now."); options.setSeekTo(0x7fffffffL); } else if (gReproduceBug == 2 && numFrames == 40) { printf("seeking to 5 secs."); options.setSeekTo(5000000); } } printf("$"); fflush(stdout); options.setSeekTo(0); } rawSource->stop(); printf("\n"); int64_t delay = getNowUs() - startTime; if (!strncasecmp("video/", mime, 6)) { printf("avg. %.2f fps\n", n * 1E6 / delay); printf("avg. 
time to decode one buffer %.2f usecs\n", (double)sumDecodeUs / n); printf("decoded a total of %d frame(s).\n", n); if (gDisplayHistogram) { displayDecodeHistogram(&decodeTimesUs); } } else if (!strncasecmp("audio/", mime, 6)) { // Frame count makes less sense for audio, as the output buffer // sizes may be different across decoders. printf("avg. %.2f KB/sec\n", totalBytes / 1024 * 1E6 / delay); printf("decoded a total of %lld bytes\n", totalBytes); } }
// Track writer loop: pull encoded buffers from mSource until EOS or
// mDone.  For MPEG-4 video, the codec-specific data (everything up to
// the first VOP start code 00 00 01 b6 in the first buffer) is captured
// into mCodecSpecificData and stripped from the sample.  Each remaining
// sample is appended to the file via mOwner->addSample() and its
// size/offset/timestamp recorded in mSampleInfos.
void MPEG4Writer::Track::threadEntry() {
    bool is_mpeg4 = false;
    sp<MetaData> meta = mSource->getFormat();
    const char *mime;
    meta->findCString(kKeyMIMEType, &mime);
    is_mpeg4 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4);
    MediaBuffer *buffer;
    while (!mDone && mSource->read(&buffer) == OK) {
        if (buffer->range_length() == 0) {
            // Skip empty buffers.
            buffer->release();
            buffer = NULL;
            continue;
        }

        if (mCodecSpecificData == NULL && is_mpeg4) {
            const uint8_t *data =
                (const uint8_t *)buffer->data() + buffer->range_offset();
            const size_t size = buffer->range_length();
            size_t offset = 0;
            // Scan for the VOP start code; config data precedes it.
            while (offset + 3 < size) {
                if (data[offset] == 0x00 && data[offset + 1] == 0x00
                        && data[offset + 2] == 0x01 && data[offset + 3] == 0xb6) {
                    break;
                }
                ++offset;
            }

            // CHECK(offset + 3 < size);
            if (offset + 3 >= size) {
                // XXX assume the entire first chunk of data is the codec specific
                // data.
                offset = size;
            }

            mCodecSpecificDataSize = offset;
            mCodecSpecificData = malloc(offset);
            memcpy(mCodecSpecificData, data, offset);

            // Advance the buffer range past the config bytes so only the
            // frame payload is written as the sample.
            buffer->set_range(buffer->range_offset() + offset, size - offset);
        }

        off_t offset = mOwner->addSample(buffer);

        SampleInfo info;
        info.size = buffer->range_length();
        info.offset = offset;

        int32_t units, scale;
        bool success = buffer->meta_data()->findInt32(kKeyTimeUnits, &units);
        CHECK(success);
        success = buffer->meta_data()->findInt32(kKeyTimeScale, &scale);
        CHECK(success);

        // Convert (units, scale) into a millisecond timestamp.
        info.timestamp = (int64_t)units * 1000 / scale;

        mSampleInfos.push_back(info);

        buffer->release();
        buffer = NULL;
    }

    mReachedEOS = true;
}
void MediaPuller::onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { case kWhatStart: { status_t err; ALOGI("start mIsAudio=%d",mIsAudio); if (mIsAudio) { // This atrocity causes AudioSource to deliver absolute // systemTime() based timestamps (off by 1 us). #ifdef MTB_SUPPORT ATRACE_BEGIN_EXT("AudioPuller, kWhatStart"); #endif sp<MetaData> params = new MetaData; params->setInt64(kKeyTime, 1ll); err = mSource->start(params.get()); } else { #ifdef MTB_SUPPORT ATRACE_BEGIN_EXT("VideoPuller, kWhatStart"); #endif err = mSource->start(); if (err != OK) { ALOGE("source failed to start w/ err %d", err); } } if (err == OK) { ALOGI("start done, start to schedulePull data"); schedulePull(); } sp<AMessage> response = new AMessage; response->setInt32("err", err); uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); #ifdef MTB_SUPPORT ATRACE_END_EXT("VideoPuller, kWhatStart"); #endif break; } case kWhatStop: { sp<MetaData> meta = mSource->getFormat(); const char *tmp; CHECK(meta->findCString(kKeyMIMEType, &tmp)); AString mime = tmp; ALOGI("MediaPuller(%s) stopping.", mime.c_str()); mSource->stop(); ALOGI("MediaPuller(%s) stopped.", mime.c_str()); ++mPullGeneration; sp<AMessage> notify; CHECK(msg->findMessage("notify", ¬ify)); notify->post(); break; } case kWhatPull: { int32_t generation; #ifdef MTB_SUPPORT if (mIsAudio) { ATRACE_BEGIN_EXT("AudioPuller, kWhatPull"); } else { ATRACE_BEGIN_EXT("VideoPuller, kWhatPull"); } #endif CHECK(msg->findInt32("generation", &generation)); if (generation != mPullGeneration) { break; } MediaBuffer *mbuf; status_t err = mSource->read(&mbuf); if (mPaused) { if (err == OK) { mbuf->release(); mbuf = NULL; } schedulePull(); break; } if (err != OK) { if (err == ERROR_END_OF_STREAM) { ALOGI("stream ended."); } else { ALOGE("error %d reading stream.", err); } ALOGI("err=%d.post kWhatEOS",err); sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kWhatEOS); notify->post(); } else { 
int64_t timeUs; CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs)); #ifdef MTB_SUPPORT if (mIsAudio) { ATRACE_ONESHOT(ATRACE_ONESHOT_ADATA, "AudioPuller, TS: %lld ms", timeUs/1000); } else { ATRACE_ONESHOT(ATRACE_ONESHOT_VDATA, "VideoPuller, TS: %lld ms", timeUs/1000); } #endif sp<ABuffer> accessUnit = new ABuffer(mbuf->range_length()); memcpy(accessUnit->data(), (const uint8_t *)mbuf->data() + mbuf->range_offset(), mbuf->range_length()); accessUnit->meta()->setInt64("timeUs", timeUs); #ifndef ANDROID_DEFAULT_CODE sp<WfdDebugInfo> debugInfo= defaultWfdDebugInfo(); int64_t MpMs = ALooper::GetNowUs(); debugInfo->addTimeInfoByKey(!mIsAudio , timeUs, "MpIn", MpMs/1000); int64_t NowMpDelta =0; NowMpDelta = (MpMs - timeUs)/1000; if(mFirstDeltaMs == -1){ mFirstDeltaMs = NowMpDelta; ALOGE("[check Input 1th][%s] ,timestamp=%lld ms,[ts and now delta change]=%lld ms", mIsAudio?"audio":"video",timeUs/1000,NowMpDelta); } NowMpDelta = NowMpDelta - mFirstDeltaMs; if(NowMpDelta > 500ll || NowMpDelta < -500ll ){ ALOGE("[check Input][%s] ,timestamp=%lld ms,[ts and now delta change]=%lld ms", mIsAudio?"audio":"video",timeUs/1000,NowMpDelta); } #endif if (mIsAudio) { mbuf->release(); mbuf = NULL; ALOGI("[WFDP][%s] ,timestamp=%lld ms",mIsAudio?"audio":"video",timeUs/1000); } else { // video encoder will release MediaBuffer when done // with underlying data. accessUnit->meta()->setPointer("mediaBuffer", mbuf); ALOGI("[WFDP][%s] ,mediaBuffer=%p,timestamp=%lld ms",mIsAudio?"audio":"video",mbuf,timeUs/1000); } sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kWhatAccessUnit); notify->setBuffer("accessUnit", accessUnit); notify->post(); if (mbuf != NULL) { ALOGV("posted mbuf %p", mbuf); } schedulePull(); #ifdef MTB_SUPPORT if (mIsAudio) { ATRACE_END_EXT("AudioPuller, kWhatPull"); } else { ATRACE_END_EXT("VideoPuller, kWhatPull"); } #endif } break; } case kWhatPause: { mPaused = true; break; } case kWhatResume: { mPaused = false; break; } default: TRESPASS(); } }
// Read one timed-text frame from the Matroska source.
// Returns WOULD_BLOCK (caller should retry later) for NULL/empty buffers
// and when a frame arrived within SEND_MKV_TIMED_TEXT_MIN_DELTA_US of the
// previous one (simple rate limiting).  On success, fills *startTimeUs /
// *endTimeUs (start + kKeySubtitleDuration) and either forwards the
// buffer to the observer (VobSub) or appends parcel descriptions.
status_t TimedTextMatroskaSource::read(
        int64_t *startTimeUs, int64_t *endTimeUs, Parcel *parcel,
        const MediaSource::ReadOptions *options) {
    MediaBuffer *textBuffer = NULL;
    status_t err = mSource->read(&textBuffer, options);
    if (err != OK) {
        return err;
    }

    if (textBuffer == NULL) {
        /* let TextPlayer do read after post some time. */
        return WOULD_BLOCK;
    }

    if (textBuffer->range_length() ==0) {
        /* let TextPlayer do read after post some time. */
        textBuffer->release();
        return WOULD_BLOCK;
    }

    int64_t curSysTimeUs = GetSysTimeUs();
    // NOTE(review): abs() on an int64_t difference — if this resolves to
    // the int-only C abs() the delta is truncated; confirm which overload
    // is in scope here.
    if ((mPreGetFrmTimeUs >0) &&
        abs(curSysTimeUs - mPreGetFrmTimeUs) <SEND_MKV_TIMED_TEXT_MIN_DELTA_US) {
        /* skip this frame */
        textBuffer->release();
        return WOULD_BLOCK;
    }

    if (mPreGetFrmTimeUs == -1) {
        mPreGetFrmTimeUs = curSysTimeUs;
    }

    int32_t durMs = 0;
    *startTimeUs = 0;
    *endTimeUs = 0;
    textBuffer->meta_data()->findInt64(kKeyTime, startTimeUs);
    textBuffer->meta_data()->findInt32(kKeySubtitleDuration, &durMs);
    *endTimeUs = *startTimeUs + durMs*1000;
    CHECK_GE(*startTimeUs, 0);

    if ((mTimedMkvSource.srcMimeType == MKV_TIMED_SRC_MIME_VOBSUB) && mObserver) {
        // VobSub frames are handed to the observer as raw buffers.
        // NOTE(review): textBuffer is released below after this call —
        // presumably the observer copies what it needs; verify.
        mObserver->notifyObserver(MKV_TIMED_MSG_VOBSUB_GET, textBuffer);
    } else {
        extractAndAppendLocalDescriptions(*startTimeUs, textBuffer, parcel);
    }

    ALOGV("read one mkv text frame, size: %d, timeUs: %lld, durMs: %d",
        textBuffer->range_length(), *startTimeUs, durMs);

    mPreGetFrmTimeUs = curSysTimeUs;
    textBuffer->release();
    // endTimeUs is a dummy parameter for Matroska timed text format.
    // Set a negative value to it to mark it is unavailable.
    return OK;
}
void MediaPuller::onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { case kWhatStart: { status_t err; if (mIsAudio) { // This atrocity causes AudioSource to deliver absolute // systemTime() based timestamps (off by 1 us). sp<MetaData> params = new MetaData; params->setInt64(kKeyTime, 1ll); err = mSource->start(params.get()); } else { err = mSource->start(); if (err != OK) { ALOGE("source failed to start w/ err %d", err); } } if (err == OK) { schedulePull(); } sp<AMessage> response = new AMessage; response->setInt32("err", err); uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); break; } case kWhatStop: { sp<MetaData> meta = mSource->getFormat(); const char *tmp; CHECK(meta->findCString(kKeyMIMEType, &tmp)); AString mime = tmp; ALOGI("MediaPuller(%s) stopping.", mime.c_str()); mSource->stop(); ALOGI("MediaPuller(%s) stopped.", mime.c_str()); ++mPullGeneration; sp<AMessage> notify; CHECK(msg->findMessage("notify", ¬ify)); notify->post(); break; } case kWhatPull: { int32_t generation; CHECK(msg->findInt32("generation", &generation)); if (generation != mPullGeneration) { break; } MediaBuffer *mbuf; status_t err = mSource->read(&mbuf); if (mPaused) { if (err == OK) { mbuf->release(); mbuf = NULL; } schedulePull(); break; } if (err != OK) { if (err == ERROR_END_OF_STREAM) { ALOGI("stream ended."); } else { ALOGE("error %d reading stream.", err); } sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kWhatEOS); notify->post(); } else { int64_t timeUs; CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs)); sp<ABuffer> accessUnit = new ABuffer(mbuf->range_length()); memcpy(accessUnit->data(), (const uint8_t *)mbuf->data() + mbuf->range_offset(), mbuf->range_length()); accessUnit->meta()->setInt64("timeUs", timeUs); if (mIsAudio) { mbuf->release(); mbuf = NULL; } else { // video encoder will release MediaBuffer when done // with underlying data. 
accessUnit->setMediaBufferBase(mbuf); } sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kWhatAccessUnit); notify->setBuffer("accessUnit", accessUnit); notify->post(); if (mbuf != NULL) { ALOGV("posted mbuf %p", mbuf); } schedulePull(); } break; } case kWhatPause: { mPaused = true; break; } case kWhatResume: { mPaused = false; break; } default: TRESPASS(); } }
// Assemble the next Vorbis packet from the Ogg lacing values of the
// current page, reading continuation pages as needed (a lace value of
// 255 means the packet continues in the next segment/page).
// On success *out owns the packet, stamped with kKeyTime (when the rate
// is known) and, for the first packet of a page, kKeyValidSamples.
status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
    *out = NULL;

    MediaBuffer *buffer = NULL;
    int64_t timeUs = -1;

    for (;;) {
        size_t i;
        size_t packetSize = 0;
        bool gotFullPacket = false;
        // Sum lace values until one < 255 terminates the packet.
        for (i = mNextLaceIndex; i < mCurrentPage.mNumSegments; ++i) {
            uint8_t lace = mCurrentPage.mLace[i];

            packetSize += lace;

            if (lace < 255) {
                gotFullPacket = true;
                ++i;
                break;
            }
        }

        if (mNextLaceIndex < mCurrentPage.mNumSegments) {
            // Data begins after the 27-byte page header plus the lace
            // table, offset by the segments already consumed.
            off64_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments;

            for (size_t j = 0; j < mNextLaceIndex; ++j) {
                dataOffset += mCurrentPage.mLace[j];
            }

            size_t fullSize = packetSize;
            if (buffer != NULL) {
                // Continuation: grow to hold the previously read part.
                fullSize += buffer->range_length();
            }
            MediaBuffer *tmp = new MediaBuffer(fullSize);
            if (buffer != NULL) {
                memcpy(tmp->data(), buffer->data(), buffer->range_length());
                tmp->set_range(0, buffer->range_length());
                buffer->release();
            } else {
                // XXX Not only is this not technically the correct time for
                // this packet, we also stamp every packet in this page
                // with the same time. This needs fixing later.

                if (mVi.rate) {
                    // Rate may not have been initialized yet if we're currently
                    // reading the configuration packets...
                    // Fortunately, the timestamp doesn't matter for those.
                    timeUs = mCurrentPage.mGranulePosition * 1000000ll / mVi.rate;
                }
                tmp->set_range(0, 0);
            }
            buffer = tmp;

            // Append this page's part of the packet after any
            // previously accumulated bytes.
            ssize_t n = mSource->readAt(
                    dataOffset,
                    (uint8_t *)buffer->data() + buffer->range_length(),
                    packetSize);

            if (n < (ssize_t)packetSize) {
                LOGV("failed to read %d bytes at 0x%016llx, got %ld bytes",
                     packetSize, dataOffset, n);
                return ERROR_IO;
            }

            buffer->set_range(0, fullSize);

            mNextLaceIndex = i;

            if (gotFullPacket) {
                // We've just read the entire packet.

                if (timeUs >= 0) {
                    buffer->meta_data()->setInt64(kKeyTime, timeUs);
                }

                if (mFirstPacketInPage) {
                    buffer->meta_data()->setInt32(
                            kKeyValidSamples, mCurrentPageSamples);
                    mFirstPacketInPage = false;
                }

                *out = buffer;

                return OK;
            }

            // fall through, the buffer now contains the start of the packet.
        }

        CHECK_EQ(mNextLaceIndex, mCurrentPage.mNumSegments);

        // Packet continues on the next page; advance and parse it.
        mOffset += mCurrentPageSize;
        ssize_t n = readPage(mOffset, &mCurrentPage);

        if (n <= 0) {
            if (buffer) {
                buffer->release();
                buffer = NULL;
            }

            LOGV("readPage returned %ld", n);

            return n < 0 ? n : (status_t)ERROR_END_OF_STREAM;
        }

        mCurrentPageSamples =
            mCurrentPage.mGranulePosition - mPrevGranulePosition;
        mFirstPacketInPage = true;
        mPrevGranulePosition = mCurrentPage.mGranulePosition;

        mCurrentPageSize = n;
        mNextLaceIndex = 0;

        if (buffer != NULL) {
            // Ogg header flag bit 0 set means "continued packet".
            if ((mCurrentPage.mFlags & 1) == 0) {
                // This page does not continue the packet, i.e. the packet
                // is already complete.

                if (timeUs >= 0) {
                    buffer->meta_data()->setInt64(kKeyTime, timeUs);
                }

                buffer->meta_data()->setInt32(
                        kKeyValidSamples, mCurrentPageSamples);
                mFirstPacketInPage = false;

                *out = buffer;

                return OK;
            }
        }
    }
}
// Read one chunk of PCM from the underlying data source, honoring an
// optional seek, and deliver it as 16-bit samples: 8-bit input is widened
// into a fresh buffer, 24-bit input is narrowed in place, 16-bit passes
// through untouched.  The buffer is stamped with kKeyTime derived from
// the stream position after this read.
status_t PCMSource::read(
        MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;

    // Honor a seek request by converting the target time to a byte
    // position (2 bytes per 16-bit sample per channel), clamped to mSize.
    int64_t seekTimeUs;
    ReadOptions::SeekMode seek = ReadOptions::SEEK_CLOSEST_SYNC;
    if (options != NULL && options->getSeekTo(&seekTimeUs,&seek)) {
        int64_t bytePos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * 2;
        if (bytePos > mSize) {
            bytePos = mSize;
        }
        mCurrentPos = bytePos + mOffset;
    }

    MediaBuffer *mediaBuf;
    const status_t acquireErr = mGroup->acquire_buffer(&mediaBuf);
    if (acquireErr != OK) {
        return acquireErr;
    }

    ssize_t bytesRead = mDataSource->readAt(
            mCurrentPos, mediaBuf->data(), mBufferSize);

    if (bytesRead <= 0) {
        mediaBuf->release();
        mediaBuf = NULL;
        return ERROR_END_OF_STREAM;
    }

    mCurrentPos += bytesRead;
    mediaBuf->set_range(0, bytesRead);

    if (mBitsPerSample == 8) {
        // Convert 8-bit unsigned samples to 16-bit signed, into a second
        // buffer twice as long (one output sample per input byte).
        MediaBuffer *widened;
        CHECK_EQ(mGroup->acquire_buffer(&widened), OK);

        widened->set_range(0, 2 * bytesRead);

        int16_t *out16 = (int16_t *)widened->data();
        const uint8_t *in8 = (const uint8_t *)mediaBuf->data();
        for (; bytesRead > 0; --bytesRead, ++in8) {
            *out16++ = ((int16_t)(*in8) - 128) * 256;
        }

        mediaBuf->release();
        mediaBuf = widened;
    } else if (mBitsPerSample == 24) {
        // Convert 24-bit signed samples to 16-bit signed, in place.
        // The 2-byte output advances slower than the 3-byte input, so
        // the forward pass never overwrites unread data.
        const uint8_t *in24 =
            (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
        int16_t *out16 = (int16_t *)in24;
        size_t sampleCount = mediaBuf->range_length() / 3;

        for (size_t idx = 0; idx < sampleCount; ++idx) {
            int32_t sample = (int32_t)(in24[0] | in24[1] << 8 | in24[2] << 16);
            sample = (sample << 8) >> 8;  // sign extension
            sample = sample >> 8;         // drop the low byte

            *out16++ = (int16_t)sample;
            in24 += 3;
        }
        mediaBuf->set_range(mediaBuf->range_offset(), 2 * sampleCount);
    }

    // Timestamp from the byte position reached after this read.
    size_t bytesPerSample = mBitsPerSample >> 3;
    mediaBuf->meta_data()->setInt64(
            kKeyTime,
            1000000LL * (mCurrentPos - mOffset)
                / (mNumChannels * bytesPerSample) / mSampleRate);

    *out = mediaBuf;

    return OK;
}
// Extract the encoder's decoder-specific info (DSI / codec config) by
// spinning up a temporary OMX encoder, feeding it one blank frame plus
// EOS, and reading the first output buffer (which must carry
// kKeyIsCodecConfig).  H.264 DSI is reformatted via
// buildAVCCodecSpecificData(); MPEG-4 DSI is copied verbatim into
// pEncoderContext->mHeader.  VIDEOEDITOR_CHECK presumably jumps to the
// cleanUp label on failure (macro defined elsewhere — confirm).
M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext,
        sp<MetaData> metaData) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context*  pEncoderContext = M4OSA_NULL;
    status_t result = OK;
    int32_t nbBuffer = 0;
    int32_t stride = 0;
    int32_t height = 0;
    int32_t framerate = 0;
    int32_t isCodecConfig = 0;
    size_t size = 0;
    uint32_t codecFlags = 0;
    MediaBuffer* inputBuffer = NULL;
    MediaBuffer* outputBuffer = NULL;
    sp<VideoEditorVideoEncoderSource> encoderSource = NULL;
    sp<MediaSource> encoder = NULL;;
    OMXClient client;

    ALOGV("VideoEditorVideoEncoder_getDSI begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,       M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);

    // Create the encoder source
    encoderSource = VideoEditorVideoEncoderSource::Create(metaData);
    VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE);

    // Connect to the OMX client
    result = client.connect();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

    // Create the OMX codec
    // VIDEOEDITOR_FORCECODEC MUST be defined here
    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
    encoder = OMXCodec::Create(client.interface(), metaData, true,
        encoderSource, NULL, codecFlags);
    VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE);

    /**
     * Send fake frames and retrieve the DSI
     */
    // Send a fake frame to the source
    metaData->findInt32(kKeyStride,     &stride);
    metaData->findInt32(kKeyHeight,     &height);
    metaData->findInt32(kKeySampleRate, &framerate);
    // One uninitialized YUV420 frame (1.5 bytes per pixel).
    size = (size_t)(stride*height*3)/2;
    inputBuffer = new MediaBuffer(size);
    inputBuffer->meta_data()->setInt64(kKeyTime, 0);
    nbBuffer = encoderSource->storeBuffer(inputBuffer);
    encoderSource->storeBuffer(NULL); // Signal EOS

    // Call read once to get the DSI
    result = encoder->start();;
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    result = encoder->read(&outputBuffer, NULL);
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    // The first output buffer must be the codec config.
    VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32(
        kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE);

    VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE);
    if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
        // For H264, format the DSI
        result = buildAVCCodecSpecificData(
            (uint8_t**)(&(pEncoderContext->mHeader.pBuf)),
            (size_t*)(&(pEncoderContext->mHeader.Size)),
            (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(),
            outputBuffer->range_length(), encoder->getFormat().get());
        outputBuffer->release();
        VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    } else {
        // For MPEG4, just copy the DSI
        pEncoderContext->mHeader.Size =
            (M4OSA_UInt32)outputBuffer->range_length();
        SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8,
            pEncoderContext->mHeader.Size, "Encoder header");
        memcpy((void *)pEncoderContext->mHeader.pBuf,
            (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()),
            pEncoderContext->mHeader.Size);
        outputBuffer->release();
    }

    result = encoder->stop();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

cleanUp:
    // Destroy the graph
    if ( encoder != NULL ) { encoder.clear(); }
    client.disconnect();
    if ( encoderSource != NULL ) { encoderSource.clear(); }
    if ( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_getDSI no error");
    } else {
        ALOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_getDSI end");
    return err;
}
// Test tool: set up an ARTP session from a compile-time-selected SDP
// (optionally replaying captured RTP/RTCP dumps via UDPPusher), decode
// the single track with OMXCodec and print per-frame sizes/timestamps
// until the decoder errors out.
// Usage: prog [ rtpFilename rtcpFilename ]
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    DataSource::RegisterDefaultSniffers();

    const char *rtpFilename = NULL;
    const char *rtcpFilename = NULL;

    if (argc == 3) {
        rtpFilename = argv[1];
        rtcpFilename = argv[2];
    } else if (argc != 1) {
        fprintf(stderr, "usage: %s [ rtpFilename rtcpFilename ]\n", argv[0]);
        return 1;
    }

#if 0
    // Helper snippet: base64-encode SPS/PPS for an SDP fmtp line.
    static const uint8_t kSPS[] = {
        0x67, 0x42, 0x80, 0x0a, 0xe9, 0x02, 0x83, 0xe4, 0x20, 0x00, 0x00, 0x7d,
        0x00, 0x00, 0x0e, 0xa6, 0x00, 0x80
    };
    static const uint8_t kPPS[] = {
        0x68, 0xce, 0x3c, 0x80
    };
    AString out1, out2;
    encodeBase64(kSPS, sizeof(kSPS), &out1);
    encodeBase64(kPPS, sizeof(kPPS), &out2);
    printf("params=%s,%s\n", out1.c_str(), out2.c_str());
#endif

    sp<ALooper> looper = new ALooper;

    sp<UDPPusher> rtp_pusher;
    sp<UDPPusher> rtcp_pusher;

    if (rtpFilename != NULL) {
        // Replay captured RTP/RTCP dumps to the local session ports.
        rtp_pusher = new UDPPusher(rtpFilename, 5434);
        looper->registerHandler(rtp_pusher);

        rtcp_pusher = new UDPPusher(rtcpFilename, 5435);
        looper->registerHandler(rtcp_pusher);
    }

    sp<ARTPSession> session = new ARTPSession;
    looper->registerHandler(session);

#if 0
    // My H264 SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=0-315\r\n"
        "a=isma-compliance:2,2.0,2\r\n"
        "m=video 5434 RTP/AVP 97\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:30\r\n"
        "a=rtpmap:97 H264/90000\r\n"
        "a=fmtp:97 packetization-mode=1;profile-level-id=42000C;"
            "sprop-parameter-sets=Z0IADJZUCg+I,aM44gA==\r\n"
        "a=mpeg4-esid:201\r\n"
        "a=cliprect:0,0,240,320\r\n"
        "a=framesize:97 320-240\r\n";
#elif 0
    // My H263 SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=0-315\r\n"
        "a=isma-compliance:2,2.0,2\r\n"
        "m=video 5434 RTP/AVP 97\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:30\r\n"
        "a=rtpmap:97 H263-1998/90000\r\n"
        "a=cliprect:0,0,240,320\r\n"
        "a=framesize:97 320-240\r\n";
#elif 0
    // My AMR SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=0-315\r\n"
        "a=isma-compliance:2,2.0,2\r\n"
        "m=audio 5434 RTP/AVP 97\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:30\r\n"
        "a=rtpmap:97 AMR/8000/1\r\n"
        "a=fmtp:97 octet-align\r\n";
#elif 1
    // GTalk's H264 SDP (the active configuration)
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=now-\r\n"
        "m=video 5434 RTP/AVP 96\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:320000\r\n"
        "a=rtpmap:96 H264/90000\r\n"
        "a=fmtp:96 packetization-mode=1;profile-level-id=42001E;"
            "sprop-parameter-sets=Z0IAHpZUBaHogA==,aM44gA==\r\n"
        "a=cliprect:0,0,480,270\r\n"
        "a=framesize:96 720-480\r\n";
#else
    // sholes H264 SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=now-\r\n"
        "m=video 5434 RTP/AVP 96\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:320000\r\n"
        "a=rtpmap:96 H264/90000\r\n"
        "a=fmtp:96 packetization-mode=1;profile-level-id=42001E;"
            "sprop-parameter-sets=Z0KACukCg+QgAAB9AAAOpgCA,aM48gA==\r\n"
        "a=cliprect:0,0,240,320\r\n"
        "a=framesize:96 320-240\r\n";
#endif

    sp<ASessionDescription> desc = new ASessionDescription;
    CHECK(desc->setTo(raw, strlen(raw)));

    CHECK_EQ(session->setup(desc), (status_t)OK);

    if (rtp_pusher != NULL) {
        rtp_pusher->start();
    }

    if (rtcp_pusher != NULL) {
        rtcp_pusher->start();
    }

    looper->start(false /* runOnCallingThread */);

    CHECK_EQ(session->countTracks(), 1u);
    sp<MediaSource> source = session->trackAt(0);

    OMXClient client;
    CHECK_EQ(client.connect(), (status_t)OK);

    sp<MediaSource> decoder = OMXCodec::Create(
            client.interface(),
            source->getFormat(), false /* createEncoder */,
            source,
            NULL,
            0);  // OMXCodec::kPreferSoftwareCodecs);
    CHECK(decoder != NULL);

    CHECK_EQ(decoder->start(), (status_t)OK);

    // Decode until error; format changes just report the new dimensions.
    for (;;) {
        MediaBuffer *buffer;
        status_t err = decoder->read(&buffer);

        if (err != OK) {
            if (err == INFO_FORMAT_CHANGED) {
                int32_t width, height;
                CHECK(decoder->getFormat()->findInt32(kKeyWidth, &width));
                CHECK(decoder->getFormat()->findInt32(kKeyHeight, &height));
                printf("INFO_FORMAT_CHANGED %d x %d\n", width, height);
                continue;
            }

            LOGE("decoder returned error 0x%08x", err);
            break;
        }

#if 1
        if (buffer->range_length() != 0) {
            int64_t timeUs;
            CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));

            printf("decoder returned frame of size %d at time %.2f secs\n",
                   buffer->range_length(), timeUs / 1E6);
        }
#endif

        buffer->release();
        buffer = NULL;
    }

    CHECK_EQ(decoder->stop(), (status_t)OK);

    looper->stop();

    return 0;
}
// Command-line transcode harness: decodes the video track of the file named
// on the command line and re-encodes it as MPEG4 into /sdcard/output.mp4.
// The #if 1/#else pairs select between compiled-in variants (real file source
// vs. DummySource; MPEG4Writer vs. raw encoder-read loop); the #if 0 block at
// the end is a disabled CameraSource smoke test.
int main(int argc, char **argv) {
    // Binder thread pool is required for the OMX IPC below.
    android::ProcessState::self()->startThreadPool();
    DataSource::RegisterDefaultSniffers();
#if 1
    if (argc != 2) {
        fprintf(stderr, "usage: %s filename\n", argv[0]);
        return 1;
    }
    OMXClient client;
    CHECK_EQ(client.connect(), OK);
#if 1
    // Active variant: pull the video track out of the given file.
    sp<MediaSource> source = createSource(argv[1]);
    if (source == NULL) {
        fprintf(stderr, "Unable to find a suitable video track.\n");
        return 1;
    }
    sp<MetaData> meta = source->getFormat();
    sp<MediaSource> decoder = OMXCodec::Create(
            client.interface(), meta, false /* createEncoder */, source);
    int width, height;
    bool success = meta->findInt32(kKeyWidth, &width);
    success = success && meta->findInt32(kKeyHeight, &height);
    CHECK(success);
#else
    // Disabled variant: synthetic 800x480 source, no file needed.
    int width = 800;
    int height = 480;
    sp<MediaSource> decoder = new DummySource(width, height);
#endif
    // Build the encoder's output format; dimensions are copied from the
    // decoded track so the encoder matches the source frame size.
    sp<MetaData> enc_meta = new MetaData;
    // enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
    enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
    enc_meta->setInt32(kKeyWidth, width);
    enc_meta->setInt32(kKeyHeight, height);
    sp<MediaSource> encoder = OMXCodec::Create(
            client.interface(), enc_meta, true /* createEncoder */, decoder);
#if 1
    // Active variant: drive the encoder via MPEG4Writer; poll until EOS.
    sp<MPEG4Writer> writer = new MPEG4Writer("/sdcard/output.mp4");
    writer->addSource(encoder);
    writer->start();
    while (!writer->reachedEOS()) {
        usleep(100000);  // 100 ms poll; writer runs on its own threads.
    }
    writer->stop();
#else
    // Disabled variant: read encoded buffers manually and print their sizes.
    encoder->start();
    MediaBuffer *buffer;
    while (encoder->read(&buffer) == OK) {
        int32_t isSync;
        if (!buffer->meta_data()->findInt32(kKeyIsSyncFrame, &isSync)) {
            isSync = false;  // absence of the key means "not a sync frame"
        }
        printf("got an output frame of size %d%s\n",
               buffer->range_length(), isSync ? " (SYNC)" : "");
        buffer->release();
        buffer = NULL;
    }
    encoder->stop();
#endif
    client.disconnect();
#endif
#if 0
    // Disabled: CameraSource smoke test — read and discard 100 frames.
    CameraSource *source = CameraSource::Create();
    printf("source = %p\n", source);
    for (int i = 0; i < 100; ++i) {
        MediaBuffer *buffer;
        status_t err = source->read(&buffer);
        CHECK_EQ(err, OK);
        printf("got a frame, data=%p, size=%d\n",
               buffer->data(), buffer->range_length());
        buffer->release();
        buffer = NULL;
    }
    delete source;
    source = NULL;
#endif
    return 0;
}
// Stress-tests a decoder component's seek behaviour: runs kNumIterations
// reads against the codec, randomly alternating between linear reads and
// seeks (including deliberate seeks beyond EOS), and verifies via a second
// "seekSource" what timestamp the nearest sync frame actually has.
// Returns OK if every iteration behaved as expected, UNKNOWN_ERROR on the
// first mismatch between the codec's output time and the expected keyframe.
status_t Harness::testSeek(
        const char *componentName, const char *componentRole) {
    bool isEncoder =
        !strncmp(componentRole, "audio_encoder.", 14)
        || !strncmp(componentRole, "video_encoder.", 14);

    if (isEncoder) {
        // Not testing seek behaviour for encoders.
        printf("  * Not testing seek functionality for encoders.\n");
        return OK;
    }

    const char *mime = GetMimeFromComponentRole(componentRole);

    if (!mime) {
        LOGI("Cannot perform seek test with this componentRole (%s)",
             componentRole);
        return OK;
    }

    // Two independent readers of the same stream: "source" feeds the codec,
    // "seekSource" is used separately to discover where keyframes really are.
    sp<MediaSource> source = CreateSourceForMime(mime);
    sp<MediaSource> seekSource = CreateSourceForMime(mime);
    if (source == NULL || seekSource == NULL) {
        return UNKNOWN_ERROR;
    }

    CHECK_EQ(seekSource->start(), OK);

    sp<MediaSource> codec = OMXCodec::Create(
            mOMX, source->getFormat(), false /* createEncoder */,
            source, componentName);

    CHECK(codec != NULL);

    CHECK_EQ(codec->start(), OK);

    int64_t durationUs;
    CHECK(source->getFormat()->findInt64(kKeyDuration, &durationUs));

    LOGI("stream duration is %lld us (%.2f secs)",
         durationUs, durationUs / 1E6);

    static const int32_t kNumIterations = 5000;

    // We are always going to seek beyond EOS in the first iteration (i == 0)
    // followed by a linear read for the second iteration (i == 1).
    // After that it's all random.
    for (int32_t i = 0; i < kNumIterations; ++i) {
        int64_t requestedSeekTimeUs;
        int64_t actualSeekTimeUs;
        MediaSource::ReadOptions options;

        double r = uniform_rand();
        if ((i == 1) || (i > 0 && r < 0.5)) {
            // 50% chance of just continuing to decode from last position.

            requestedSeekTimeUs = -1;

            LOGI("requesting linear read");
        } else {
            if (i == 0 || r < 0.55) {
                // 5% chance of seeking beyond end of stream.

                requestedSeekTimeUs = durationUs;

                LOGI("requesting seek beyond EOF");
            } else {
                requestedSeekTimeUs =
                    (int64_t)(uniform_rand() * durationUs);

                LOGI("requesting seek to %lld us (%.2f secs)",
                     requestedSeekTimeUs, requestedSeekTimeUs / 1E6);
            }

            // Probe the raw stream for the sync frame the seek should land
            // on; actualSeekTimeUs stays -1 if the probe read fails
            // (e.g. seeking beyond EOS).
            MediaBuffer *buffer = NULL;
            options.setSeekTo(
                    requestedSeekTimeUs, MediaSource::ReadOptions::SEEK_NEXT_SYNC);

            if (seekSource->read(&buffer, &options) != OK) {
                CHECK_EQ(buffer, NULL);
                actualSeekTimeUs = -1;
            } else {
                CHECK(buffer != NULL);
                CHECK(buffer->meta_data()->findInt64(kKeyTime, &actualSeekTimeUs));
                CHECK(actualSeekTimeUs >= 0);

                buffer->release();
                buffer = NULL;
            }

            LOGI("nearest keyframe is at %lld us (%.2f secs)",
                 actualSeekTimeUs, actualSeekTimeUs / 1E6);
        }

        status_t err;
        MediaBuffer *buffer;
        // Read from the codec until we get a real buffer or a real error,
        // skipping format-change notifications and zero-length buffers.
        // The seek (if any) is consumed on the first read; clearSeekTo()
        // makes the retries plain linear reads.
        for (;;) {
            err = codec->read(&buffer, &options);
            options.clearSeekTo();
            if (err == INFO_FORMAT_CHANGED) {
                CHECK_EQ(buffer, NULL);
                continue;
            }

            if (err == OK) {
                CHECK(buffer != NULL);

                if (buffer->range_length() == 0) {
                    buffer->release();
                    buffer = NULL;
                    continue;
                }
            } else {
                CHECK_EQ(buffer, NULL);
            }

            break;
        }

        // NOTE: actualSeekTimeUs is only meaningful when
        // requestedSeekTimeUs >= 0 (i.e. the seek branch above ran).
        if (requestedSeekTimeUs < 0) {
            // Linear read.
            if (err != OK) {
                CHECK_EQ(buffer, NULL);
            } else {
                CHECK(buffer != NULL);
                buffer->release();
                buffer = NULL;
            }
        } else if (actualSeekTimeUs < 0) {
            // We sought beyond EOS: the only acceptable outcome is
            // ERROR_END_OF_STREAM with no buffer.
            EXPECT(err != OK,
                   "We attempted to seek beyond EOS and expected "
                   "ERROR_END_OF_STREAM to be returned, but instead "
                   "we got a valid buffer.");
            EXPECT(err == ERROR_END_OF_STREAM,
                   "We attempted to seek beyond EOS and expected "
                   "ERROR_END_OF_STREAM to be returned, but instead "
                   "we found some other error.");
            CHECK_EQ(err, ERROR_END_OF_STREAM);
            CHECK_EQ(buffer, NULL);
        } else {
            // Normal seek: the decoded buffer's timestamp must match the
            // keyframe position discovered via seekSource.
            EXPECT(err == OK,
                   "Expected a valid buffer to be returned from "
                   "OMXCodec::read.");
            CHECK(buffer != NULL);

            int64_t bufferTimeUs;
            CHECK(buffer->meta_data()->findInt64(kKeyTime, &bufferTimeUs));
            if (!CloseEnough(bufferTimeUs, actualSeekTimeUs)) {
                printf("\n  * Attempted seeking to %lld us (%.2f secs)",
                       requestedSeekTimeUs, requestedSeekTimeUs / 1E6);
                printf("\n  * Nearest keyframe is at %lld us (%.2f secs)",
                       actualSeekTimeUs, actualSeekTimeUs / 1E6);
                printf("\n  * Returned buffer was at %lld us (%.2f secs)\n\n",
                       bufferTimeUs, bufferTimeUs / 1E6);

                buffer->release();
                buffer = NULL;

                CHECK_EQ(codec->stop(), OK);

                return UNKNOWN_ERROR;
            }

            buffer->release();
            buffer = NULL;
        }
    }

    CHECK_EQ(codec->stop(), OK);

    return OK;
}
// Decodes a single video frame (thumbnail or the frame at frameTimeUs) from
// `source` using an OMX decoder created with `flags`, converts it to RGB565
// and returns it as a newly allocated VideoFrame (caller owns it).
// Returns NULL on any failure (decoder instantiation/start, read, unreadable
// buffer, or color conversion).
// frameTimeUs < 0 means "use the track's thumbnail time (or 0)";
// seekMode must be a valid MediaSource::ReadOptions::SeekMode.
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client,
        const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source,
        uint32_t flags,
        int64_t frameTimeUs,
        int seekMode) {

    sp<MetaData> format = source->getFormat();

#ifndef MTK_HARDWARE
    // XXX:
    // Once all vendors support OMX_COLOR_FormatYUV420Planar, we can
    // remove this check and always set the decoder output color format.
    // NOTE(review): the two #ifndef/#else arms below share closing braces
    // across the #endif — brace balance differs per arm; edit with care.
#ifndef QCOM_HARDWARE
    if (isYUV420PlanarSupported(client, trackMeta)) {
        format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
#else
    // skip this check for software decoders
    if (!(flags & OMXCodec::kSoftwareCodecsOnly)) {
        if (isYUV420PlanarSupported(client, trackMeta)) {
            format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
        }
#endif
    }
#endif

    sp<MediaSource> decoder = OMXCodec::Create(
            client->interface(), format, false, source,
            NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        ALOGV("unable to instantiate video decoder.");

        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        ALOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.

    MediaSource::ReadOptions options;
    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {

        ALOGE("Unknown seek mode: %d", seekMode);
        return NULL;
    }

    MediaSource::ReadOptions::SeekMode mode =
            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);

    int64_t thumbNailTime;
    if (frameTimeUs < 0) {
        // Thumbnail request: use the track's advertised thumbnail time,
        // falling back to 0 if absent or negative.
        if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options.setSeekTo(thumbNailTime, mode);
    } else {
        thumbNailTime = -1;
        options.setSeekTo(frameTimeUs, mode);
    }

    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            buffer->release();
            buffer = NULL;
        }
        err = decoder->read(&buffer, &options);
        options.clearSeekTo();  // seek only on the first read
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        CHECK(buffer == NULL);

        ALOGV("decoding frame failed.");
        decoder->stop();

        return NULL;
    }

    ALOGV("successfully decoded video frame.");

    int32_t unreadable;
    if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        // e.g. secure/protected buffers the CPU cannot map.
        ALOGV("video frame is unreadable, decoder does not give us access "
             "to the video data.");
        buffer->release();
        buffer = NULL;

        decoder->stop();

        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            // Informational only: the decoder landed on a different frame
            // than the requested thumbnail time.
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));

            ALOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
                 thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    // Crop rect defaults to the full frame when the decoder doesn't publish one.
    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!meta->findRect(
                kKeyCropRect,
                &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    VideoFrame *frame = new VideoFrame;
    frame->mWidth = crop_right - crop_left + 1;
    frame->mHeight = crop_bottom - crop_top + 1;
    frame->mDisplayWidth = frame->mWidth;
    frame->mDisplayHeight = frame->mHeight;
    frame->mSize = frame->mWidth * frame->mHeight * 2;  // 2 bytes/px (RGB565)
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    int32_t displayWidth, displayHeight;
    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
        frame->mDisplayWidth = displayWidth;
    }
    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
        frame->mDisplayHeight = displayHeight;
    }

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

#ifdef MTK_HARDWARE
    // MTK decoders report buffer geometry via stride/slice-height; use those
    // as the source dimensions for color conversion.
    {
        int32_t Stridewidth,SliceHeight;
        CHECK(meta->findInt32(kKeyStride, &Stridewidth));
        CHECK(meta->findInt32(kKeySliceHeight, &SliceHeight));
        ALOGD("kKeyWidth=%d,kKeyHeight=%d",width,height);
        ALOGD("Stridewidth=%d,SliceHeight=%d",Stridewidth,SliceHeight);

        width=Stridewidth;
        height=SliceHeight;
    }
#endif

    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);

    if (converter.isValid()) {
        err = converter.convert(
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                frame->mData,
                frame->mWidth,
                frame->mHeight,
                0, 0, frame->mWidth - 1, frame->mHeight - 1);
    } else {
        ALOGE("Unable to instantiate color conversion from format 0x%08x to "
              "RGB565",
              srcFormat);

        err = ERROR_UNSUPPORTED;
    }

    buffer->release();
    buffer = NULL;

    decoder->stop();

    if (err != OK) {
        ALOGE("Colorconverter failed to convert frame.");

        delete frame;
        frame = NULL;
    }

    return frame;
}

// Returns a single decoded+converted frame at `timeUs` (with seek behaviour
// `option`) from the currently-set extractor's first video track, or NULL if
// no extractor/video track exists, the content is DRM-protected, or both
// decoder attempts fail.  Tries a software-preferred decoder first, then
// falls back to the default (hardware) decoder.  As a side effect, caches
// album art from the file metadata into mAlbumArt the first time it is seen.
VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
        int64_t timeUs, int option) {

    ALOGV("getFrameAtTime: %lld us option: %d", timeUs, option);

    if (mExtractor.get() == NULL) {
        ALOGV("no extractor.");
        return NULL;
    }

    sp<MetaData> fileMeta = mExtractor->getMetaData();

    if (fileMeta == NULL) {
        ALOGV("extractor doesn't publish metadata, failed to initialize?");
        return NULL;
    }

    int32_t drm = 0;
    if (fileMeta->findInt32(kKeyIsDRM, &drm) && drm != 0) {
        ALOGE("frame grab not allowed.");
        return NULL;
    }

    // Locate the first track whose MIME type starts with "video/".
    size_t n = mExtractor->countTracks();
    size_t i;
    for (i = 0; i < n; ++i) {
        sp<MetaData> meta = mExtractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp(mime, "video/", 6)) {
            break;
        }
    }

    if (i == n) {
        ALOGV("no video track found.");
        return NULL;
    }

    sp<MetaData> trackMeta = mExtractor->getTrackMetaData(
            i, MediaExtractor::kIncludeExtensiveMetaData);

    sp<MediaSource> source = mExtractor->getTrack(i);

    if (source.get() == NULL) {
        ALOGV("unable to instantiate video track.");
        return NULL;
    }

    const void *data;
    uint32_t type;
    size_t dataSize;
    // Opportunistically cache album art while we have the file metadata.
    if (fileMeta->findData(kKeyAlbumArt, &type, &data, &dataSize)
            && mAlbumArt == NULL) {
        mAlbumArt = new MediaAlbumArt;
        mAlbumArt->mSize = dataSize;
        mAlbumArt->mData = new uint8_t[dataSize];
        memcpy(mAlbumArt->mData, data, dataSize);
    }

    // First attempt: software decoding (QCOM builds restrict to
    // software-only; others merely prefer software).
    VideoFrame *frame = extractVideoFrameWithCodecFlags(
#ifndef QCOM_HARDWARE
            &mClient, trackMeta, source, OMXCodec::kPreferSoftwareCodecs,
#else
            &mClient, trackMeta, source, OMXCodec::kSoftwareCodecsOnly,
#endif
            timeUs, option);

    if (frame == NULL) {
        ALOGV("Software decoder failed to extract thumbnail, "
             "trying hardware decoder.");

        // Second attempt: no flags — allow any (hardware) decoder.
        frame = extractVideoFrameWithCodecFlags(&mClient, trackMeta, source, 0,
                        timeUs, option);
    }

    return frame;
}
// Legacy (4-argument) variant of extractVideoFrameWithCodecFlags: decodes
// one frame at the track's thumbnail time (or the stream start if no
// kKeyThumbnailTime is present) and converts it to an RGB565 VideoFrame the
// caller owns.  Returns NULL on decoder or read failure.
// NOTE(review): unlike the newer 6-arg version, an unsupported source color
// format here aborts via CHECK(converter.isValid()) instead of failing
// gracefully, and this version does not honor crop rects — presumably
// intentional for this older revision; confirm before reuse.
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client,
        const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source, uint32_t flags) {
    sp<MediaSource> decoder =
        OMXCodec::Create(
                client->interface(), source->getFormat(), false, source,
                NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        LOGV("unable to instantiate video decoder.");

        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        LOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.

    MediaSource::ReadOptions options;
    int64_t thumbNailTime;
    if (trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) {
        options.setSeekTo(thumbNailTime);
    } else {
        thumbNailTime = -1;  // sentinel: skip the time-mismatch log below
    }

    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            buffer->release();
            buffer = NULL;
        }
        err = decoder->read(&buffer, &options);
        options.clearSeekTo();  // seek only on the first read
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        CHECK_EQ(buffer, NULL);

        LOGV("decoding frame failed.");
        decoder->stop();

        return NULL;
    }

    LOGV("successfully decoded video frame.");

    int32_t unreadable;
    if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        // e.g. secure/protected buffers the CPU cannot map.
        LOGV("video frame is unreadable, decoder does not give us access "
             "to the video data.");

        buffer->release();
        buffer = NULL;

        decoder->stop();

        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            // Informational only: decoder landed on a different frame
            // than the requested thumbnail time.
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));

            LOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
                 thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    VideoFrame *frame = new VideoFrame;
    frame->mWidth = width;
    frame->mHeight = height;
    frame->mDisplayWidth = width;
    frame->mDisplayHeight = height;
    frame->mSize = width * height * 2;  // 2 bytes/px (RGB565)
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

    ColorConverter converter(
            (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);
    CHECK(converter.isValid());  // aborts on unsupported source format

    converter.convert(
            width, height,
            (const uint8_t *)buffer->data() + buffer->range_offset(),
            0,
            frame->mData, width * 2);

    buffer->release();
    buffer = NULL;

    decoder->stop();

    return frame;
}