// Reads and verifies the three mandatory Vorbis header packets, records where
// audio data begins, and (when the source size is known) derives the exact
// stream duration from the final page's granule position.
// Returns OK on success or the first error from packet reading/verification.
status_t MyVorbisExtractor::init() {
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS);

    // A Vorbis stream begins with three header packets whose type bytes are
    // 1 (identification), 3 (comment) and 5 (setup).  Verify each in turn
    // instead of repeating the read/verify/release sequence three times.
    static const uint8_t kHeaderTypes[] = { 1, 3, 5 };
    for (size_t i = 0; i < sizeof(kHeaderTypes) / sizeof(kHeaderTypes[0]); ++i) {
        MediaBuffer *packet;
        status_t err = readNextPacket(&packet);
        if (err != OK) {
            return err;
        }
        // range_length() is a size_t: %zu, not %d.
        ALOGV("read packet of size %zu\n", packet->range_length());
        err = verifyHeader(packet, kHeaderTypes[i]);
        packet->release();
        packet = NULL;
        if (err != OK) {
            return err;
        }
    }

    // Audio data starts on the page following the header packets.
    mFirstDataOffset = mOffset + mCurrentPageSize;

    off64_t size;
    uint64_t lastGranulePosition;
    if (mSource->getSize(&size) == OK
            && findPrevGranulePosition(size, &lastGranulePosition) == OK) {
        // Let's assume it's cheap to seek to the end.
        // The granule position of the final page in the stream will
        // give us the exact duration of the content, something that
        // we can only approximate using avg. bitrate if seeking to
        // the end is too expensive or impossible (live streaming).
        int64_t durationUs = lastGranulePosition * 1000000ll / mVi.rate;
        mMeta->setInt64(kKeyDuration, durationUs);
        buildTableOfContents();
    }
    return OK;
}
static void performSeekTest(const sp<MediaSource> &source) { CHECK_EQ((status_t)OK, source->start()); int64_t durationUs; CHECK(source->getFormat()->findInt64(kKeyDuration, &durationUs)); for (int64_t seekTimeUs = 0; seekTimeUs <= durationUs; seekTimeUs += 60000ll) { MediaSource::ReadOptions options; options.setSeekTo( seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC); MediaBuffer *buffer; status_t err; for (;;) { err = source->read(&buffer, &options); options.clearSeekTo(); if (err == INFO_FORMAT_CHANGED) { CHECK(buffer == NULL); continue; } if (err != OK) { CHECK(buffer == NULL); break; } if (buffer->range_length() > 0) { break; } CHECK(buffer != NULL); buffer->release(); buffer = NULL; } if (err == OK) { int64_t timeUs; CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs)); printf("%lld\t%lld\t%lld\n", seekTimeUs, timeUs, seekTimeUs - timeUs); buffer->release(); buffer = NULL; } else { printf("ERROR\n"); break; } } CHECK_EQ((status_t)OK, source->stop()); }
static void dumpSource(const sp<MediaSource> &source, const String8 &filename) { FILE *out = fopen(filename.string(), "wb"); CHECK_EQ((status_t)OK, source->start()); status_t err; for (;;) { MediaBuffer *mbuf; err = source->read(&mbuf); if (err == INFO_FORMAT_CHANGED) { continue; } else if (err != OK) { break; } CHECK_EQ( fwrite((const uint8_t *)mbuf->data() + mbuf->range_offset(), 1, mbuf->range_length(), out), (ssize_t)mbuf->range_length()); mbuf->release(); mbuf = NULL; } CHECK_EQ((status_t)OK, source->stop()); fclose(out); out = NULL; }
// Reads one frame from the wrapped source; when down-sampling is enabled the
// frame is converted into a new buffer (timestamp preserved) and the original
// is released, otherwise the source buffer is passed through unchanged.
// Returns the status of the underlying read.
status_t VideoSourceDownSampler::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");

    MediaBuffer *realBuffer;
    status_t err = mRealVideoSource->read(&realBuffer, options);

    // BUGFIX: on failure the source does not hand out a valid buffer, so
    // bail out before dereferencing realBuffer.
    if (err != OK) {
        *buffer = NULL;
        return err;
    }

    if (mNeedDownSampling) {
        downSampleYUVImage(*realBuffer, buffer);

        // Propagate the frame timestamp to the down-sampled copy.
        int64_t frameTime;
        realBuffer->meta_data()->findInt64(kKeyTime, &frameTime);
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);

        // We just want this buffer to be deleted when the encoder releases it.
        // So don't add a reference to it and set the observer to NULL.
        (*buffer)->setObserver(NULL);

        // The original buffer is no longer required. Release it.
        realBuffer->release();
    } else {
        *buffer = realBuffer;
    }

    return err;
}
void VideoCodec::encode_video() { LOGD("VideoCodec::%s", __FUNCTION__); uint32_t framecount = 0; for (; ;) { MediaBuffer *mVideoBuffer; MediaSource::ReadOptions options; LOGD("try read frame"); status_t err = mVideoEncoder->read(&mVideoBuffer, &options); LOGD("encode read ret = 0x%08x", err); if (err == OK) { if (mVideoBuffer->range_length() > 0) { // If video frame availabe, render it to mNativeWindow // sp<MetaData> metaData = mVideoBuffer->meta_data(); // int64_t timeUs = 0; // metaData->findInt64(kKeyTime, &timeUs) // native_window_set_buffers_timestamp(mNativeWindow.get(), timeUs * 1000); // err = mNativeWindow->queueBuffer(mNativeWindow.get(), // mVideoBuffer->graphicBuffer().get()); // if (err == 0) { // metaData->setInt32(kKeyRendered, 1); // } framecount++; LOGD("encode frame success, framecount=%d", framecount); } if(framecount > 300){ break; } mVideoBuffer->release(); } // if (mMp4Write->reachedEOS()) { // LOGD("VideoCodec EOF"); // break; // } } }
// Decodes one JPEG image from `source` via the OMX `decoder` and copies the
// decoded pixels into `bm`.  Returns true on success, false if the decoder
// could not start or the read failed.
bool OmxJpegImageDecoder::decodeSource(sp<MediaSource> decoder,
        const sp<MediaSource>& source, SkBitmap* bm) {
    status_t rt = decoder->start();
    if (rt != OK) {
        ALOGE("Cannot start OMX Decoder!");
        return false;
    }

    int64_t startTime = getNowUs();
    MediaBuffer *buffer;

    // decode source
    status_t err = decoder->read(&buffer, NULL);
    int64_t duration = getNowUs() - startTime;

    // BUGFIX: the original asserted buffer == NULL on error and then fell
    // through and dereferenced the NULL buffer.  Stop and report failure.
    if (err != OK) {
        CHECK(buffer == NULL);
        decoder->stop();
        return false;
    }

    printf("Duration in decoder->read(): %.1f (msecs). \n", duration / 1E3 );

    // Copy pixels from buffer to bm.
    // May need to check buffer->rawBytes() == bm->rawBytes().
    CHECK_EQ(buffer->size(), bm->getSize());
    memcpy(bm->getPixels(), buffer->data(), buffer->size());
    buffer->release();
    decoder->stop();

    return true;
}
// Discards audio frames from whichever audio source is active until a frame
// at or past `timeSecs` is reached, then records the reached timestamp in
// mTimeStampOffset (seconds).  Returns false only if the stream ends before
// the target time; all other outcomes return true.
bool AudioFDK::ReadUntilTime(double timeSecs) {
    LOGTRACE("%s", __func__);
    status_t res = ERROR_END_OF_STREAM;
    MediaBuffer* mediaBuffer = NULL;

    int64_t targetTimeUs = (int64_t)(timeSecs * 1000000.0f);
    int64_t timeUs = 0;

    LOGI("Starting read to %f seconds: targetTimeUs = %lld", timeSecs, targetTimeUs);

    while (timeUs < targetTimeUs) {
        // Two source members exist -- presumably for different Android API
        // generations; whichever is non-null is used.  TODO confirm.
        if(mAudioSource.get())
            res = mAudioSource->read(&mediaBuffer, NULL);
        else if(mAudioSource23.get())
            res = mAudioSource23->read(&mediaBuffer, NULL);
        else {
            // Set timeUs to our target, and let the loop fall out so that we can get the timestamp
            // set properly.
            timeUs = targetTimeUs;
            continue;
        }

        if (res == OK) {
            // Advance timeUs from the frame's timestamp; a frame without a
            // timestamp resets timeUs to 0, so the loop keeps reading.
            bool rval = mediaBuffer->meta_data()->findInt64(kKeyTime, &timeUs);
            if (!rval) {
                LOGI("Frame did not have time value: STOPPING");
                timeUs = 0;
            }

            //LOGI("Finished reading from the media buffer");
            RUNDEBUG(mediaBuffer->meta_data()->dumpToLog());
            LOGTIMING("key time = %lld | target time = %lld", timeUs, targetTimeUs);
        } else if (res == INFO_FORMAT_CHANGED) {
            LOGI("Audio Stream Format Changed");
        } else if (res == ERROR_END_OF_STREAM) {
            LOGE("End of Audio Stream");
            return false;
        }

        // Release the skipped frame before the next read.
        if (mediaBuffer != NULL) {
            mediaBuffer->release();
            mediaBuffer = NULL;
        }

        // Be polite to other threads while busy-skipping.
        sched_yield();
    }

    mTimeStampOffset = ((double)timeUs / 1000000.0f);
    return true;
}
// Reads one VOBSUB subtitle sample from mSource, parses its control and data
// packets, and appends the resulting description to `parcel`.
// *startTimeUs is taken from the sample's kKeyTime; *endTimeUs is always set
// to -1 (not derivable for VOBSUB).  Returns the source's read error, or OK.
status_t TimedTextVOBSUBSource::read(
        int64_t *startTimeUs, int64_t *endTimeUs, Parcel *parcel,
        const MediaSource::ReadOptions *options) {
    MediaBuffer *textBuffer = NULL;

    status_t err = mSource->read(&textBuffer, options);
    if (err != OK) {
        ALOGE("mSource->read() failed, error code %d\n", err);
        return err;
    }
    CHECK(textBuffer != NULL);

    textBuffer->meta_data()->findInt64(kKeyTime, startTimeUs);
    char * content = (char *)textBuffer->data();
    size_t size = textBuffer->size();
    CHECK_GE(*startTimeUs, 0);

    mSubParser->stInit(content, size);
    // (Removed unused locals `type`, `paletteData`, `paletteDataSize` and a
    // duplicated `if (err != OK) break;` check.)
    do {
        err = mSubParser->stParseControlPacket();
        if (err != OK) break;
        // A payload of four bytes or fewer carries no subtitle data.
        if (mSubParser->m_iDataPacketSize <= 4) break;
        err = mSubParser->stParseDataPacket(NULL, 0);
        if (err != OK) break;
        //*startTimeUs = (int64_t)(mSubParser->m_iBeginTime);
        ALOGE("Call extractAndAppendLocalDescriptions, send data to \n");
        extractAndAppendLocalDescriptions(*startTimeUs, textBuffer, parcel);
    } while (false);
    textBuffer->release();

    *endTimeUs = -1;
    mSubParser->incTmpFileIdx();
    ALOGE("read() finished\n");
    return OK;
}
void MatroskaSource::clearPendingFrames() { while (!mPendingFrames.empty()) { MediaBuffer *frame = *mPendingFrames.begin(); mPendingFrames.erase(mPendingFrames.begin()); frame->release(); frame = NULL; } }
// Releases every queued video buffer, then empties the pending list.
void OmxDecoder::ReleaseAllPendingVideoBuffersLocked() {
    const int pendingCount = mPendingVideoBuffers.size();
    for (int idx = 0; idx < pendingCount; ++idx) {
        mPendingVideoBuffers[idx]->release();
    }
    mPendingVideoBuffers.clear();
}
// Reads and verifies the three mandatory Vorbis header packets and records
// the offset at which audio data begins.  Returns OK on success or the first
// error from packet reading/verification.
status_t MyVorbisExtractor::init() {
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS);

    // A Vorbis stream begins with three header packets whose type bytes are
    // 1 (identification), 3 (comment) and 5 (setup).  Verify each in turn
    // instead of repeating the read/verify/release sequence three times.
    static const uint8_t kHeaderTypes[] = { 1, 3, 5 };
    for (size_t i = 0; i < sizeof(kHeaderTypes) / sizeof(kHeaderTypes[0]); ++i) {
        MediaBuffer *packet;
        status_t err = readNextPacket(&packet);
        if (err != OK) {
            return err;
        }
        // range_length() is a size_t: %zu, not %d.
        LOGV("read packet of size %zu\n", packet->range_length());
        err = verifyHeader(packet, kHeaderTypes[i]);
        packet->release();
        packet = NULL;
        if (err != OK) {
            return err;
        }
    }

    // Audio data starts on the page following the header packets.
    mFirstDataOffset = mOffset + mCurrentPageSize;

    return OK;
}
int main(int argc, char **argv) { android::ProcessState::self()->startThreadPool(); OMXClient client; CHECK_EQ(client.connect(), OK); const int32_t kSampleRate = 22050; const int32_t kNumChannels = 2; sp<MediaSource> audioSource = new SineSource(kSampleRate, kNumChannels); #if 0 sp<MediaPlayerBase::AudioSink> audioSink; AudioPlayer *player = new AudioPlayer(audioSink); player->setSource(audioSource); player->start(); sleep(10); player->stop(); #endif sp<MetaData> encMeta = new MetaData; encMeta->setCString(kKeyMIMEType, 1 ? MEDIA_MIMETYPE_AUDIO_AMR_WB : MEDIA_MIMETYPE_AUDIO_AAC); encMeta->setInt32(kKeySampleRate, kSampleRate); encMeta->setInt32(kKeyChannelCount, kNumChannels); encMeta->setInt32(kKeyMaxInputSize, 8192); sp<MediaSource> encoder = OMXCodec::Create(client.interface(), encMeta, true, audioSource); encoder->start(); int32_t n = 0; status_t err; MediaBuffer *buffer; while ((err = encoder->read(&buffer)) == OK) { printf("."); fflush(stdout); buffer->release(); buffer = NULL; if (++n == 100) { break; } } printf("$\n"); encoder->stop(); client.disconnect(); return 0; }
// Records microphone PCM to a file (default /data/pcm, overridable via
// argv[1]) until interrupted.  Buffers are consumed and released in a loop;
// the actual file writing happens inside AudioSourceEmitter/Observer.
int main(int argc, char **argv) {
    // TODO: Commandline arguments to set sample rate, channels, output format
    const int audioChannels = 1;
    const int audioSampleRate = 16000;

    const char *file = "/data/pcm";
    if (argc > 1) {
        file = argv[1];
    }
    printf("Writing PCM data to %s\n", file);

    // NOTE(review): `fd` is not declared in this function -- presumably a
    // file-scope global shared with other code (e.g. a signal handler);
    // confirm against the rest of the file.
    fd = open(file, O_WRONLY | O_CREAT, 0440);
    if (fd < 0) {
        perror(NULL);
        return errno;
    }
    printf("^C to stop\n");

    sp<MediaSource> audioSource(
        new AudioSource(
            AUDIO_SOURCE_MIC,
#ifdef TARGET_GE_MARSHMALLOW
            // Marshmallow+ AudioSource takes an additional client-name arg.
            String16("silk-mic"),
#endif
            audioSampleRate,
            audioChannels
        )
    );
    sp<Observer> observer = new Observer();
    // Wrap the mic source so each PCM buffer is also emitted to `observer`.
    audioSource = new AudioSourceEmitter(audioSource, observer,
            audioSampleRate, audioChannels);

    status_t err = audioSource->start();
    if (err != 0) {
        printf("Start failed: %d\n", err);
        return 1;
    }

    // Pump buffers forever; reading drives the emitter side effects.
    for (;;) {
        MediaBuffer *buffer;
        status_t err = audioSource->read(&buffer);
        if (err != ::OK) {
            printf("Error reading from source: %d\n", err);
            return 1;
        }
        if (buffer == NULL) {
            printf("Failed to get buffer from source\n");
            return 1;
        }
        buffer->release();
    }
    return 0;
}
// Destructor: walks the singly-linked chain of pooled buffers, detaching the
// observer and releasing each one.  Every buffer must be unreferenced.
MediaBufferPool::~MediaBufferPool() {
    MediaBuffer *cursor = mFirstBuffer;
    while (cursor != NULL) {
        MediaBuffer *following = cursor->nextBuffer();

        // A pooled buffer must not still be referenced by a client.
        CHECK_EQ(cursor->refcount(), 0);

        cursor->setObserver(NULL);
        cursor->release();

        cursor = following;
    }
}
// Stops the source: drops any buffered sample and the buffer group.
// Safe to call when not started; always returns OK.
status_t RtspMediaSource::stop() {
    ReentrantMonitorAutoEnter mon(mMonitor);

    if (!mIsStarted) {
        return OK;
    }

    if (mBuffer) {
        mBuffer->release();
        mBuffer = nullptr;
    }
    mGroup = nullptr;
    mIsStarted = false;

    return OK;
}
// Parses one raw VOBSUB packet (`text`/`size`), extracting control and data
// packets and appending the resulting description at `startTimeUs` to
// `parcel`.  `endTimeUs` is unused for VOBSUB.  Always returns OK; parse
// failures simply skip the append.
status_t TimedTextVOBSUBSource::parse(
        uint8_t* text, size_t size, int64_t startTimeUs, int64_t endTimeUs,
        Parcel *parcel) {
    // Wrap the raw packet so it can be handed to the description extractor.
    MediaBuffer *textBuffer = new MediaBuffer(text,size);
    CHECK(textBuffer != NULL);

    mSubParser->stInit(text, size);
    // (Removed unused locals `type`, `paletteData`, `paletteDataSize` and a
    // duplicated `if (err != OK) break;` check.)
    status_t err;
    do {
        err = mSubParser->stParseControlPacket();
        if (err != OK) break;
        // A payload of four bytes or fewer carries no subtitle data.
        if (mSubParser->m_iDataPacketSize <= 4) break;
        err = mSubParser->stParseDataPacket(NULL, 0);
        if (err != OK) break;
        //*startTimeUs = (int64_t)(mSubParser->m_iBeginTime);
        ALOGE("Call extractAndAppendLocalDescriptions, send data to \n");
        extractAndAppendLocalDescriptions(startTimeUs, textBuffer, parcel);
    } while (false);
    textBuffer->release();
    mSubParser->incTmpFileIdx();
    ALOGE("read() finished\n");
    return OK;
}
// Wraps a raw SSA/ASS subtitle chunk in a MediaBuffer and appends its
// description for [startTimeUs, endTimeUs] to `parcel`.  Always returns OK.
status_t TimedTextSSASource::parse(
        uint8_t* text, size_t size, int64_t startTimeUs, int64_t endTimeUs,
        Parcel *parcel) {
    ALOGE("[--dbg--] ass parse satart");

    MediaBuffer *wrapped = new MediaBuffer(text,size);
    CHECK(wrapped != NULL);

    ALOGE("[--dbg--] ass parse sTime=%lld, eTime=%lld", startTimeUs, endTimeUs);
    extractAndAppendLocalDescriptions(startTimeUs, wrapped, parcel);

    wrapped->release();
    return OK;
}
bool loop() { for (;;) { MediaBuffer *buffer; status_t err = mMediaSource->read(&buffer); if (err != ::OK) { ALOGE("Error reading from %s source: %d", mName, err); return false; } if (buffer == NULL) { ALOGE("Failed to get buffer from %d source", mName); return false; } buffer->release(); } return true; };
// Reads one internal ASS subtitle sample from mInSource and appends its
// description to `parcel`.  Start time comes from kKeyTime, end time from
// kKeyDriftTime.  Returns the source's read error, or OK.
status_t TimedTextASSSource::in_read(
        int64_t *startTimeUs, int64_t *endTimeUs, Parcel *parcel,
        const MediaSource::ReadOptions *options) {
    MediaBuffer *sample = NULL;

    const status_t readResult = mInSource->read(&sample, options);
    if (readResult != OK) {
        return readResult;
    }
    CHECK(sample != NULL);

    sample->meta_data()->findInt64(kKeyTime, startTimeUs);
    sample->meta_data()->findInt64(kKeyDriftTime, endTimeUs);
    //CHECK_GE(*startTimeUs, 0);
    ALOGE("[--dbg--] ass internal subtitle in_read sTime=%lld, eTime=%lld",
            *startTimeUs, *endTimeUs);

    extractAndAppendLocalDescriptions(*startTimeUs, sample, parcel);
    sample->release();
    return OK;
}
// Reads one ADTS frame: optionally seeks to the frame containing the
// requested time, then returns the frame payload (header stripped) stamped
// with its timestamp.  Returns ERROR_END_OF_STREAM when no further frame can
// be parsed, ERROR_IO on a short read.
status_t AACSource::read(
        MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        if (mFrameDurationUs > 0) {
            int64_t seekFrame = seekTimeUs / mFrameDurationUs;
            // BUGFIX: clamp the frame index; the original indexed
            // mOffsetVector without any bounds check, so seeking past the
            // end (or with a negative time) read out of range.
            int64_t numFrames = (int64_t)mOffsetVector.size();
            if (seekFrame < 0) {
                seekFrame = 0;
            } else if (seekFrame >= numFrames) {
                seekFrame = numFrames > 0 ? numFrames - 1 : 0;
            }
            if (numFrames > 0) {
                mCurrentTimeUs = seekFrame * mFrameDurationUs;
                mOffset = mOffsetVector.itemAt(seekFrame);
            }
        }
    }

    size_t frameSize, frameSizeWithoutHeader, headerSize;
    if ((frameSize = getAdtsFrameLength(mDataSource, mOffset, &headerSize)) == 0) {
        return ERROR_END_OF_STREAM;
    }

    MediaBuffer *buffer;
    status_t err = mGroup->acquire_buffer(&buffer);
    if (err != OK) {
        return err;
    }

    // Copy only the payload; the ADTS header is stripped.
    frameSizeWithoutHeader = frameSize - headerSize;
    if (mDataSource->readAt(mOffset + headerSize, buffer->data(),
                frameSizeWithoutHeader) != (ssize_t)frameSizeWithoutHeader) {
        buffer->release();
        buffer = NULL;
        return ERROR_IO;
    }

    buffer->set_range(0, frameSizeWithoutHeader);
    buffer->meta_data()->setInt64(kKeyTime, mCurrentTimeUs);
    buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);

    mOffset += frameSize;
    mCurrentTimeUs += mFrameDurationUs;

    *out = buffer;
    return OK;
}
// pretend to read the source buffers void DummyRecorder::readFromSource() { ALOGV("ReadFromSource"); if (!mStarted) { return; } status_t err = OK; MediaBuffer *buffer; ALOGV("A fake writer accessing the frames"); while (mStarted && (err = mSource->read(&buffer)) == OK){ // if not getting a valid buffer from source, then exit if (buffer == NULL) { return; } buffer->release(); buffer = NULL; } }
void MuxEngine::threadEntry(){ for(;;){ Mutex::Autolock ao(mLock); if(mDone){ break; } MediaBuffer* buffer = NULL; MediaBuffer* out;//TODO: mean nothing. bool res; if(mNextReadAudio && mAudioSrc != NULL){ res = mAudioSrc->read(&buffer); if(!res) break; mFFMPEG->encodeAudio(buffer, out); if(mVideoSrc != NULL) mNextReadAudio = false; }else if(!mNextReadAudio && mVideoSrc != NULL){ res = mVideoSrc->read(&buffer); if(!res) break; // mFFMPEG->encodeVideo(buffer, out); ALOGI("encodeVideo"); if(mAudioSrc != NULL) mNextReadAudio = true; } buffer->release(); } { ALOGW("MuxEngine thread exited!!"); Mutex::Autolock ao(mLock); mThreadExited = true; mCond.signal(); } }
// Moves all pending video buffers out of the shared list under the lock,
// then releases them (and, on gonk, returns their gralloc buffers to the
// native window) without holding mPendingVideoBuffersLock.
void OmxDecoder::ReleaseAllPendingVideoBuffersLocked() {
    Vector<BufferItem> releasingVideoBuffers;
    {
        // Snapshot and clear the shared list while locked; the release work
        // below must run unlocked.
        Mutex::Autolock autoLock(mPendingVideoBuffersLock);

        int size = mPendingVideoBuffers.size();
        for (int i = 0; i < size; i++) {
            releasingVideoBuffers.push(mPendingVideoBuffers[i]);
        }
        mPendingVideoBuffers.clear();
    }
    // Free all pending video buffers without holding mPendingVideoBuffersLock.
    int size = releasingVideoBuffers.size();
    for (int i = 0; i < size; i++) {
        MediaBuffer *buffer;
        buffer = releasingVideoBuffers[i].mMediaBuffer;
#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 17
        android::sp<Fence> fence;
        int fenceFd = -1;
        fence = releasingVideoBuffers[i].mReleaseFenceHandle.mFence;
        if (fence.get() && fence->isValid()) {
            // dup() so the window receives its own fd -- presumably
            // cancelBuffer consumes it; confirm against ANativeWindow docs.
            fenceFd = fence->dup();
        }
        MOZ_ASSERT(buffer->refcount() == 1);
        // This code expect MediaBuffer's ref count is 1.
        // Return gralloc buffer to ANativeWindow
        ANativeWindow* window = static_cast<ANativeWindow*>(mNativeWindowClient.get());
        window->cancelBuffer(window, buffer->graphicBuffer().get(), fenceFd);
        // Mark MediaBuffer as rendered.
        // When gralloc buffer is directly returned to ANativeWindow,
        // this mark is necesary.
        sp<MetaData> metaData = buffer->meta_data();
        metaData->setInt32(kKeyRendered, 1);
#endif
        // Return MediaBuffer to OMXCodec.
        buffer->release();
    }
    releasingVideoBuffers.clear();
}
// Reads the next Matroska block, wrapping each of its frames in a
// MediaBuffer (timestamped, sync flag set) and queueing them on
// mPendingFrames.  On any frame read/crypto error the partially built queue
// is released, the iterator advanced, and the error returned.
status_t MatroskaSource::readBlock() {
    CHECK(mPendingFrames.empty());

    if (mBlockIter.eos()) {
        return ERROR_END_OF_STREAM;
    }

    const mkvparser::Block *block = mBlockIter.block();

    int64_t timeUs = mBlockIter.blockTimeUs();

    for (int i = 0; i < block->GetFrameCount(); ++i) {
        const mkvparser::Block::Frame &frame = block->GetFrame(i);
        MediaBuffer *mbuf = new MediaBuffer(frame.len);
        // All frames of a block share the block's timestamp.
        mbuf->meta_data()->setInt64(kKeyTime, timeUs);
        mbuf->meta_data()->setInt32(kKeyIsSyncFrame, block->IsKey());

        status_t err = frame.Read(mExtractor->mReader,
                static_cast<uint8_t *>(mbuf->data()));
        if (err == OK
                && mExtractor->mIsWebm
                && mExtractor->mTracks.itemAt(mTrackIndex).mEncrypted) {
            err = setWebmBlockCryptoInfo(mbuf);
        }

        if (err != OK) {
            // BUGFIX: release the frames already queued in this call before
            // clearing the list; a bare clear() leaked their MediaBuffers.
            while (!mPendingFrames.empty()) {
                MediaBuffer *pending = *mPendingFrames.begin();
                mPendingFrames.erase(mPendingFrames.begin());
                pending->release();
            }
            mBlockIter.advance();
            mbuf->release();
            return err;
        }

        mPendingFrames.push_back(mbuf);
    }

    mBlockIter.advance();

    return OK;
}
void OmxDecoder::ReleaseAllPendingVideoBuffersLocked() { Vector<MediaBuffer *> releasingVideoBuffers; { Mutex::Autolock autoLock(mPendingVideoBuffersLock); int size = mPendingVideoBuffers.size(); for (int i = 0; i < size; i++) { MediaBuffer *buffer = mPendingVideoBuffers[i]; releasingVideoBuffers.push(buffer); } mPendingVideoBuffers.clear(); } // Free all pending video buffers without holding mPendingVideoBuffersLock. int size = releasingVideoBuffers.size(); for (int i = 0; i < size; i++) { MediaBuffer *buffer; buffer = releasingVideoBuffers[i]; buffer->release(); } releasingVideoBuffers.clear(); }
// Reads one 3GPP timed-text sample from mSource and appends its description
// to `parcel`.  *startTimeUs comes from the sample's kKeyTime; *endTimeUs is
// always -1 (unavailable for this format).  Returns the read error, or OK.
status_t TimedText3GPPSource::read(
        int64_t *startTimeUs, int64_t *endTimeUs, Parcel *parcel,
        const MediaSource::ReadOptions *options) {
    MediaBuffer *textBuffer = NULL;
    status_t err = mSource->read(&textBuffer, options);
    if (err != OK) {
        return err;
    }
    CHECK(textBuffer != NULL);

    // Sample start time rides in the buffer's kKeyTime metadata.
    textBuffer->meta_data()->findInt64(kKeyTime, startTimeUs);
    CHECK_GE(*startTimeUs, 0);
#ifndef ANDROID_DEFAULT_CODE
    // Non-NULL options here means this read satisfied a seek request.
    if (options != NULL) {
        ALOGI("seek done, startTimeUs:%lld", *startTimeUs);
    }
#endif
    extractAndAppendLocalDescriptions(*startTimeUs, textBuffer, parcel);
    textBuffer->release();
    // endTimeUs is a dummy parameter for 3gpp timed text format.
    // Set a negative value to it to mark it is unavailable.
    *endTimeUs = -1;
    return OK;
}
// Decodes a single video frame (at frameTimeUs, or the track's thumbnail
// time when frameTimeUs < 0) with an OMX decoder created from `flags`,
// converts it to RGB565 and returns it as a newly allocated VideoFrame.
// Returns NULL on any failure.  Caller owns the returned frame.
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client,
        const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source,
        uint32_t flags,
        int64_t frameTimeUs,
        int seekMode) {
    sp<MetaData> format = source->getFormat();
#ifndef MTK_HARDWARE
    // XXX:
    // Once all vendors support OMX_COLOR_FormatYUV420Planar, we can
    // remove this check and always set the decoder output color format
    // skip this check for software decoders
#ifndef QCOM_HARDWARE
    if (isYUV420PlanarSupported(client, trackMeta)) {
        format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
#else
    if (!(flags & OMXCodec::kSoftwareCodecsOnly)) {
        if (isYUV420PlanarSupported(client, trackMeta)) {
            format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
        }
#endif
    }
#endif

    sp<MediaSource> decoder = OMXCodec::Create(
            client->interface(), format, false, source,
            NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        ALOGV("unable to instantiate video decoder.");
        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        ALOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.
    MediaSource::ReadOptions options;
    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {
        ALOGE("Unknown seek mode: %d", seekMode);
        return NULL;
    }

    MediaSource::ReadOptions::SeekMode mode =
            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);

    int64_t thumbNailTime;
    if (frameTimeUs < 0) {
        // No explicit time requested: fall back to the track's thumbnail
        // time, or 0 if none is published.
        if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options.setSeekTo(thumbNailTime, mode);
    } else {
        thumbNailTime = -1;
        options.setSeekTo(frameTimeUs, mode);
    }

    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            buffer->release();
            buffer = NULL;
        }
        err = decoder->read(&buffer, &options);
        options.clearSeekTo();
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        CHECK(buffer == NULL);

        ALOGV("decoding frame failed.");
        decoder->stop();

        return NULL;
    }

    ALOGV("successfully decoded video frame.");

    int32_t unreadable;
    if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        ALOGV("video frame is unreadable, decoder does not give us access "
             "to the video data.");

        buffer->release();
        buffer = NULL;

        decoder->stop();

        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));

            ALOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
                 thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    // Crop rect defaults to the full frame when the decoder publishes none.
    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!meta->findRect(
                kKeyCropRect,
                &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    // Output is RGB565: 2 bytes per pixel of the cropped size.
    VideoFrame *frame = new VideoFrame;
    frame->mWidth = crop_right - crop_left + 1;
    frame->mHeight = crop_bottom - crop_top + 1;
    frame->mDisplayWidth = frame->mWidth;
    frame->mDisplayHeight = frame->mHeight;
    frame->mSize = frame->mWidth * frame->mHeight * 2;
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    int32_t displayWidth, displayHeight;
    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
        frame->mDisplayWidth = displayWidth;
    }
    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
        frame->mDisplayHeight = displayHeight;
    }

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

#ifdef MTK_HARDWARE
    {
        // MTK decoders report buffer geometry via stride/slice height;
        // convert with those dimensions instead of the nominal ones.
        int32_t Stridewidth,SliceHeight;
        CHECK(meta->findInt32(kKeyStride, &Stridewidth));
        CHECK(meta->findInt32(kKeySliceHeight, &SliceHeight));
        ALOGD("kKeyWidth=%d,kKeyHeight=%d",width,height);
        ALOGD("Stridewidth=%d,SliceHeight=%d",Stridewidth,SliceHeight);

        width=Stridewidth;
        height=SliceHeight;
    }
#endif

    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);

    if (converter.isValid()) {
        err = converter.convert(
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                frame->mData,
                frame->mWidth,
                frame->mHeight,
                0, 0, frame->mWidth - 1, frame->mHeight - 1);
    } else {
        ALOGE("Unable to instantiate color conversion from format 0x%08x to "
              "RGB565",
              srcFormat);

        err = ERROR_UNSUPPORTED;
    }

    buffer->release();
    buffer = NULL;

    decoder->stop();

    if (err != OK) {
        ALOGE("Colorconverter failed to convert frame.");

        delete frame;
        frame = NULL;
    }

    return frame;
}

// Finds the first video track, tries a (platform-dependent) preferred codec
// class first and falls back to any codec, returning the extracted frame at
// `timeUs` (or NULL).  Also captures album art into mAlbumArt as a side
// effect when present.
VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
        int64_t timeUs, int option) {

    ALOGV("getFrameAtTime: %lld us option: %d", timeUs, option);

    if (mExtractor.get() == NULL) {
        ALOGV("no extractor.");
        return NULL;
    }

    sp<MetaData> fileMeta = mExtractor->getMetaData();

    if (fileMeta == NULL) {
        ALOGV("extractor doesn't publish metadata, failed to initialize?");
        return NULL;
    }

    int32_t drm = 0;
    if (fileMeta->findInt32(kKeyIsDRM, &drm) && drm != 0) {
        ALOGE("frame grab not allowed.");
        return NULL;
    }

    size_t n = mExtractor->countTracks();
    size_t i;
    for (i = 0; i < n; ++i) {
        sp<MetaData> meta = mExtractor->getTrackMetaData(i);

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp(mime, "video/", 6)) {
            break;
        }
    }

    if (i == n) {
        ALOGV("no video track found.");
        return NULL;
    }

    sp<MetaData> trackMeta = mExtractor->getTrackMetaData(
            i, MediaExtractor::kIncludeExtensiveMetaData);

    sp<MediaSource> source = mExtractor->getTrack(i);

    if (source.get() == NULL) {
        ALOGV("unable to instantiate video track.");
        return NULL;
    }

    const void *data;
    uint32_t type;
    size_t dataSize;
    if (fileMeta->findData(kKeyAlbumArt, &type, &data, &dataSize)
            && mAlbumArt == NULL) {
        mAlbumArt = new MediaAlbumArt;
        mAlbumArt->mSize = dataSize;
        mAlbumArt->mData = new uint8_t[dataSize];
        memcpy(mAlbumArt->mData, data, dataSize);
    }

    VideoFrame *frame = extractVideoFrameWithCodecFlags(
#ifndef QCOM_HARDWARE
            &mClient, trackMeta, source, OMXCodec::kPreferSoftwareCodecs,
#else
            &mClient, trackMeta, source, OMXCodec::kSoftwareCodecsOnly,
#endif
            timeUs, option);

    if (frame == NULL) {
        ALOGV("Software decoder failed to extract thumbnail, "
             "trying hardware decoder.");

        frame = extractVideoFrameWithCodecFlags(&mClient, trackMeta, source, 0,
                        timeUs, option);
    }

    return frame;
}
// Assembles the next logical Vorbis packet from the Ogg page stream,
// following lacing values across page boundaries (a packet may be continued
// on the next page).  On success *out holds a buffer stamped with a
// timestamp (when the sample rate is known) and, for the first packet of a
// page, the page's valid-sample count.  Returns OK, ERROR_IO on a short
// read, or ERROR_END_OF_STREAM / a negative status when paging fails.
status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
    *out = NULL;

    MediaBuffer *buffer = NULL;
    int64_t timeUs = -1;

    for (;;) {
        size_t i;
        size_t packetSize = 0;
        bool gotFullPacket = false;
        // Sum lacing values starting at mNextLaceIndex; a lace < 255
        // terminates the packet within this page.
        for (i = mNextLaceIndex; i < mCurrentPage.mNumSegments; ++i) {
            uint8_t lace = mCurrentPage.mLace[i];

            packetSize += lace;

            if (lace < 255) {
                gotFullPacket = true;
                ++i;
                break;
            }
        }

        if (mNextLaceIndex < mCurrentPage.mNumSegments) {
            // Page payload starts after the 27-byte header plus one byte per
            // lacing entry; skip the laces already consumed.
            off64_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments;
            for (size_t j = 0; j < mNextLaceIndex; ++j) {
                dataOffset += mCurrentPage.mLace[j];
            }

            size_t fullSize = packetSize;
            if (buffer != NULL) {
                // Continuing a packet begun on a previous page.
                fullSize += buffer->range_length();
            }
            MediaBuffer *tmp = new MediaBuffer(fullSize);
            if (buffer != NULL) {
                memcpy(tmp->data(), buffer->data(), buffer->range_length());
                tmp->set_range(0, buffer->range_length());
                buffer->release();
            } else {
                // XXX Not only is this not technically the correct time for
                // this packet, we also stamp every packet in this page
                // with the same time. This needs fixing later.

                if (mVi.rate) {
                    // Rate may not have been initialized yet if we're currently
                    // reading the configuration packets...
                    // Fortunately, the timestamp doesn't matter for those.
                    timeUs = mCurrentPage.mGranulePosition * 1000000ll / mVi.rate;
                }
                tmp->set_range(0, 0);
            }
            buffer = tmp;

            ssize_t n = mSource->readAt(
                    dataOffset,
                    (uint8_t *)buffer->data() + buffer->range_length(),
                    packetSize);

            if (n < (ssize_t)packetSize) {
                LOGV("failed to read %d bytes at 0x%016llx, got %ld bytes",
                     packetSize, dataOffset, n);
                return ERROR_IO;
            }

            buffer->set_range(0, fullSize);

            mNextLaceIndex = i;

            if (gotFullPacket) {
                // We've just read the entire packet.

                if (timeUs >= 0) {
                    buffer->meta_data()->setInt64(kKeyTime, timeUs);
                }

                if (mFirstPacketInPage) {
                    buffer->meta_data()->setInt32(
                            kKeyValidSamples, mCurrentPageSamples);
                    mFirstPacketInPage = false;
                }

                *out = buffer;

                return OK;
            }

            // fall through, the buffer now contains the start of the packet.
        }

        CHECK_EQ(mNextLaceIndex, mCurrentPage.mNumSegments);

        // Current page exhausted: advance to and parse the next page.
        mOffset += mCurrentPageSize;
        ssize_t n = readPage(mOffset, &mCurrentPage);

        if (n <= 0) {
            if (buffer) {
                buffer->release();
                buffer = NULL;
            }

            LOGV("readPage returned %ld", n);

            return n < 0 ? n : (status_t)ERROR_END_OF_STREAM;
        }

        mCurrentPageSamples =
            mCurrentPage.mGranulePosition - mPrevGranulePosition;
        mFirstPacketInPage = true;
        mPrevGranulePosition = mCurrentPage.mGranulePosition;

        mCurrentPageSize = n;
        mNextLaceIndex = 0;

        if (buffer != NULL) {
            if ((mCurrentPage.mFlags & 1) == 0) {
                // This page does not continue the packet, i.e. the packet
                // is already complete.

                if (timeUs >= 0) {
                    buffer->meta_data()->setInt64(kKeyTime, timeUs);
                }

                buffer->meta_data()->setInt32(
                        kKeyValidSamples, mCurrentPageSamples);
                mFirstPacketInPage = false;

                *out = buffer;

                return OK;
            }
        }
    }
}
// Test harness: decodes the first suitable video track of argv[1], re-encodes
// it to MPEG-4 and writes /sdcard/output.mp4 (active #if branches); the
// disabled branches print encoded-frame sizes or pull raw camera frames.
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    DataSource::RegisterDefaultSniffers();

#if 1
    if (argc != 2) {
        fprintf(stderr, "usage: %s filename\n", argv[0]);
        return 1;
    }

    OMXClient client;
    CHECK_EQ(client.connect(), OK);

#if 1
    // Decode the input file's video track...
    sp<MediaSource> source = createSource(argv[1]);

    if (source == NULL) {
        fprintf(stderr, "Unable to find a suitable video track.\n");
        return 1;
    }

    sp<MetaData> meta = source->getFormat();

    sp<MediaSource> decoder = OMXCodec::Create(
            client.interface(), meta, false /* createEncoder */, source);

    int width, height;
    bool success = meta->findInt32(kKeyWidth, &width);
    success = success && meta->findInt32(kKeyHeight, &height);
    CHECK(success);
#else
    // ...or feed fixed-size dummy frames instead.
    int width = 800;
    int height = 480;
    sp<MediaSource> decoder = new DummySource(width, height);
#endif

    sp<MetaData> enc_meta = new MetaData;
    // enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
    enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
    enc_meta->setInt32(kKeyWidth, width);
    enc_meta->setInt32(kKeyHeight, height);

    sp<MediaSource> encoder = OMXCodec::Create(
            client.interface(), enc_meta, true /* createEncoder */, decoder);

#if 1
    // Mux the encoded stream to MP4 and wait for the writer to drain.
    sp<MPEG4Writer> writer = new MPEG4Writer("/sdcard/output.mp4");
    writer->addSource(encoder);
    writer->start();
    while (!writer->reachedEOS()) {
        usleep(100000);
    }
    writer->stop();
#else
    // Pull encoded buffers by hand and print their sizes.
    // NOTE(review): %d with range_length() (a size_t) is wrong on LP64 --
    // should be %zu if this branch is ever enabled.
    encoder->start();

    MediaBuffer *buffer;
    while (encoder->read(&buffer) == OK) {
        int32_t isSync;
        if (!buffer->meta_data()->findInt32(kKeyIsSyncFrame, &isSync)) {
            isSync = false;
        }

        printf("got an output frame of size %d%s\n", buffer->range_length(),
               isSync ? " (SYNC)" : "");

        buffer->release();
        buffer = NULL;
    }

    encoder->stop();
#endif

    client.disconnect();
#endif

#if 0
    CameraSource *source = CameraSource::Create();
    printf("source = %p\n", source);

    for (int i = 0; i < 100; ++i) {
        MediaBuffer *buffer;
        status_t err = source->read(&buffer);
        CHECK_EQ(err, OK);

        printf("got a frame, data=%p, size=%d\n",
               buffer->data(), buffer->range_length());

        buffer->release();
        buffer = NULL;
    }

    delete source;
    source = NULL;
#endif

    return 0;
}
void MediaPuller::onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { case kWhatStart: { status_t err; if (mIsAudio) { // This atrocity causes AudioSource to deliver absolute // systemTime() based timestamps (off by 1 us). sp<MetaData> params = new MetaData; params->setInt64(kKeyTime, 1ll); err = mSource->start(params.get()); } else { err = mSource->start(); if (err != OK) { ALOGE("source failed to start w/ err %d", err); } } if (err == OK) { schedulePull(); } sp<AMessage> response = new AMessage; response->setInt32("err", err); uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); break; } case kWhatStop: { sp<MetaData> meta = mSource->getFormat(); const char *tmp; CHECK(meta->findCString(kKeyMIMEType, &tmp)); AString mime = tmp; ALOGI("MediaPuller(%s) stopping.", mime.c_str()); mSource->stop(); ALOGI("MediaPuller(%s) stopped.", mime.c_str()); ++mPullGeneration; sp<AMessage> notify; CHECK(msg->findMessage("notify", ¬ify)); notify->post(); break; } case kWhatPull: { int32_t generation; CHECK(msg->findInt32("generation", &generation)); if (generation != mPullGeneration) { break; } MediaBuffer *mbuf; status_t err = mSource->read(&mbuf); if (mPaused) { if (err == OK) { mbuf->release(); mbuf = NULL; } schedulePull(); break; } if (err != OK) { if (err == ERROR_END_OF_STREAM) { ALOGI("stream ended."); } else { ALOGE("error %d reading stream.", err); } sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kWhatEOS); notify->post(); } else { int64_t timeUs; CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs)); sp<ABuffer> accessUnit = new ABuffer(mbuf->range_length()); memcpy(accessUnit->data(), (const uint8_t *)mbuf->data() + mbuf->range_offset(), mbuf->range_length()); accessUnit->meta()->setInt64("timeUs", timeUs); if (mIsAudio) { mbuf->release(); mbuf = NULL; } else { // video encoder will release MediaBuffer when done // with underlying data. 
accessUnit->setMediaBufferBase(mbuf); } sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kWhatAccessUnit); notify->setBuffer("accessUnit", accessUnit); notify->post(); if (mbuf != NULL) { ALOGV("posted mbuf %p", mbuf); } schedulePull(); } break; } case kWhatPause: { mPaused = true; break; } case kWhatResume: { mPaused = false; break; } default: TRESPASS(); } }