コード例 #1
0
static void dumpSource(const sp<MediaSource> &source, const String8 &filename) {
    FILE *out = fopen(filename.string(), "wb");

    CHECK_EQ((status_t)OK, source->start());

    status_t err;
    for (;;) {
        MediaBuffer *mbuf;
        err = source->read(&mbuf);

        if (err == INFO_FORMAT_CHANGED) {
            continue;
        } else if (err != OK) {
            break;
        }

        CHECK_EQ(
                fwrite((const uint8_t *)mbuf->data() + mbuf->range_offset(),
                       1,
                       mbuf->range_length(),
                       out),
                (ssize_t)mbuf->range_length());

        mbuf->release();
        mbuf = NULL;
    }

    CHECK_EQ((status_t)OK, source->stop());

    fclose(out);
    out = NULL;
}
コード例 #2
0
// Pulls one decoded audio buffer from mAudioSource and converts it into
// |aFrame| via ToAudioFrame().  |aSeekTimeUs| is a target position in
// microseconds, or -1 for a plain sequential read.  Returns true only when
// err == OK (i.e. a buffer was obtained); a zero-length OK buffer also
// returns true without touching |aFrame|.
bool OmxDecoder::ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs)
{
  MOZ_ASSERT(aSeekTimeUs >= -1);

  status_t err;
  if (mAudioMetadataRead && aSeekTimeUs == -1) {
    // Use the data read into the buffer during metadata time
    err = OK;
  }
  else {
    ReleaseAudioBuffer();
    if (aSeekTimeUs != -1) {
      MediaSource::ReadOptions options;
      options.setSeekTo(aSeekTimeUs);
      err = mAudioSource->read(&mAudioBuffer, &options);
    } else {
      err = mAudioSource->read(&mAudioBuffer);
    }
  }
  // The metadata-time buffer may only be consumed once.
  mAudioMetadataRead = false;

  // Clear the seek target so the recursive call below (and any retry) does a
  // normal sequential read instead of seeking again.
  aSeekTimeUs = -1;

  if (err == OK && mAudioBuffer->range_length() != 0) {
    int64_t timeUs;
    if (!mAudioBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
      LOG("no frame time");
      return false;
    }

    if (timeUs < 0) {
      LOG("frame time %lld must be nonnegative", timeUs);
      return false;
    }

    return ToAudioFrame(aFrame, timeUs,
                        mAudioBuffer->data(),
                        mAudioBuffer->range_offset(),
                        mAudioBuffer->range_length(),
                        mAudioChannels, mAudioSampleRate);
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    LOG("mAudioSource INFO_FORMAT_CHANGED");
    if (!SetAudioFormat())
      return false;
    else
      // aSeekTimeUs is -1 here, so this retries as a sequential read.
      return ReadAudio(aFrame, aSeekTimeUs);
  }
  else if (err == ERROR_END_OF_STREAM) {
    LOG("mAudioSource END_OF_STREAM");
  }
  else if (err != OK) {
    LOG("mAudioSource ERROR %#x", err);
  }

  return err == OK;
}
コード例 #3
0
status_t DummyVideoSource::read(
        MediaBuffer **out,
        const MediaSource::ReadOptions *options) {

    ALOGV("read: E");

    // Serves a single still image as a video track: every successful read
    // returns the same pixel buffer (mImageBuffer, YUV420 => 1.5 bytes/pixel)
    // stamped with a wall-clock-derived timestamp, until the clip duration
    // is reached.
    const int32_t kTimeScale = 1000;  /* time scale in ms */
    int64_t seekTimeUs;
    ReadOptions::SeekMode seekMode;
    if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
        // Restart the wall-clock reference at the seek position.
        // (Removed the dead local "seeking" flag that was set but never read.)
        mImageSeekTime = seekTimeUs;
        M4OSA_clockGetTime(&mImagePlayStartTime, kTimeScale);
    }

    if ((mImageSeekTime == mImageClipDuration) ||
        (mFrameTimeUs == (int64_t)mImageClipDuration)) {
        ALOGV("read: EOS reached");
        *out = NULL;
        return ERROR_END_OF_STREAM;
    }

    status_t err = OK;
    MediaBuffer *buffer = new MediaBuffer(
            mImageBuffer, (mFrameWidth * mFrameHeight * 1.5));

    // Set timestamp of buffer
    if (mIsFirstImageFrame) {
        M4OSA_clockGetTime(&mImagePlayStartTime, kTimeScale);
        // +1 us so the first timestamp lies strictly after the begin-cut time.
        mFrameTimeUs =  (mImageSeekTime + 1);
        ALOGV("read: jpg 1st frame timeUs = %lld, begin cut time = %ld",
            mFrameTimeUs, mImageSeekTime);

        mIsFirstImageFrame = false;
    } else {
        M4OSA_Time  currentTimeMs;
        M4OSA_clockGetTime(&currentTimeMs, kTimeScale);

        // Elapsed wall-clock milliseconds since playback start, scaled to us.
        mFrameTimeUs = mImageSeekTime +
            (currentTimeMs - mImagePlayStartTime) * 1000LL;

        ALOGV("read: jpg frame timeUs = %lld", mFrameTimeUs);
    }

    buffer->meta_data()->setInt64(kKeyTime, mFrameTimeUs);
    buffer->set_range(buffer->range_offset(),
                mFrameWidth * mFrameHeight * 1.5);

    *out = buffer;
    return err;
}
status_t DummyVideoSource::read(
                        MediaBuffer **out,
                        const MediaSource::ReadOptions *options) {
    // Serves a single still image as a video track: each successful read
    // returns the same pixel buffer (mImageBuffer, YUV420 => 1.5 bytes/pixel)
    // with a wall-clock-derived timestamp, until the clip duration is hit.
    status_t err = OK;
    MediaBuffer *buffer;
    LOG2("DummyVideoSource::read START");

    // Time scale passed to M4OSA_clockGetTime: values are in milliseconds.
    const int32_t kTimeScale = 1000;

    int64_t seekTimeUs;
    ReadOptions::SeekMode seekMode;

    if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
        // Restart the wall-clock reference at the seek position.
        // (Removed the dead local "seeking" flag that was set but never read.)
        mImageSeekTime = seekTimeUs;
        M4OSA_clockGetTime(&mImagePlayStartTime, kTimeScale);
    }

    if ((mImageSeekTime == mImageClipDuration) || (mFrameTimeUs == (int64_t)mImageClipDuration)) {
        LOG2("DummyVideoSource::read() End of stream reached; return NULL buffer");
        *out = NULL;
        return ERROR_END_OF_STREAM;
    }

    buffer = new MediaBuffer(mImageBuffer, (mFrameWidth*mFrameHeight*1.5));

    //set timestamp of buffer
    if (mIsFirstImageFrame) {
        M4OSA_clockGetTime(&mImagePlayStartTime, kTimeScale);
        // +1 us so the first timestamp lies strictly after the begin-cut time.
        mFrameTimeUs =  (mImageSeekTime + 1);
        LOG2("DummyVideoSource::read() jpg 1st frame timeUs = %lld, begin cut time = %ld", mFrameTimeUs, mImageSeekTime);
        mIsFirstImageFrame = false;
    } else {
        M4OSA_Time  currentTimeMs;
        M4OSA_clockGetTime(&currentTimeMs, kTimeScale);

        // Elapsed wall-clock milliseconds since playback start, scaled to us.
        mFrameTimeUs = mImageSeekTime + (currentTimeMs - mImagePlayStartTime)*1000;
        LOG2("DummyVideoSource::read() jpg frame timeUs = %lld", mFrameTimeUs);
    }
    buffer->meta_data()->setInt64(kKeyTime, mFrameTimeUs);
    buffer->set_range(buffer->range_offset(), mFrameWidth*mFrameHeight*1.5);
    *out = buffer;
    return err;
}
コード例 #5
0
// Pulls one decoded audio buffer from mAudioSource and converts it into
// |aFrame| via ToAudioFrame().  |aSeekTimeUs| is a target position in
// microseconds, or -1 for a plain sequential read.  Returns true only when
// the read succeeded.
bool OmxDecoder::ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs)
{
  status_t err;

  if (mAudioMetadataRead && aSeekTimeUs == -1) {
    // Use the data read into the buffer during metadata time
    err = OK;
  }
  else {
    ReleaseAudioBuffer();
    if (aSeekTimeUs != -1) {
      MediaSource::ReadOptions options;
      options.setSeekTo(aSeekTimeUs);
      err = mAudioSource->read(&mAudioBuffer, &options);
    } else {
      err = mAudioSource->read(&mAudioBuffer);
    }
  }
  // The metadata-time buffer may only be consumed once.
  mAudioMetadataRead = false;

  // Clear the seek target so the retry below does a plain sequential read.
  aSeekTimeUs = -1;

  if (err == OK && mAudioBuffer->range_length() != 0) {
    int64_t timeUs;
    if (!mAudioBuffer->meta_data()->findInt64(kKeyTime, &timeUs))
      return false;

    return ToAudioFrame(aFrame, timeUs,
                        mAudioBuffer->data(),
                        mAudioBuffer->range_offset(),
                        mAudioBuffer->range_length(),
                        mAudioChannels, mAudioSampleRate);
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info and retry the read.
    // Previously a *successful* SetAudioFormat() fell through and returned
    // true without ever filling in |aFrame|.
    if (!SetAudioFormat())
      return false;
    return ReadAudio(aFrame, aSeekTimeUs);
  }
  else if (err == ERROR_END_OF_STREAM)
    return false;

  // Report success only for a genuinely successful read; the original
  // returned true here even for unrecognized error codes.
  return err == OK;
}
// Fetches the next access unit (AU) from the 3gp reader and returns a newly
// allocated MediaBuffer holding a copy of its payload, stamped with the AU's
// composition timestamp (ms -> us).  Returns NULL at end of stream; the
// caller owns (and must release) the returned buffer.
static MediaBuffer* readBufferFromReader(
        VideoEditorAudioDecoder_Context* pDecContext) {
    M4OSA_ERR lerr = M4NO_ERROR;
    M4_AccessUnit* pAccessUnit = pDecContext->m_pNextAccessUnitToDecode;

    // Get next AU from reader.
    lerr = pDecContext->m_pReader->m_pFctGetNextAu(
               pDecContext->m_pReader->m_readerContext,
               (M4_StreamHandler*)pDecContext->mAudioStreamHandler,
               pAccessUnit);

    if (lerr == M4WAR_NO_MORE_AU) {
        LOGV("readBufferFromReader : EOS");
        return NULL;
    }

    // NOTE(review): error codes other than M4WAR_NO_MORE_AU are not checked
    // before dereferencing pAccessUnit below -- confirm the reader guarantees
    // a valid AU on every other return value.

    // Remember the AU's composition time (ms) for later queries.
    pDecContext->timeStampMs = pAccessUnit->m_CTS;

    MediaBuffer* newBuffer = new MediaBuffer((size_t)pAccessUnit->m_size);
    memcpy((void *)((M4OSA_Int8*)newBuffer->data() + newBuffer->range_offset()),
        (void *)pAccessUnit->m_dataAddress, pAccessUnit->m_size);
    // kKeyTime is in microseconds; m_CTS is in milliseconds.
    newBuffer->meta_data()->setInt64(kKeyTime, (pAccessUnit->m_CTS * 1000LL));
    return newBuffer;
}
コード例 #7
0
// Decodes a single video frame (the container's thumbnail frame when
// |frameTimeUs| < 0, otherwise the frame at |frameTimeUs|) and converts it to
// RGB565.  Returns a heap-allocated VideoFrame owned by the caller, or NULL
// on any failure.
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client,
        const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source,
        uint32_t flags,
        int64_t frameTimeUs,
        int seekMode) {

    // Validate the requested seek mode *before* instantiating the decoder.
    // The original performed this check after decoder->start(), so the early
    // return leaked a started decoder (no matching stop()).
    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {

        ALOGE("Unknown seek mode: %d", seekMode);
        return NULL;
    }

    MediaSource::ReadOptions::SeekMode mode =
            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);

    sp<MetaData> format = source->getFormat();

    // XXX:
    // Once all vendors support OMX_COLOR_FormatYUV420Planar, we can
    // remove this check and always set the decoder output color format
    if (isYUV420PlanarSupported(client, trackMeta)) {
        format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
    }

    sp<MediaSource> decoder =
        OMXCodec::Create(
                client->interface(), format, false, source,
                NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        ALOGV("unable to instantiate video decoder.");

        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        ALOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.

    MediaSource::ReadOptions options;

    int64_t thumbNailTime;
    if (frameTimeUs < 0) {
        // Caller wants the designated thumbnail frame; fall back to time 0
        // if the container does not advertise one.
        if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options.setSeekTo(thumbNailTime, mode);
    } else {
        thumbNailTime = -1;
        options.setSeekTo(frameTimeUs, mode);
    }

    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            buffer->release();
            buffer = NULL;
        }
        err = decoder->read(&buffer, &options);
        options.clearSeekTo();  // only seek on the first read
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        // On failure the decoder must not have handed us a buffer.
        CHECK(buffer == NULL);

        ALOGV("decoding frame failed.");
        decoder->stop();

        return NULL;
    }

    ALOGV("successfully decoded video frame.");

    int32_t unreadable;
    if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        ALOGV("video frame is unreadable, decoder does not give us access "
             "to the video data.");

        buffer->release();
        buffer = NULL;

        decoder->stop();

        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            // Informational only: the decoder may legitimately land on a
            // nearby frame depending on the seek mode.
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));

            ALOGV("thumbNailTime = %" PRId64 " us, timeUs = %" PRId64 " us, mime = %s",
                 thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    // Without an explicit crop rect, the full decoded frame is the picture.
    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!meta->findRect(
                kKeyCropRect,
                &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    VideoFrame *frame = new VideoFrame;
    frame->mWidth = crop_right - crop_left + 1;
    frame->mHeight = crop_bottom - crop_top + 1;
    frame->mDisplayWidth = frame->mWidth;
    frame->mDisplayHeight = frame->mHeight;
    frame->mSize = frame->mWidth * frame->mHeight * 2;  // RGB565: 2 bytes/px
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    // Prefer the stream's advertised display dimensions when present
    // (anamorphic content).
    int32_t displayWidth, displayHeight;
    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
        frame->mDisplayWidth = displayWidth;
    }
    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
        frame->mDisplayHeight = displayHeight;
    }

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

    ColorConverter converter(
            (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);

    if (converter.isValid()) {
        err = converter.convert(
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                frame->mData,
                frame->mWidth,
                frame->mHeight,
                0, 0, frame->mWidth - 1, frame->mHeight - 1);
    } else {
        ALOGE("Unable to instantiate color conversion from format 0x%08x to "
              "RGB565",
              srcFormat);

        err = ERROR_UNSUPPORTED;
    }

    buffer->release();
    buffer = NULL;

    decoder->stop();

    if (err != OK) {
        ALOGE("Colorconverter failed to convert frame.");

        delete frame;
        frame = NULL;
    }

    return frame;
}
コード例 #8
0
// Reads up to mBufferSize bytes of raw PCM from mDataSource (optionally
// seeking first) and returns a buffer of 16-bit signed samples:
//   * 8-bit unsigned input is expanded to 16-bit signed,
//   * 24-bit signed little-endian input is truncated to its top 16 bits,
//   * 16-bit input passes through unchanged.
// The kKeyTime stamp reflects the post-read position in the stream.
status_t PCMSource::read(
        MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;
    int64_t seekTimeUs;
    ReadOptions::SeekMode seek = ReadOptions::SEEK_CLOSEST_SYNC;
    if (options != NULL && options->getSeekTo(&seekTimeUs,&seek)) {
        // Map the seek time to a byte offset, clamped to the data size.
        // NOTE(review): assumes 2 bytes per source sample -- confirm this is
        // intended for the 8- and 24-bit-per-sample cases as well.
        int64_t pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * 2;
        if (pos > mSize) {
            pos = mSize;
        }
        mCurrentPos = pos + mOffset;
    }

    MediaBuffer *buffer;
    status_t err = mGroup->acquire_buffer(&buffer);
    if (err != OK) {
        return err;
    }

    ssize_t n = mDataSource->readAt(
            mCurrentPos, buffer->data(), mBufferSize);
    if (n <= 0) {
        buffer->release();
        buffer = NULL;
        return ERROR_END_OF_STREAM;
    }

    mCurrentPos += n;

    buffer->set_range(0, n);

    if (mBitsPerSample == 8) {
        // Convert 8-bit unsigned samples to 16-bit signed.

        MediaBuffer *tmp;
        CHECK_EQ(mGroup->acquire_buffer(&tmp), OK);

        // The new buffer holds the sample number of samples, but each
        // one is 2 bytes wide.
        tmp->set_range(0, 2 * n);

        int16_t *dst = (int16_t *)tmp->data();
        const uint8_t *src = (const uint8_t *)buffer->data();
        while (n-- > 0) {
            // Re-center around 0 and scale to the full 16-bit range.
            *dst++ = ((int16_t)(*src) - 128) * 256;
            ++src;
        }

        buffer->release();
        buffer = tmp;
    } else if (mBitsPerSample == 24) {
        // Convert 24-bit signed samples to 16-bit signed, in place: the
        // destination advances 2 bytes per sample while the source advances
        // 3, so the write never overtakes the read.

        const uint8_t *src =
            (const uint8_t *)buffer->data() + buffer->range_offset();
        int16_t *dst = (int16_t *)src;

        size_t numSamples = buffer->range_length() / 3;
        for (size_t i = 0; i < numSamples; ++i) {
            // Assemble the little-endian 24-bit value in an unsigned type
            // and sign-extend arithmetically.  The original left-shifted a
            // signed int ("(x << 8) >> 8"), which is undefined behavior for
            // negative samples.
            uint32_t u = (uint32_t)src[0]
                    | ((uint32_t)src[1] << 8)
                    | ((uint32_t)src[2] << 16);
            int32_t x = (int32_t)(u ^ 0x800000) - 0x800000;

            *dst++ = (int16_t)(x >> 8);  // keep the 16 most significant bits
            src += 3;
        }

        buffer->set_range(buffer->range_offset(), 2 * numSamples);
    }

    size_t bytesPerSample = mBitsPerSample >> 3;

    // Timestamp derived from the source-byte position past this read.
    buffer->meta_data()->setInt64(
            kKeyTime,
            1000000LL * (mCurrentPos - mOffset)
                / (mNumChannels * bytesPerSample) / mSampleRate);


    *out = buffer;

    return OK;
}
// Decodes the track's thumbnail frame (or the first frame when no thumbnail
// time is advertised) and converts it to RGB565.  Returns a heap-allocated
// VideoFrame owned by the caller, or NULL on failure.
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client,
        const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source, uint32_t flags) {
    sp<MediaSource> decoder =
        OMXCodec::Create(
                client->interface(), source->getFormat(), false, source,
                NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        LOGV("unable to instantiate video decoder.");

        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        LOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.

    MediaSource::ReadOptions options;
    int64_t thumbNailTime;
    if (trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) {
        options.setSeekTo(thumbNailTime);
    } else {
        // -1 marks "no thumbnail time"; skips the timestamp check below.
        thumbNailTime = -1;
    }

    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            buffer->release();
            buffer = NULL;
        }
        err = decoder->read(&buffer, &options);
        options.clearSeekTo();  // only seek on the first read
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        // On failure the decoder must not have handed us a buffer.
        CHECK_EQ(buffer, NULL);

        LOGV("decoding frame failed.");
        decoder->stop();

        return NULL;
    }

    LOGV("successfully decoded video frame.");

    int32_t unreadable;
    if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        // Secure/protected buffers: the decoder won't expose the pixels.
        LOGV("video frame is unreadable, decoder does not give us access "
             "to the video data.");

        buffer->release();
        buffer = NULL;

        decoder->stop();

        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            // Informational only: the seek may land on a nearby sync frame.
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));

            LOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
                 thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    VideoFrame *frame = new VideoFrame;
    frame->mWidth = width;
    frame->mHeight = height;
    frame->mDisplayWidth = width;
    frame->mDisplayHeight = height;
    frame->mSize = width * height * 2;  // RGB565: 2 bytes per pixel
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

    // Aborts (CHECK) if the decoder's output format has no RGB565 converter.
    ColorConverter converter(
            (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);
    CHECK(converter.isValid());

    converter.convert(
            width, height,
            (const uint8_t *)buffer->data() + buffer->range_offset(),
            0,
            frame->mData, width * 2);

    buffer->release();
    buffer = NULL;

    decoder->stop();

    return frame;
}
コード例 #10
0
// Writer thread body: pulls encoded AMR buffers from mSource and appends
// them to mFd until stopped, end of stream, or a file size/duration limit is
// reached.  Returns OK for normal termination (EOS/timeout), else the error.
status_t AMRWriter::threadFunc() {
    mEstimatedDurationUs = 0;
    mEstimatedSizeBytes = 0;
    bool stoppedPrematurely = true;  // cleared once any buffer is written
    int64_t previousPausedDurationUs = 0;
    int64_t maxTimestampUs = 0;
    status_t err = OK;

    prctl(PR_SET_NAME, (unsigned long)"AMRWriter", 0, 0, 0);
    while (!mDone) {
        MediaBuffer *buffer;
        err = mSource->read(&buffer);

        if (err != OK) {
            break;
        }

        if (mPaused) {
            // Drop data while paused; the gap is compensated on resume.
            buffer->release();
            buffer = NULL;
            continue;
        }

        mEstimatedSizeBytes += buffer->range_length();
        if (exceedsFileSizeLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
            break;
        }

        int64_t timestampUs;
        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));
        if (timestampUs > mEstimatedDurationUs) {
            mEstimatedDurationUs = timestampUs;
        }
        if (mResumed) {
            // Fold the paused gap out of the timeline (20ms = one AMR frame).
            previousPausedDurationUs += (timestampUs - maxTimestampUs - 20000);
            mResumed = false;
        }
        timestampUs -= previousPausedDurationUs;
        ALOGV("time stamp: %lld, previous paused duration: %lld",
                timestampUs, previousPausedDurationUs);
        if (timestampUs > maxTimestampUs) {
            maxTimestampUs = timestampUs;
        }

        if (exceedsFileDurationLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
            break;
        }
        ssize_t n = write(mFd,
                        (const uint8_t *)buffer->data() + buffer->range_offset(),
                        buffer->range_length());

        if (n < (ssize_t)buffer->range_length()) {
            // Short write: treat as a fatal I/O condition and stop.
            buffer->release();
            buffer = NULL;

            break;
        }

        // XXX: How to tell it is stopped prematurely?
        if (stoppedPrematurely) {
            stoppedPrematurely = false;
        }

        buffer->release();
        buffer = NULL;
    }

    if (stoppedPrematurely) {
        notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_TRACK_INFO_COMPLETION_STATUS, UNKNOWN_ERROR);
    }

    close(mFd);
    mFd = -1;
    mReachedEOS = true;
    // EOS and a read timeout are normal ways for the loop to end.
    // BUGFIX: the original wrote "err = -ETIMEDOUT" (assignment), which made
    // this condition always true and clobbered the real error code.
    if ((err == ERROR_END_OF_STREAM) || (err == -ETIMEDOUT)) {
        return OK;
    }
    return err;
}
コード例 #11
0
ファイル: CEncoderLame.cpp プロジェクト: dAck2cC2/m3e
// Drains 16-bit PCM from |pMediaSource_in|, encodes it to MP3 with LAME and
// writes the result (including the flush remainder and an ID3v1 tag) to
// |pAudioSink_out|.  Supports mono and stereo input only.  Returns OK on
// success or an error code via the CHECK_* macros.
int
CEncoderLame::encode(const sp<IMediaSource>& pMediaSource_in, const sp<IAudioSink>& pAudioSink_out)
{
    AUTO_LOG();

    CHECK_PTR_EXT(m_pGobalFlags, BAD_VALUE);
    CHECK_PTR_EXT(pMediaSource_in, BAD_VALUE);
    CHECK_PTR_EXT(pAudioSink_out, BAD_VALUE);

    sp<MetaData>  pMeta = pMediaSource_in->getFormat();
    CHECK_PTR_EXT(pMeta,  BAD_VALUE);

    int32_t iChannelNum = 0;
    bool chk = pMeta->findInt32(kKeyChannelCount, &iChannelNum);
    CHECK_IS_EXT((true == chk), UNKNOWN_ERROR);
    int  ret = lame_set_num_channels(m_pGobalFlags, iChannelNum);
    CHECK_IS_EXT((ret == OK), ret);

    // only support one channel and two channels
    CHECK_IS_EXT(((1 == iChannelNum) || (2 == iChannelNum)), INVALID_OPERATION);

    ret = pMediaSource_in->start();
    CHECK_IS_EXT((ret == OK), ret);

    MediaBuffer* pBuf = NULL;

    while (OK == (pMediaSource_in->read(&pBuf, NULL))) {
        if (pBuf == NULL) {
            break;
        }

        if ((pBuf->data() == NULL) || (pBuf->range_length() == 0)) {
            pBuf->release();
            pBuf = NULL;
            continue;
        }

        // Walk the interleaved 16-bit samples backwards: pOrg is advanced
        // one past the end, then the de-interleave loops below decrement it.
        int16_t *pOrg = (int16_t *)((const char *)pBuf->data() + pBuf->range_offset());
        ssize_t iSamplesRead = pBuf->range_length() / 2;
        pOrg += iSamplesRead;
        iSamplesRead /= iChannelNum;   // now: frames (samples per channel)

        CHECK_NE(iSamplesRead, 0);

        // Worst-case MP3 output size recommended by the LAME documentation.
        int iMP3BufSize = 1.25 * iSamplesRead + 7200;
        short int*     pPCMBufL = new short int[iSamplesRead];
        short int*     pPCMBufR = new short int[iSamplesRead];
        unsigned char* pMP3Buf  = new unsigned char[iMP3BufSize];

        if (iChannelNum == 2) {
            // De-interleave L/R from the end toward the start.
            for (ssize_t i = iSamplesRead; --i >= 0;) {
                pPCMBufR[i] = *--pOrg;
                pPCMBufL[i] = *--pOrg;
            }
        } else if (iChannelNum == 1) {
            memset(pPCMBufR, 0, iSamplesRead * sizeof(short int));

            for (ssize_t i = iSamplesRead; --i >= 0;) {
                pPCMBufL[i] = *--pOrg;
            }
        } else {
            // EMPTY
        }

        int iOutSize = lame_encode_buffer(m_pGobalFlags, pPCMBufL, pPCMBufR, iSamplesRead, pMP3Buf, iMP3BufSize);
        int iWriteSize = iOutSize;

        if (iOutSize > 0) {
            iWriteSize = pAudioSink_out->write(pMP3Buf, iOutSize);
        }

        // BUGFIX: these are new[] arrays -- plain "delete" was undefined
        // behavior.
        delete[] pMP3Buf;
        delete[] pPCMBufR;
        delete[] pPCMBufL;


        pBuf->release();
        pBuf = NULL;

        CHECK_IS_EXT((iOutSize == iWriteSize), UNKNOWN_ERROR);
    }

    // last frame may remain
    {
        unsigned char* pMP3Buf  = new unsigned char[LAME_MAXMP3BUFFER];
        // BUGFIX: sizeof(pMP3Buf) was the size of the *pointer* (4/8 bytes),
        // not the buffer; pass the real capacity.
        int iOutSize = lame_encode_flush(m_pGobalFlags, pMP3Buf, LAME_MAXMP3BUFFER);
        int iWriteSize = iOutSize;

        if (iOutSize > 0) {
            iWriteSize = pAudioSink_out->write(pMP3Buf, iOutSize);
        }

        delete[] pMP3Buf;

        CHECK_IS_EXT((iOutSize == iWriteSize), UNKNOWN_ERROR);
    }

    // write the tag3v1
    {
        const size_t kId3v1TagSize = 128;  // ID3v1 tags are exactly 128 bytes
        unsigned char* pMP3Buf = new unsigned char[kId3v1TagSize];
        // BUGFIX: sizeof(pMP3Buf) gave the pointer size, so the tag never
        // fit and was silently skipped.
        int iOutSize = lame_get_id3v1_tag(m_pGobalFlags, pMP3Buf, kId3v1TagSize);
        int iWriteSize = iOutSize;

        if ((iOutSize > 0) && (((size_t)iOutSize) <= kId3v1TagSize)) {
            iWriteSize = pAudioSink_out->write(pMP3Buf, iOutSize);
        }

        delete[] pMP3Buf;

        CHECK_IS_EXT((iOutSize == iWriteSize), UNKNOWN_ERROR);
    }

    // Pair with the start() above.
    pMediaSource_in->stop();

    RETURN(OK);
}
// OMAP-enhanced variant: decodes one frame (thumbnail when frameTimeUs < 0)
// and converts it to RGB565, with TI-specific handling for packed
// semi-planar and interlaced content.  Returns a heap-allocated VideoFrame
// owned by the caller, or NULL on failure.
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client,
        const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source,
        uint32_t flags,
        int64_t frameTimeUs,
        int seekMode) {

#ifdef OMAP_ENHANCEMENT
    flags |= OMXCodec::kPreferThumbnailMode;
#ifdef TARGET_OMAP4

    int32_t isInterlaced = false;

    //Call config parser to update profile,level,interlaced,reference frame data
    updateMetaData(trackMeta);

    trackMeta->findInt32(kKeyVideoInterlaced, &isInterlaced);

    if(isInterlaced)
    {
      flags |= OMXCodec::kPreferInterlacedOutputContent;
    }
#endif
#endif
    sp<MediaSource> decoder =
        OMXCodec::Create(
                client->interface(), source->getFormat(), false, source,
                NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        LOGV("unable to instantiate video decoder.");

        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        LOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.

    MediaSource::ReadOptions options;
    // NOTE(review): this early return happens after decoder->start() and
    // does not call decoder->stop() -- likely leaks a started decoder;
    // confirm and consider validating seekMode before start().
    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {

        LOGE("Unknown seek mode: %d", seekMode);
        return NULL;
    }

    MediaSource::ReadOptions::SeekMode mode =
            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);

    int64_t thumbNailTime;
    if (frameTimeUs < 0 && trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) {
        options.setSeekTo(thumbNailTime, mode);
    } else {
        // -1 marks "no thumbnail time"; skips the timestamp check below.
        thumbNailTime = -1;
        options.setSeekTo(frameTimeUs < 0 ? 0 : frameTimeUs, mode);
    }

    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            buffer->release();
            buffer = NULL;
        }

        err = decoder->read(&buffer, &options);
#ifdef OMAP_ENHANCEMENT
        if(err == INFO_FORMAT_CHANGED)
        {
            int32_t w1,h1;
            decoder->getFormat()->findInt32(kKeyWidth, &w1);
            decoder->getFormat()->findInt32(kKeyHeight, &h1);
            LOGD("Got portreconfig event. New WxH %dx%d. wait 5mS for port to be enabled",w1,h1);
            usleep(5000); //sleep 5mS for port disable-enable to complete
        }
#endif
        options.clearSeekTo();
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        // On failure the decoder must not have handed us a buffer.
        CHECK_EQ(buffer, NULL);

        LOGV("decoding frame failed.");
        decoder->stop();

        return NULL;
    }

    LOGV("successfully decoded video frame.");

    int32_t unreadable;
    if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        // Secure/protected buffers: the decoder won't expose the pixels.
        LOGV("video frame is unreadable, decoder does not give us access "
             "to the video data.");

        buffer->release();
        buffer = NULL;

        decoder->stop();

        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            // Informational only: the seek may land on a nearby sync frame.
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));

            LOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
                 thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    VideoFrame *frame = new VideoFrame;

#if defined(OMAP_ENHANCEMENT) && defined(TARGET_OMAP4)
    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

    int32_t format;
    const char *component;

    //cache the display width and height
    int32_t displayWidth, displayHeight;
    displayWidth = width;
    displayHeight = height;

    //update width & height with the buffer width&height
    // (the decoder may pad dimensions to hardware-friendly multiples)
    if(!(meta->findInt32(kKeyPaddedWidth, &width))) {
        CHECK(meta->findInt32(kKeyWidth, &width));
    }
    if(!(meta->findInt32(kKeyPaddedHeight, &height))) {
        CHECK(meta->findInt32(kKeyHeight, &height));
    }
    LOGD("VideoFrame WxH %dx%d", displayWidth, displayHeight);

    // Packed semi-planar TI formats: size the frame from the display
    // dimensions; otherwise use the (possibly padded) buffer dimensions.
    if(((OMX_COLOR_FORMATTYPE)srcFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar) ||
       ((OMX_COLOR_FORMATTYPE)srcFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar_Sequential_TopBottom)){
        frame->mWidth = displayWidth;
        frame->mHeight = displayHeight;
        frame->mDisplayWidth = displayWidth;
        frame->mDisplayHeight = displayHeight;
        frame->mSize = displayWidth * displayHeight * 2;
        frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;
    }else {
        frame->mWidth = width;
        frame->mHeight = height;
        frame->mDisplayWidth = width;
        frame->mDisplayHeight = height;
        frame->mSize = width * height * 2;
        frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;
    }

    if(((OMX_COLOR_FORMATTYPE)srcFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar) ||
       ((OMX_COLOR_FORMATTYPE)srcFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar_Sequential_TopBottom)){

        ColorConverter converter(
                (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);

        CHECK(converter.isValid());

        converter.convert(
                width, height,
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                0, //1D buffer in 1.16 Ducati rls. If 2D buffer -> 4096 stride should be used
                frame->mData, displayWidth * 2,
                displayWidth,displayHeight,buffer->range_offset(),isInterlaced);
    }
    else{

        ColorConverter converter(
                (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);

        CHECK(converter.isValid());

        converter.convert(
                width, height,
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                0,
                frame->mData, width * 2);
    }

#else
    frame->mWidth = width;
    frame->mHeight = height;
    frame->mDisplayWidth = width;
    frame->mDisplayHeight = height;
    frame->mSize = width * height * 2;  // RGB565: 2 bytes per pixel
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

    // Aborts (CHECK) if the decoder's output format has no RGB565 converter.
    ColorConverter converter(
            (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);
    CHECK(converter.isValid());

    converter.convert(
            width, height,
            (const uint8_t *)buffer->data() + buffer->range_offset(),
            0,
            frame->mData, width * 2);
#endif

    buffer->release();
    buffer = NULL;

    decoder->stop();

    return frame;
}
コード例 #13
0
// Dispatches the MediaPuller's control messages.  The puller sits between a
// MediaSource (audio or video) and a downstream consumer: kWhatStart starts
// the source and kicks off the pull loop, kWhatPull reads one buffer and
// forwards it as an ABuffer notification, kWhatStop shuts the source down.
void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            status_t err;
            if (mIsAudio) {
                // This atrocity causes AudioSource to deliver absolute
                // systemTime() based timestamps (off by 1 us).
                sp<MetaData> params = new MetaData;
                params->setInt64(kKeyTime, 1ll);
                err = mSource->start(params.get());
            } else {
                err = mSource->start();
                if (err != OK) {
                    ALOGE("source failed to start w/ err %d", err);
                }
            }

            if (err == OK) {
                // Begin the read loop; each pull reschedules the next one.
                schedulePull();
            }

            // Reply synchronously so the caller learns the start status.
            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            // Fetch the MIME type purely for the log lines below.
            sp<MetaData> meta = mSource->getFormat();
            const char *tmp;
            CHECK(meta->findCString(kKeyMIMEType, &tmp));
            AString mime = tmp;

            ALOGI("MediaPuller(%s) stopping.", mime.c_str());
            mSource->stop();
            ALOGI("MediaPuller(%s) stopped.", mime.c_str());
            // Bumping the generation invalidates any kWhatPull messages
            // still in flight.
            ++mPullGeneration;

            sp<AMessage> notify;
            CHECK(msg->findMessage("notify", &notify));
            notify->post();
            break;
        }

        case kWhatPull:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPullGeneration) {
                // Stale pull scheduled before the last stop; ignore it.
                break;
            }

            MediaBuffer *mbuf;
            status_t err = mSource->read(&mbuf);

            if (mPaused) {
                // While paused, drop the data but keep the pull loop
                // alive so resuming picks up immediately.
                if (err == OK) {
                    mbuf->release();
                    mbuf = NULL;
                }

                schedulePull();
                break;
            }

            if (err != OK) {
                if (err == ERROR_END_OF_STREAM) {
                    ALOGI("stream ended.");
                } else {
                    ALOGE("error %d reading stream.", err);
                }

                // Any read failure ends the pull loop with an EOS
                // notification; no further pull is scheduled.
                sp<AMessage> notify = mNotify->dup();
                notify->setInt32("what", kWhatEOS);
                notify->post();
            } else {
                int64_t timeUs;
                CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));

                // Copy the payload into an ABuffer for the consumer.
                sp<ABuffer> accessUnit = new ABuffer(mbuf->range_length());

                memcpy(accessUnit->data(),
                       (const uint8_t *)mbuf->data() + mbuf->range_offset(),
                       mbuf->range_length());

                accessUnit->meta()->setInt64("timeUs", timeUs);

                if (mIsAudio) {
                    // Audio: the copy above suffices, release right away.
                    mbuf->release();
                    mbuf = NULL;
                } else {
                    // video encoder will release MediaBuffer when done
                    // with underlying data.
                    accessUnit->setMediaBufferBase(mbuf);
                }

                sp<AMessage> notify = mNotify->dup();

                notify->setInt32("what", kWhatAccessUnit);
                notify->setBuffer("accessUnit", accessUnit);
                notify->post();

                if (mbuf != NULL) {
                    ALOGV("posted mbuf %p", mbuf);
                }

                schedulePull();
            }
            break;
        }

        case kWhatPause:
        {
            // Pausing only gates delivery; pulling itself continues.
            mPaused = true;
            break;
        }

        case kWhatResume:
        {
            mPaused = false;
            break;
        }

        default:
            TRESPASS();
    }
}
コード例 #14
0
// Reader thread: copies every buffer coming from mSource into the live-photo
// buffer pool.  Once live-photo capture is started it keeps requesting IDR
// frames until a sync frame arrives, then stops pulling.  Returns false so
// the thread does not loop again.
bool LivePhotoSource:: threadLoop() {
	ALOGD("+");
	status_t err = OK;
    MediaBuffer *buffer = NULL;
	int32_t isSync = false;

	while(mSourceStarted && !exitPending() && ((err = mSource->read(&buffer)) == OK)) {
        // Deep-copy the source buffer (data + metadata) so the pool owns
        // its contents independently of the source's buffer recycling.
        MediaBuffer* copy = new MediaBuffer(buffer->range_length(), buffer->meta_data());
        memcpy( copy->data(), (uint8_t *)buffer->data() + buffer->range_offset(), buffer->range_length() );
        copy->set_range(0, buffer->range_length());

		int64_t latestTimestampUs = 0;
		//CHECK(copy->meta_data()->findInt64(kKeyTime, &latestTimestampUs));
		copy->meta_data()->findInt64(kKeyTime, &latestTimestampUs);
		ALOGI("cur timestamp is %lldus", latestTimestampUs);
		{
			Mutex::Autolock _lock(mLock);
			
			int32_t isCodecConfig;
			if(copy->meta_data()->findInt32(kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig ) {
				// Codec-config data (e.g. SPS/PPS) is kept separately,
				// replacing any previously stored config buffer.
				if(mCodecConfigBuffer != NULL) {
					mCodecConfigBuffer->release();
					mCodecConfigBuffer = NULL;
				}
				
				ALOGD("keep codec config buffer");
				mCodecConfigBuffer = copy;
			}
			else {
		        mMediaBufferPool.push_back(copy);

				if(mLivePhotoStarted) {
					// Wake any consumer waiting for frames.
					mFrameAvailableCond.signal();
					copy->meta_data()->findInt32(kKeyIsSyncFrame, &isSync);
					
					if (!isSync) {
						// Ask the encoder for an IDR so the clip can end
						// on a sync frame.
						if (reinterpret_cast<MediaCodecSource *>(mSource.get())->
						                  requestIDRFrame() != OK)
							ALOGW("Send force I cmd fail");
					}
					else {
						// Got the closing sync frame: stop pulling.
						mSourceStarted = false;
						buffer->release();
						buffer = NULL;
						break; // 
					}
				}
				else {
					// Not capturing yet: trim the rolling pool.
					updateBufferPool();
				}
			}
		}

		buffer->release();
		buffer = NULL;
	}

	{
		Mutex::Autolock _lock(mLock);
		if(err != OK) {
			ALOGE("read source err(%d) . this is a bad livephoto", err);
		}
		
		if(mSourceStarted && mLivePhotoStarted) {
			// Abnormal exit while a capture was in progress: clear the
			// flags and wake the waiting consumer so it can bail out.
			mLivePhotoStarted = false;
			mSourceStarted = false;
			ALOGE("there is an error with exiting while when livephoto started");
			mFrameAvailableCond.signal();
		}
		ALOGD("Thread exit signal");
		mThreadExitCond.signal();
		mIsThreadExit = true;
	}
	
	ALOGD("-");
	return false;
}
コード例 #15
0
ファイル: MediaPuller.cpp プロジェクト: WayWingsDev/mediatek
// MTK variant of MediaPuller::onMessageReceived: the same start/stop/pull
// state machine as the AOSP version, augmented with extra logging, optional
// MTB trace points, and a Wifi-Display input-latency debug check.
void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            status_t err;
            ALOGI("start mIsAudio=%d",mIsAudio);
            if (mIsAudio) {
                // This atrocity causes AudioSource to deliver absolute
                // systemTime() based timestamps (off by 1 us).
#ifdef MTB_SUPPORT                
                ATRACE_BEGIN_EXT("AudioPuller, kWhatStart");
#endif
                sp<MetaData> params = new MetaData;
                params->setInt64(kKeyTime, 1ll);
                err = mSource->start(params.get());
            } else {
#ifdef MTB_SUPPORT            
                ATRACE_BEGIN_EXT("VideoPuller, kWhatStart");
#endif
                err = mSource->start();
                if (err != OK) {
                    ALOGE("source failed to start w/ err %d", err);
                }
            }

            if (err == OK) {
							 ALOGI("start done, start to schedulePull data");
                schedulePull();
            }

            // Reply synchronously so the caller learns the start status.
            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            response->postReply(replyID);
#ifdef MTB_SUPPORT			
            ATRACE_END_EXT("VideoPuller, kWhatStart");
#endif
            break;
        }

        case kWhatStop:
        {
            // Fetch the MIME type purely for the log lines below.
            sp<MetaData> meta = mSource->getFormat();
            const char *tmp;
            CHECK(meta->findCString(kKeyMIMEType, &tmp));
            AString mime = tmp;

            ALOGI("MediaPuller(%s) stopping.", mime.c_str());
            mSource->stop();
            ALOGI("MediaPuller(%s) stopped.", mime.c_str());
            // Bumping the generation invalidates in-flight kWhatPull msgs.
            ++mPullGeneration;

            sp<AMessage> notify;
            CHECK(msg->findMessage("notify", &notify));
            notify->post();
            break;
        }

        case kWhatPull:
        {
            int32_t generation;
#ifdef MTB_SUPPORT			
            if (mIsAudio) {
                ATRACE_BEGIN_EXT("AudioPuller, kWhatPull");
            } else {
                ATRACE_BEGIN_EXT("VideoPuller, kWhatPull");
            }
#endif			
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPullGeneration) {
                // Stale pull scheduled before the last stop; ignore it.
                break;
            }

            MediaBuffer *mbuf;
	 
            status_t err = mSource->read(&mbuf);

            if (mPaused) {
                // While paused, drop data but keep the pull loop alive.
                if (err == OK) {
                    mbuf->release();
                    mbuf = NULL;
                }

                schedulePull();
                break;
            }

            if (err != OK) {
                if (err == ERROR_END_OF_STREAM) {
                    ALOGI("stream ended.");
                } else {
                    ALOGE("error %d reading stream.", err);
                }
                ALOGI("err=%d.post kWhatEOS",err);
                sp<AMessage> notify = mNotify->dup();
                notify->setInt32("what", kWhatEOS);
                notify->post();
            } else {
            
                int64_t timeUs;
                CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
#ifdef MTB_SUPPORT
                if (mIsAudio) {
                    ATRACE_ONESHOT(ATRACE_ONESHOT_ADATA, "AudioPuller, TS: %lld ms", timeUs/1000);
                }
                else {
                    ATRACE_ONESHOT(ATRACE_ONESHOT_VDATA, "VideoPuller, TS: %lld ms", timeUs/1000);
                }
#endif				
                // Copy the payload into an ABuffer for the consumer.
                sp<ABuffer> accessUnit = new ABuffer(mbuf->range_length());

                memcpy(accessUnit->data(),
                       (const uint8_t *)mbuf->data() + mbuf->range_offset(),
                       mbuf->range_length());

                accessUnit->meta()->setInt64("timeUs", timeUs);

#ifndef ANDROID_DEFAULT_CODE	
		// WFD latency debug: record when this sample entered the puller
		// and warn if the (now - timestamp) delta drifts more than 500ms
		// away from the first observed delta.
		sp<WfdDebugInfo> debugInfo= defaultWfdDebugInfo();
	   	 int64_t MpMs = ALooper::GetNowUs();
		 debugInfo->addTimeInfoByKey(!mIsAudio , timeUs, "MpIn", MpMs/1000);	
		 
		 int64_t NowMpDelta =0;
		
		 NowMpDelta = (MpMs - timeUs)/1000;	
		 
		 if(mFirstDeltaMs == -1){
			mFirstDeltaMs = NowMpDelta;
			ALOGE("[check Input 1th][%s] ,timestamp=%lld ms,[ts and now delta change]=%lld ms",
			 		mIsAudio?"audio":"video",timeUs/1000,NowMpDelta);
		 }	
		 NowMpDelta = NowMpDelta - mFirstDeltaMs;
		
		 if(NowMpDelta > 500ll || NowMpDelta < -500ll ){
			 ALOGE("[check Input][%s] ,timestamp=%lld ms,[ts and now delta change]=%lld ms",
			 		mIsAudio?"audio":"video",timeUs/1000,NowMpDelta);
		 }
		 
#endif

		 
                if (mIsAudio) {
                    // Audio: the copy above suffices, release right away.
                    mbuf->release();
                    mbuf = NULL;
		      ALOGI("[WFDP][%s] ,timestamp=%lld ms",mIsAudio?"audio":"video",timeUs/1000);
                } else {
                    // video encoder will release MediaBuffer when done
                    // with underlying data.
                    accessUnit->meta()->setPointer("mediaBuffer", mbuf);
		      ALOGI("[WFDP][%s] ,mediaBuffer=%p,timestamp=%lld ms",mIsAudio?"audio":"video",mbuf,timeUs/1000);
                }

                sp<AMessage> notify = mNotify->dup();

                notify->setInt32("what", kWhatAccessUnit);
                notify->setBuffer("accessUnit", accessUnit);
                notify->post();

                if (mbuf != NULL) {
                    ALOGV("posted mbuf %p", mbuf);
                }

                schedulePull();
#ifdef MTB_SUPPORT			
                if (mIsAudio) {
                    ATRACE_END_EXT("AudioPuller, kWhatPull");
                } else {
                    ATRACE_END_EXT("VideoPuller, kWhatPull");
                }
#endif	
            }
            break;
        }

        case kWhatPause:
        {
            // Pausing only gates delivery; pulling itself continues.
            mPaused = true;
            break;
        }

        case kWhatResume:
        {
            mPaused = false;
            break;
        }
        default:
            TRESPASS();
    }
}
コード例 #16
0
/**
 * Retrieves the codec-specific decoder configuration (DSI) for the encoder
 * described by |metaData|.  A temporary encoder graph is built, fed one fake
 * (zero-timestamped) frame plus EOS, and the first output buffer — which must
 * be codec-config data — is captured into pEncoderContext->mHeader.
 *
 * @param pContext  (IN) Encoder context (must be in CREATED state)
 * @param metaData  (IN) Encoder format description
 * @return M4NO_ERROR on success, M4ERR_PARAMETER/M4ERR_STATE on failure
 *         (VIDEOEDITOR_CHECK jumps to cleanUp on failure).
 */
M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext,
        sp<MetaData> metaData) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context*  pEncoderContext = M4OSA_NULL;
    status_t result = OK;
    int32_t nbBuffer = 0;
    int32_t stride = 0;
    int32_t height = 0;
    int32_t framerate = 0;
    int32_t isCodecConfig = 0;
    size_t size = 0;
    uint32_t codecFlags = 0;
    MediaBuffer* inputBuffer = NULL;
    MediaBuffer* outputBuffer = NULL;
    sp<VideoEditorVideoEncoderSource> encoderSource = NULL;
    sp<MediaSource> encoder = NULL;
    OMXClient client;

    ALOGV("VideoEditorVideoEncoder_getDSI begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext,       M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);

    // Create the encoder source
    encoderSource = VideoEditorVideoEncoderSource::Create(metaData);
    VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE);

    // Connect to the OMX client
    result = client.connect();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

    // Create the OMX codec
    // VIDEOEDITOR_FORCECODEC MUST be defined here
    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
    encoder = OMXCodec::Create(client.interface(), metaData, true,
        encoderSource, NULL, codecFlags);
    VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE);

    /**
     * Send fake frames and retrieve the DSI
     */
    // Send a fake frame to the source: a YUV420 frame of stride*height*3/2
    // bytes (contents irrelevant — only the config output matters).
    metaData->findInt32(kKeyStride,     &stride);
    metaData->findInt32(kKeyHeight,     &height);
    metaData->findInt32(kKeySampleRate, &framerate);
    size = (size_t)(stride*height*3)/2;
    inputBuffer = new MediaBuffer(size);
    inputBuffer->meta_data()->setInt64(kKeyTime, 0);
    nbBuffer = encoderSource->storeBuffer(inputBuffer);
    encoderSource->storeBuffer(NULL); // Signal EOS

    // Call read once to get the DSI: the first output buffer must carry the
    // kKeyIsCodecConfig flag.
    result = encoder->start();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    result = encoder->read(&outputBuffer, NULL);
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32(
        kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE);

    VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE);
    if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
        // For H264, format the DSI
        result = buildAVCCodecSpecificData(
            (uint8_t**)(&(pEncoderContext->mHeader.pBuf)),
            (size_t*)(&(pEncoderContext->mHeader.Size)),
            (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(),
            outputBuffer->range_length(), encoder->getFormat().get());
        outputBuffer->release();
        VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);
    } else {
        // For MPEG4, just copy the DSI
        pEncoderContext->mHeader.Size =
            (M4OSA_UInt32)outputBuffer->range_length();
        SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8,
            pEncoderContext->mHeader.Size, "Encoder header");
        memcpy((void *)pEncoderContext->mHeader.pBuf,
            (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()),
            pEncoderContext->mHeader.Size);
        outputBuffer->release();
    }

    result = encoder->stop();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

cleanUp:
    // Destroy the graph
    if ( encoder != NULL ) { encoder.clear(); }
    client.disconnect();
    if ( encoderSource != NULL ) { encoderSource.clear(); }
    if ( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_getDSI no error");
    } else {
        ALOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_getDSI end");
    return err;
}
コード例 #17
0
// Writer thread: drains buffers from mSource and appends them to mFile,
// maintaining running size/duration estimates and honoring pause/resume as
// well as the configured file-size and duration limits.  Returns OK when
// the stream ends normally (EOS or timeout), the read error otherwise.
status_t ExtendedWriter::threadFunc() {
    mEstimatedDurationUs = 0;
    mEstimatedSizeBytes = 0;
    bool stoppedPrematurely = true;      // cleared once a buffer is written
    int64_t pausedTotalUs = 0;           // accumulated pause offset
    int64_t latestTimestampUs = 0;       // highest adjusted timestamp seen
    status_t err = OK;

    androidSetThreadPriority(gettid(), ANDROID_PRIORITY_AUDIO);
    prctl(PR_SET_NAME, (unsigned long)"ExtendedWriter", 0, 0, 0);

    while (!mDone) {
        MediaBuffer *mbuf;
        err = mSource->read(&mbuf);

        if (err != OK) {
            break;
        }

        if (mPaused) {
            // Discard data while paused.
            mbuf->release();
            mbuf = NULL;
            continue;
        }

        mEstimatedSizeBytes += mbuf->range_length();
        if (exceedsFileSizeLimit()) {
            mbuf->release();
            mbuf = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
            break;
        }

        int64_t tsUs;
        CHECK(mbuf->meta_data()->findInt64(kKeyTime, &tsUs));
        if (tsUs > mEstimatedDurationUs) {
            mEstimatedDurationUs = tsUs;
        }
        if (mResumed) {
            // Fold the pause gap (minus a 20ms fudge) into the running
            // offset that keeps output timestamps continuous.
            pausedTotalUs += (tsUs - latestTimestampUs - 20000);
            mResumed = false;
        }
        tsUs -= pausedTotalUs;
        ALOGV("time stamp: %lld, previous paused duration: %lld",
                tsUs, pausedTotalUs);
        if (tsUs > latestTimestampUs) {
            latestTimestampUs = tsUs;
        }

        if (exceedsFileDurationLimit()) {
            mbuf->release();
            mbuf = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
            break;
        }

        ssize_t written = fwrite(
                (const uint8_t *)mbuf->data() + mbuf->range_offset(),
                1,
                mbuf->range_length(),
                mFile);
        mOffset += written;

        if (written < (ssize_t)mbuf->range_length()) {
            // Short write: abandon the loop.
            mbuf->release();
            mbuf = NULL;

            break;
        }

        // XXX: How to tell it is stopped prematurely?
        if (stoppedPrematurely) {
            stoppedPrematurely = false;
        }

        mbuf->release();
        mbuf = NULL;
    }

    if (stoppedPrematurely) {
        notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_TRACK_INFO_COMPLETION_STATUS, UNKNOWN_ERROR);
    }

    // Emit the format-specific header before closing out the file.
    if ( mFormat == AUDIO_FORMAT_QCELP ) {
        writeQCPHeader( );
    }
    else if ( mFormat == AUDIO_FORMAT_EVRC ) {
        writeEVRCHeader( );
    }

    fflush(mFile);
    fclose(mFile);
    mFile = NULL;
    mReachedEOS = true;
    if (err == ERROR_END_OF_STREAM || (err == -ETIMEDOUT)) {
        return OK;
    }
    return err;
}
コード例 #18
0
// Reads the next decoded video frame from mVideoSource into |aFrame|.
// |aSeekTimeUs| of -1 means "continue from the current position"; any other
// value makes the source seek there first.  Returns false on EOS or error.
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs)
{
  if (!mVideoSource.get())
    return false;

  ReleaseVideoBuffer();

  status_t err;

  if (aSeekTimeUs != -1) {
    MediaSource::ReadOptions options;
    options.setSeekTo(aSeekTimeUs);
    err = mVideoSource->read(&mVideoBuffer, &options);
  } else {
    err = mVideoSource->read(&mVideoBuffer);
  }

  if (err == OK && mVideoBuffer->range_length() > 0) {
    int64_t timeUs;
    int32_t unreadable;
    int32_t keyFrame;

    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs) ) {
      LOG("no key time");
      return false;
    }

    // Missing flags default to "not a keyframe" / "readable".
    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
       keyFrame = 0;
    }

    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)) {
      unreadable = 0;
    }

    LOG("data: %p size: %u offset: %u length: %u unreadable: %d",
        mVideoBuffer->data(), 
        mVideoBuffer->size(),
        mVideoBuffer->range_offset(),
        mVideoBuffer->range_length(),
        unreadable);

    char *data = reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
    size_t length = mVideoBuffer->range_length();

    if (unreadable) {
      LOG("video frame is unreadable");
    }

    if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) {
      return false;
    }
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info and retry the read so
    // this call still delivers a frame (previously it returned with
    // |aFrame| unset).
    if (!SetVideoFormat()) {
      return false;
    }
    return ReadVideo(aFrame, aSeekTimeUs);
  }
  else if (err == ERROR_END_OF_STREAM) {
    return false;
  }

  // Bug fix: other read errors used to fall through and return true,
  // handing the caller a stale/unset frame.  Report success only for OK.
  return err == OK;
}
コード例 #19
0
/**
 *******************************************************************************
 * @brief   Gets an access unit (AU) from the stream handler source.
 * @note    AU is the smallest possible amount of data to be decoded by decoder
 *
 * @param   context:       (IN) Context of the reader
 * @param   pStreamHandler (IN) The stream handler of the stream to make jump
 * @param   pAccessUnit    (I/O)Pointer to an access unit to fill with read data
 * @return    M4NO_ERROR        there is no error
 * @return    M4ERR_PARAMETER   at least one parameter is not properly set
 * @returns   M4ERR_ALLOC       memory allocation failed
 * @returns   M4WAR_NO_MORE_AU  there are no more access unit in the stream
 *******************************************************************************
*/
M4OSA_ERR VideoEditorMp3Reader_getNextAu(M4OSA_Context context,
        M4_StreamHandler *pStreamHandler, M4_AccessUnit *pAccessUnit) {
    VideoEditorMp3Reader_Context *pReaderContext =
        (VideoEditorMp3Reader_Context*)context;
    M4OSA_ERR err = M4NO_ERROR;
    M4SYS_AccessUnit* pAu;
    // Bug fix: initialize to NULL.  MediaSource::read() is not guaranteed to
    // set the out-parameter on failure, so the NULL test below would
    // otherwise read an uninitialized pointer (undefined behavior).
    MediaBuffer *mAudioBuffer = NULL;
    MediaSource::ReadOptions options;

    ALOGV("VideoEditorMp3Reader_getNextAu start");
    M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER,
        "VideoEditorMp3Reader_getNextAu: invalid context");
    M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
        "VideoEditorMp3Reader_getNextAu: invalid pointer to M4_StreamHandler");
    M4OSA_DEBUG_IF1((pAccessUnit == 0),    M4ERR_PARAMETER,
        "VideoEditorMp3Reader_getNextAu: invalid pointer to M4_AccessUnit");

    if (pStreamHandler == (M4_StreamHandler*)pReaderContext->\
        mAudioStreamHandler) {
        pAu = &pReaderContext->mAudioAu;
    } else {
        ALOGV("VideoEditorMp3Reader_getNextAu: StreamHandler is not known\n");
        return M4ERR_PARAMETER;
    }

    // A pending seek request is applied to this read only.
    if (pReaderContext->mSeeking) {
        options.setSeekTo(pReaderContext->mSeekTime);
    }

    pReaderContext->mMediaSource->read(&mAudioBuffer, &options);

    if (mAudioBuffer != NULL) {
        // (Re)allocate the AU buffer if absent or too small, rounding the
        // size up to a 4-byte multiple.
        if ((pAu->dataAddress == NULL) ||
            (pAu->size < mAudioBuffer->range_length())) {
            if (pAu->dataAddress != NULL) {
                free((M4OSA_Int32*)pAu->dataAddress);
                pAu->dataAddress = NULL;
            }
            pAu->dataAddress = (M4OSA_Int32*)M4OSA_32bitAlignedMalloc(
                (mAudioBuffer->range_length() + 3) & ~0x3,
                M4READER_MP3, (M4OSA_Char*)"pAccessUnit->m_dataAddress" );

            if (pAu->dataAddress == NULL) {
                ALOGV("VideoEditorMp3Reader_getNextAu malloc failed");
                pReaderContext->mMediaSource->stop();
                pReaderContext->mMediaSource.clear();
                pReaderContext->mDataSource.clear();

                return M4ERR_ALLOC;
            }
        }
        pAu->size = mAudioBuffer->range_length();
        memcpy((M4OSA_MemAddr8)pAu->dataAddress,
            (const char *)mAudioBuffer->data() + mAudioBuffer->range_offset(),
            mAudioBuffer->range_length());

        mAudioBuffer->meta_data()->findInt64(kKeyTime, (int64_t*)&pAu->CTS);


        pAu->CTS = pAu->CTS / 1000; /*converting the microsec to millisec */
        pAu->DTS  = pAu->CTS;
        pAu->attribute = M4SYS_kFragAttrOk;
        mAudioBuffer->release();

        ALOGV("VideoEditorMp3Reader_getNextAu AU CTS = %ld",pAu->CTS);

        pAccessUnit->m_dataAddress = (M4OSA_Int8*) pAu->dataAddress;
        pAccessUnit->m_size = pAu->size;
        pAccessUnit->m_CTS = pAu->CTS;
        pAccessUnit->m_DTS = pAu->DTS;
        pAccessUnit->m_attribute = pAu->attribute;
    } else {
        ALOGV("VideoEditorMp3Reader_getNextAu EOS reached.");
        pAccessUnit->m_size=0;
        err = M4WAR_NO_MORE_AU;
    }
    pAu->nbFrag = 0;

    options.clearSeekTo();
    pReaderContext->mSeeking = M4OSA_FALSE;
    mAudioBuffer = NULL;
    ALOGV("VideoEditorMp3Reader_getNextAu end");

    return err;
}
コード例 #20
0
void NuPlayer::GenericSource::readBuffer(
        bool audio, int64_t seekTimeUs, int64_t *actualTimeUs) {
    Track *track = audio ? &mAudioTrack : &mVideoTrack;
    CHECK(track->mSource != NULL);

    if (actualTimeUs) {
        *actualTimeUs = seekTimeUs;
    }

    MediaSource::ReadOptions options;

    bool seeking = false;

    if (seekTimeUs >= 0) {
        options.setSeekTo(seekTimeUs);
        seeking = true;
    }

    for (;;) {
        MediaBuffer *mbuf;
        status_t err = track->mSource->read(&mbuf, &options);

        options.clearSeekTo();

        if (err == OK) {
            size_t outLength = mbuf->range_length();

            if (audio && mAudioIsVorbis) {
                outLength += sizeof(int32_t);
            }

            sp<ABuffer> buffer = new ABuffer(outLength);

            memcpy(buffer->data(),
                   (const uint8_t *)mbuf->data() + mbuf->range_offset(),
                   mbuf->range_length());

            if (audio && mAudioIsVorbis) {
                int32_t numPageSamples;
                if (!mbuf->meta_data()->findInt32(
                            kKeyValidSamples, &numPageSamples)) {
                    numPageSamples = -1;
                }

                memcpy(buffer->data() + mbuf->range_length(),
                       &numPageSamples,
                       sizeof(numPageSamples));
            }

            int64_t timeUs;
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));

            buffer->meta()->setInt64("timeUs", timeUs);

            if (actualTimeUs) {
                *actualTimeUs = timeUs;
            }

            mbuf->release();
            mbuf = NULL;

            if (seeking) {
                track->mPackets->queueDiscontinuity(
                        ATSParser::DISCONTINUITY_SEEK, NULL);
            }

            track->mPackets->queueAccessUnit(buffer);
            break;
        } else if (err == INFO_FORMAT_CHANGED) {
#if 0
            track->mPackets->queueDiscontinuity(
                    ATSParser::DISCONTINUITY_FORMATCHANGE, NULL);
#endif
        } else {
            track->mPackets->signalEOS(err);
            break;
        }
    }
}
コード例 #21
0
// Pulls the next decoded video frame from mVideoSource into aFrame.
// aSeekTimeUs == -1 means "read the next frame"; any other value seeks
// there first.  Returns true iff the read succeeded (note: an OK read
// that yields an empty buffer also counts as success and leaves aFrame
// untouched, mirroring the audio path).
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs)
{
  MOZ_ASSERT(aSeekTimeUs >= -1);

  if (!mVideoSource.get()) {
    return false;
  }

  ReleaseVideoBuffer();

  status_t err;
  if (aSeekTimeUs == -1) {
    err = mVideoSource->read(&mVideoBuffer);
  } else {
    MediaSource::ReadOptions options;
    options.setSeekTo(aSeekTimeUs);
    err = mVideoSource->read(&mVideoBuffer, &options);
  }

  if (err == INFO_FORMAT_CHANGED) {
    // The decoder switched output formats: refresh the cached format
    // info and retry the same read.
    LOG("mVideoSource INFO_FORMAT_CHANGED");
    if (!SetVideoFormat()) {
      return false;
    }
    return ReadVideo(aFrame, aSeekTimeUs);
  }

  if (err == ERROR_END_OF_STREAM) {
    LOG("mVideoSource END_OF_STREAM");
    return false;
  }

  if (err != OK) {
    LOG("mVideoSource ERROR %#x", err);
    return false;
  }

  if (mVideoBuffer->range_length() > 0) {
    int64_t frameTimeUs;
    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &frameTimeUs)) {
      LOG("no frame time");
      return false;
    }

    if (frameTimeUs < 0) {
      LOG("frame time %lld must be nonnegative", frameTimeUs);
      return false;
    }

    // Absent sync-frame metadata is treated as "not a keyframe".
    int32_t isSyncFrame;
    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &isSyncFrame)) {
      isSyncFrame = 0;
    }

    char *frameData =
        reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
    return ToVideoFrame(aFrame, frameTimeUs, frameData,
                        mVideoBuffer->range_length(), isSyncFrame);
  }

  return true;
}
コード例 #22
0
// Returns the next access unit from the stream in *out.
//
// Most tracks pass the pending frame through unchanged.  AVC tracks whose
// samples carry NAL-length prefixes (mNALSizeLen > 0) have the prefixes
// rewritten into Annex-B start codes before output.
status_t MatroskaSource::read(
        MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;

    int64_t targetSampleTimeUs = -1ll;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    // Seeking is skipped for live streams (no reliable Cues to seek on).
    if (options && options->getSeekTo(&seekTimeUs, &mode)
            && !mExtractor->isLiveStreaming()) {
        clearPendingFrames();

        // The audio we want is located by using the Cues to seek the video
        // stream to find the target Cluster then iterating to finalize for
        // audio.
        int64_t actualFrameTimeUs;
        mBlockIter.seek(seekTimeUs, mIsAudio, &actualFrameTimeUs);

        if (mode == ReadOptions::SEEK_CLOSEST) {
            // Caller asked for the closest sample: report the time we
            // actually landed on via kKeyTargetTime below.
            targetSampleTimeUs = actualFrameTimeUs;
        }
    }

    // Refill the pending-frame queue until at least one frame is available;
    // a failed block read drops any partial state and propagates the error.
    while (mPendingFrames.empty()) {
        status_t err = readBlock();

        if (err != OK) {
            clearPendingFrames();

            return err;
        }
    }

    // Take ownership of the oldest pending frame.
    MediaBuffer *frame = *mPendingFrames.begin();
    mPendingFrames.erase(mPendingFrames.begin());

    // Non-AVC tracks, or AVC already in start-code form, pass through as-is.
    if (mType != AVC || mNALSizeLen == 0) {
        if (targetSampleTimeUs >= 0ll) {
            frame->meta_data()->setInt64(
                    kKeyTargetTime, targetSampleTimeUs);
        }

        *out = frame;

        return OK;
    }

    // Each input frame contains one or more NAL fragments, each fragment
    // is prefixed by mNALSizeLen bytes giving the fragment length,
    // followed by a corresponding number of bytes containing the fragment.
    // We output all these fragments into a single large buffer separated
    // by startcodes (0x00 0x00 0x00 0x01).
    //
    // When mNALSizeLen is 0, we assume the data is already in the format
    // desired.

    const uint8_t *srcPtr =
        (const uint8_t *)frame->data() + frame->range_offset();

    size_t srcSize = frame->range_length();

    size_t dstSize = 0;
    MediaBuffer *buffer = NULL;
    uint8_t *dstPtr = NULL;

    // Pass 0 computes the required output size (and decides whether the
    // input buffer can be rewritten in place); pass 1 writes the output.
    for (int32_t pass = 0; pass < 2; ++pass) {
        size_t srcOffset = 0;
        size_t dstOffset = 0;
        while (srcOffset + mNALSizeLen <= srcSize) {
            size_t NALsize;
            switch (mNALSizeLen) {
                case 1: NALsize = srcPtr[srcOffset]; break;
                case 2: NALsize = U16_AT(srcPtr + srcOffset); break;
                case 3: NALsize = U24_AT(srcPtr + srcOffset); break;
                case 4: NALsize = U32_AT(srcPtr + srcOffset); break;
                default:
                    TRESPASS();
            }

            // size_t wraparound guard: a + NALsize <= a only when adding
            // NALsize overflowed the offset arithmetic (or NALsize == 0,
            // which would make no progress) — both are malformed input.
            if (srcOffset + mNALSizeLen + NALsize <= srcOffset + mNALSizeLen) {
                frame->release();
                frame = NULL;

                return ERROR_MALFORMED;
            } else if (srcOffset + mNALSizeLen + NALsize > srcSize) {
                break;
            }

            if (pass == 1) {
                memcpy(&dstPtr[dstOffset], "\x00\x00\x00\x01", 4);

                // Copy the payload only when writing to a separate output
                // buffer; in the in-place case (frame == buffer) the bytes
                // are already where they belong.
                if (frame != buffer) {
                    memcpy(&dstPtr[dstOffset + 4],
                           &srcPtr[srcOffset + mNALSizeLen],
                           NALsize);
                }
            }

            dstOffset += 4;  // 0x00 00 00 01
            dstOffset += NALsize;

            srcOffset += mNALSizeLen + NALsize;
        }

        if (srcOffset < srcSize) {
            // There were trailing bytes or not enough data to complete
            // a fragment.

            frame->release();
            frame = NULL;

            return ERROR_MALFORMED;
        }

        if (pass == 0) {
            dstSize = dstOffset;

            if (dstSize == srcSize && mNALSizeLen == 4) {
                // In this special case we can re-use the input buffer by substituting
                // each 4-byte nal size with a 4-byte start code
                buffer = frame;
            } else {
                buffer = new MediaBuffer(dstSize);
            }

            // Carry the timing / keyframe metadata over to the output.
            int64_t timeUs;
            CHECK(frame->meta_data()->findInt64(kKeyTime, &timeUs));
            int32_t isSync;
            CHECK(frame->meta_data()->findInt32(kKeyIsSyncFrame, &isSync));

            buffer->meta_data()->setInt64(kKeyTime, timeUs);
            buffer->meta_data()->setInt32(kKeyIsSyncFrame, isSync);

            dstPtr = (uint8_t *)buffer->data();
        }
    }

    // Release the input frame unless it was reused as the output buffer.
    if (frame != buffer) {
        frame->release();
        frame = NULL;
    }

    if (targetSampleTimeUs >= 0ll) {
        buffer->meta_data()->setInt64(
                kKeyTargetTime, targetSampleTimeUs);
    }

    *out = buffer;

    return OK;
}
コード例 #23
0
// Reads one buffer of audio from the upstream source.
//
// When a resampler is configured, each call produces exactly 1024 output
// frames of stereo 16-bit audio: the resampler pulls upstream data through
// this object, the 32-bit result is dithered/clamped down to 16 bits, and
// a fresh timestamp is derived from the running output byte count.
// Without a resampler, buffers are passed through untouched.  In both
// paths an INFO_FORMAT_CHANGED from upstream reconfigures the resampler
// and retries the read (options are dropped on retry — seek parameters
// were already latched into mSeekTimeUs/mSeekMode).
//
// Returns OK with *buffer_out set, or an error with *buffer_out == NULL.
status_t VideoEditorSRC::read(
        MediaBuffer **buffer_out, const ReadOptions *options) {
    ALOGV("read %p(%p)", this, mSource.get());
    *buffer_out = NULL;

    if (!mStarted) {
        return ERROR_END_OF_STREAM;
    }

    if (mResampler) {
        // Store the seek parameters; presumably they are applied on the
        // next upstream read issued by the resampler's provider callback
        // (this object) — TODO confirm against the callback implementation.
        int64_t seekTimeUs;
        ReadOptions::SeekMode mode = ReadOptions::SEEK_PREVIOUS_SYNC;
        if (options && options->getSeekTo(&seekTimeUs, &mode)) {
            ALOGV("read Seek %lld", seekTimeUs);
            mSeekTimeUs = seekTimeUs;
            mSeekMode = mode;
        }

        // We ask for 1024 frames in output
        // resampler output is always 2 channels and 32 bits
        const size_t kOutputFrameCount = 1024;
        const size_t kBytes = kOutputFrameCount * 2 * sizeof(int32_t);
        int32_t *pTmpBuffer = (int32_t *)calloc(1, kBytes);
        if (!pTmpBuffer) {
            // BUGFIX: kBytes is a size_t, so "%d" was the wrong conversion
            // specifier (undefined behavior on LP64 targets); use "%zu".
            ALOGE("calloc failed to allocate memory: %zu bytes", kBytes);
            return NO_MEMORY;
        }

        // Resample to target quality
        mResampler->resample(pTmpBuffer, kOutputFrameCount, this);

        // A stop requested mid-resample is deferred until the pull completes.
        if (mStopPending) {
            stop();
            mStopPending = false;
        }

        // Change resampler and retry if format change happened
        if (mFormatChanged) {
            mFormatChanged = false;
            checkAndSetResampler();
            free(pTmpBuffer);
            return read(buffer_out, NULL);
        }

        // Create a new MediaBuffer
        int32_t outBufferSize = kOutputFrameCount * 2 * sizeof(int16_t);
        MediaBuffer* outBuffer = new MediaBuffer(outBufferSize);

        // Convert back to 2 channels and 16 bits
        ditherAndClamp(
                (int32_t *)((uint8_t*)outBuffer->data() + outBuffer->range_offset()),
                pTmpBuffer, kOutputFrameCount);
        free(pTmpBuffer);

        // Compute and set the new timestamp: accumulated output bytes
        // divided by (rate * 2 ch * 2 bytes/sample) gives elapsed seconds.
        sp<MetaData> to = outBuffer->meta_data();
        int64_t totalOutDurationUs = (mAccuOutBufferSize * 1000000) / (mOutputSampleRate * 2 * 2);
        int64_t timeUs = mInitialTimeStampUs + totalOutDurationUs;
        to->setInt64(kKeyTime, timeUs);

        // update the accumulate size
        mAccuOutBufferSize += outBufferSize;
        *buffer_out = outBuffer;
    } else {
        // Resampling not required. Read and pass-through.
        MediaBuffer *aBuffer;
        status_t err = mSource->read(&aBuffer, options);
        if (err != OK) {
            ALOGV("read returns err = %d", err);
        }

        if (err == INFO_FORMAT_CHANGED) {
            checkAndSetResampler();
            return read(buffer_out, NULL);
        }

        // EOS or some other error
        if(err != OK) {
            stop();
            *buffer_out = NULL;
            return err;
        }
        *buffer_out = aBuffer;
    }

    return OK;
}
コード例 #24
0
// Writer thread main loop: pulls encoded AAC buffers from the source and
// appends each one to the output file as an ADTS frame (header + payload)
// until the source signals EOS, an I/O error occurs, or a configured
// size/duration limit is reached.  ERROR_END_OF_STREAM is translated to OK.
status_t AACWriter::threadFunc() {
    mEstimatedDurationUs = 0;
    mEstimatedSizeBytes = 0;
    int64_t pausedDurationUs = 0;   // total time spent paused so far
    int64_t maxTimestampUs = 0;     // largest adjusted timestamp written
    status_t err = OK;

    prctl(PR_SET_NAME, (unsigned long)"AACWriterThread", 0, 0, 0);

    while (!mDone && err == OK) {
        MediaBuffer *buffer = NULL;
        err = mSource->read(&buffer);
        if (err != OK) {
            break;
        }

        // Drop everything received while paused.
        if (mPaused) {
            buffer->release();
            continue;
        }

        // Account for the frame (ADTS header included) before checking
        // the size limit.
        mEstimatedSizeBytes += kAdtsHeaderLength + buffer->range_length();
        if (exceedsFileSizeLimit()) {
            buffer->release();
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
            break;
        }

        // Codec-config buffers carry no audio payload; skip them.
        int32_t isCodecSpecific = 0;
        if (buffer->meta_data()->findInt32(kKeyIsCodecConfig, &isCodecSpecific)
                && isCodecSpecific) {
            ALOGV("Drop codec specific info buffer");
            buffer->release();
            continue;
        }

        int64_t timestampUs;
        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));
        if (timestampUs > mEstimatedDurationUs) {
            mEstimatedDurationUs = timestampUs;
        }

        if (mResumed) {
            // First frame after a resume: grow the paused-time offset by
            // the gap between this frame and the last frame written.
            pausedDurationUs += (timestampUs - maxTimestampUs - mFrameDurationUs);
            mResumed = false;
        }
        timestampUs -= pausedDurationUs;
        ALOGV("time stamp: %lld, previous paused duration: %lld",
            timestampUs, pausedDurationUs);
        if (timestampUs > maxTimestampUs) {
            maxTimestampUs = timestampUs;
        }

        if (exceedsFileDurationLimit()) {
            buffer->release();
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
            break;
        }

        // Each output AAC audio frame to the file contains
        // 1. an ADTS header, followed by
        // 2. the compressed audio data.
        ssize_t payloadLength = buffer->range_length();
        uint8_t *payload = (uint8_t *)buffer->data() + buffer->range_offset();
        if (writeAdtsHeader(kAdtsHeaderLength + payloadLength) != OK ||
            payloadLength != write(mFd, payload, payloadLength)) {
            err = ERROR_IO;
        }

        buffer->release();
    }

    close(mFd);
    mFd = -1;
    mReachedEOS = true;
    return (err == ERROR_END_OF_STREAM) ? OK : err;
}
コード例 #25
0
ファイル: OggWriter.cpp プロジェクト: aurorarom/JsonUtil
// Writer thread main loop: drains encoded Ogg pages from mSource and
// appends them to mFile, rewriting each page's 64-bit timestamp field to
// subtract samples accumulated while the recorder was paused.
// ERROR_END_OF_STREAM from the source is translated to OK.
status_t OggWriter::threadFunc()
{
    mEstimatedDurationUs = 0;
    mEstimatedSizeBytes = 0;
    bool stoppedPrematurely = true;
    int64_t previousPausedDurationUs = 0;
    int64_t maxTimestampUs = 0;
    status_t err = OK;
    int64_t ltotalSize = 0;
    int64_t timestampUs = 0;
    //
    int64_t tsUsPauseBeg = 0;
    int64_t tsUsPauseEnd = 0;
    //paused sample count
    int64_t smpPaused = 0;
    //
    prctl(PR_SET_NAME, (unsigned long)"OggWriter", 0, 0, 0);

    //struct sched_param param;
    //param.sched_priority = RTPM_PRIO_OMX_AUDIO;
    //sched_setscheduler(0, SCHED_RR, &param);
    //androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO);
    //while (!mDone) {
    while (1 == 1)
    {
        MediaBuffer *buffer = NULL;
        MediaBuffer *buffer1 = NULL;

        if (mDone)
        {
            // NOTE(review): this zero-length buffer looks intended as a
            // shutdown signal, but mSource->read() below overwrites
            // 'buffer', and 'buffer1' (still pointing at the MediaBuffer(0))
            // is never released or used afterwards — apparent per-iteration
            // leak on the shutdown path; confirm against the source's
            // read() contract.
            buffer = new MediaBuffer(0);
            buffer1 = buffer;
        }

        LOGV("OggWriter::threadFunc:mSource->read+:buffer=%p,buffer1=%p", buffer, buffer1);
        err = mSource->read(&buffer);
        LOGV("OggWriter::threadFunc:mSource->read-,err=%d,buffer=%p", err, buffer);

        if (err != OK)
        {
            break;
        }

        LOGV("OggWriter::threadFunc:buffer->range_length()=%d", buffer->range_length());

        //buffer->range_length() == 0, ogg encoder SWIP caching data
        if (mPaused || buffer->range_length() == 0)
        {
            //mtk80721 deal pause time error+
            // Remember the timestamp of the first buffer seen after
            // entering pause; used below to compute the paused duration.
            if (mPaused && mPausedflag)
            {
                buffer->meta_data()->findInt64(kKeyTime, &tsUsPauseBeg);
                LOGD("OggWriter::threadFunc,pausetime=%d,tsUsPauseBeg=%lld", iPausedTime, tsUsPauseBeg);
                mPausedflag =  false;
            }

            //-
            if (buffer->range_length() > 0)
            {
                //not vorbis header data should be released
                // Presumably offset 29 is where the "vorbis" tag sits in a
                // header page; only header pages are kept while paused —
                // TODO confirm against the Ogg/Vorbis page layout.
                if (memcmp(buffer->data() + 29, "vorbis", 6) != 0)
                {
                    buffer->release();
                    buffer = NULL;
                    continue;
                }
                else
                {
                    LOGD("ogg header:buffer=%p,size=%d", buffer, buffer->range_length());
                }
            }
            else
            {
                buffer->release();
                buffer = NULL;
                continue;
            }
        }

        mEstimatedSizeBytes += buffer->range_length();

        if (exceedsFileSizeLimit())
        {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
            break;
        }

        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));

        if (timestampUs > 0)
        {
            if (timestampUs > mEstimatedDurationUs)
            {
                mEstimatedDurationUs = timestampUs;
            }

            if (mResumed)
            {
                //mtk80721 deal pause time error+
                // First buffer after resume: extend the accumulated paused
                // duration and convert it to a sample count at mSampleRate.
                buffer->meta_data()->findInt64(kKeyTime, &tsUsPauseEnd);
                LOGD("previousPausedDurationUs =%lld,pausetime=%d,tsUsPauseBeg=%lld,tsUsPauseEnd=%lld",
                     previousPausedDurationUs, iPausedTime, tsUsPauseBeg, tsUsPauseEnd);

                previousPausedDurationUs = previousPausedDurationUs + (tsUsPauseEnd - tsUsPauseBeg);
                smpPaused = previousPausedDurationUs * mSampleRate / 1000000ll;

                LOGD("previousPausedDurationUs =%lld,samplecount=%lld", previousPausedDurationUs, smpPaused);
                //previousPausedDurationUs += (timestampUs - maxTimestampUs - 20000);
                //-
                mResumed = false;
            }

            timestampUs -= previousPausedDurationUs;
            LOGV("time stamp: %lld, previous paused duration: %lld", timestampUs, previousPausedDurationUs);

            if (timestampUs > maxTimestampUs)
            {
                maxTimestampUs = timestampUs;
            }
        }

        if (exceedsFileDurationLimit())
        {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
            break;
        }

        LOGV("OggWriter::threadFunc:fwrite");
        //write timestamp
        // The 64-bit little-endian timestamp field at byte offset 6 of the
        // page is rewritten in place, shifted back by the paused samples.
        uint8_t *ptimestamp = (uint8_t *)buffer->data() + buffer->range_offset() + 6;
        uint64_t ts = U64LE_AT(ptimestamp);

        if (smpPaused > 0)
        {
            ts -= smpPaused;
            memcpy(ptimestamp, &ts, sizeof(int64_t));
        }

        ssize_t n = fwrite(
                        (const uint8_t *)buffer->data() + buffer->range_offset(),
                        1,
                        buffer->range_length(),
                        mFile);

        ltotalSize += n;

        // Short write: give up without reporting an error status here.
        if (n < (ssize_t)buffer->range_length())
        {
            buffer->release();
            buffer = NULL;
            break;
        }

        // XXX: How to tell it is stopped prematurely?
        if (stoppedPrematurely)
        {
            stoppedPrematurely = false;
        }

        LOGV("OggWriter::threadFunc:buffer->release:buffer=%p", buffer);
        buffer->release();
        buffer = NULL;
    }

    if (stoppedPrematurely)
    {
        notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_TRACK_INFO_COMPLETION_STATUS, UNKNOWN_ERROR);
    }

    /*
    //
        int bitrate = ltotalSize  / (timestampUs/1E6) * 8;
        LOGV("ltotalSize=%lld, timestampUs=%lld, bitrate=%d",ltotalSize, timestampUs, bitrate);
     //seek to the bitrate field
        fseek(mFile, 44, SEEK_SET);
     // max bitrate
        fwrite(&bitrate, 1, sizeof(int), mFile);
     // nominal bitrate
        fwrite(&bitrate, 1, sizeof(int), mFile);
     // min bitrate
        fwrite(&bitrate, 1, sizeof(int), mFile);
    */
    fflush(mFile);
    fclose(mFile);
    mFile = NULL;
    mReachedEOS = true;

    if (err == ERROR_END_OF_STREAM)
    {
        return OK;
    }

    return err;
}
コード例 #26
0
// Prepares one video frame at time Cts for encoding: allocates an I420
// MediaBuffer, runs the registered pre-processing callback into its three
// planes, optionally converts the result to the encoder's native input
// layout, and stamps the buffer's presentation time.
// NOTE(review): Cts is presumably in milliseconds (it is scaled by 1000
// for kKeyTime, which is in microseconds) — confirm with callers.
M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer(
        M4ENCODER_Context pContext, M4OSA_Double Cts,
        M4OSA_Bool bReachedEOS) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    M4VIFI_ImagePlane pOutPlane[3];
    MediaBuffer* buffer = NULL;
    int32_t nbBuffer = 0;

    ALOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts  %f", Cts);
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    pOutPlane[0].pac_data = M4OSA_NULL;
    pOutPlane[1].pac_data = M4OSA_NULL;
    pOutPlane[2].pac_data = M4OSA_NULL;

    if ( M4OSA_FALSE == bReachedEOS ) {
        // 4:2:0 planar layout: full-size luma plane plus two chroma
        // planes each a quarter of the luma size.
        M4OSA_UInt32 sizeY = pEncoderContext->mCodecParams->FrameWidth *
            pEncoderContext->mCodecParams->FrameHeight;
        M4OSA_UInt32 sizeU = sizeY >> 2;
        M4OSA_UInt32 size  = sizeY + 2*sizeU;
        M4OSA_UInt8* pData = M4OSA_NULL;
        buffer = new MediaBuffer((size_t)size);
        pData = (M4OSA_UInt8*)buffer->data() + buffer->range_offset();

        // Prepare the output image for pre-processing
        pOutPlane[0].u_width   = pEncoderContext->mCodecParams->FrameWidth;
        pOutPlane[0].u_height  = pEncoderContext->mCodecParams->FrameHeight;
        pOutPlane[0].u_topleft = 0;
        pOutPlane[0].u_stride  = pOutPlane[0].u_width;
        pOutPlane[1].u_width   = pOutPlane[0].u_width/2;
        pOutPlane[1].u_height  = pOutPlane[0].u_height/2;
        pOutPlane[1].u_topleft = 0;
        pOutPlane[1].u_stride  = pOutPlane[0].u_stride/2;
        pOutPlane[2].u_width   = pOutPlane[1].u_width;
        pOutPlane[2].u_height  = pOutPlane[1].u_height;
        pOutPlane[2].u_topleft = 0;
        pOutPlane[2].u_stride  = pOutPlane[1].u_stride;

        // Point the three planes into the single contiguous buffer:
        // Y at the start, then U, then V.
        pOutPlane[0].pac_data = pData;
        pOutPlane[1].pac_data = pData + sizeY;
        pOutPlane[2].pac_data = pData + sizeY + sizeU;

        // Apply pre-processing
        err = pEncoderContext->mPreProcFunction(
            pEncoderContext->mPreProcContext, M4OSA_NULL, pOutPlane);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

        // Convert MediaBuffer to the encoder input format if necessary
        if (pEncoderContext->mI420ColorConverter) {
            I420ColorConverter* converter = pEncoderContext->mI420ColorConverter;
            int actualWidth = pEncoderContext->mCodecParams->FrameWidth;
            int actualHeight = pEncoderContext->mCodecParams->FrameHeight;

            int encoderWidth, encoderHeight;
            ARect encoderRect;
            int encoderBufferSize;

            if (converter->getEncoderInputBufferInfo(
                actualWidth, actualHeight,
                &encoderWidth, &encoderHeight,
                &encoderRect, &encoderBufferSize) == 0) {

                MediaBuffer* newBuffer = new MediaBuffer(encoderBufferSize);

                // Conversion failure is logged but not treated as fatal:
                // the (possibly garbage) converted buffer is still used.
                if (converter->convertI420ToEncoderInput(
                    pData,  // srcBits
                    actualWidth, actualHeight,
                    encoderWidth, encoderHeight,
                    encoderRect,
                    (uint8_t*)newBuffer->data() + newBuffer->range_offset()) < 0) {
                    ALOGE("convertI420ToEncoderInput failed");
                }

                // switch to new buffer
                buffer->release();
                buffer = newBuffer;
            }
        }

        // Set the metadata
        buffer->meta_data()->setInt64(kKeyTime, (int64_t)(Cts*1000));
    }
コード例 #27
0
// Track writer thread: drains buffers from the source, captures MPEG-4
// codec-specific data from the first sample when needed, hands each sample
// to the owner for writing, and records its size/offset/timestamp for the
// sample table.  Runs until mDone is set or the source stops returning OK.
void MPEG4Writer::Track::threadEntry() {
    bool is_mpeg4 = false;
    sp<MetaData> meta = mSource->getFormat();
    const char *mime;
    // BUGFIX: the return value was previously ignored; if the key were
    // missing, 'mime' would be read uninitialized by strcasecmp below.
    CHECK(meta->findCString(kKeyMIMEType, &mime));
    is_mpeg4 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4);

    MediaBuffer *buffer;
    while (!mDone && mSource->read(&buffer) == OK) {
        if (buffer->range_length() == 0) {
            // Empty buffers carry no sample data; skip them.
            buffer->release();
            buffer = NULL;

            continue;
        }

        if (mCodecSpecificData == NULL && is_mpeg4) {
            // The first MPEG-4 sample starts with the codec configuration:
            // everything up to the first VOP start code (00 00 01 b6).
            const uint8_t *data =
                (const uint8_t *)buffer->data() + buffer->range_offset();

            const size_t size = buffer->range_length();

            size_t offset = 0;
            while (offset + 3 < size) {
                if (data[offset] == 0x00 && data[offset + 1] == 0x00
                    && data[offset + 2] == 0x01 && data[offset + 3] == 0xb6) {
                    break;
                }

                ++offset;
            }

            // CHECK(offset + 3 < size);
            if (offset + 3 >= size) {
                // XXX assume the entire first chunk of data is the codec specific
                // data.
                offset = size;
            }

            mCodecSpecificDataSize = offset;
            mCodecSpecificData = malloc(offset);
            memcpy(mCodecSpecificData, data, offset);

            // Advance past the config so only the payload is written out.
            buffer->set_range(buffer->range_offset() + offset, size - offset);
        }

        off_t offset = mOwner->addSample(buffer);

        SampleInfo info;
        info.size = buffer->range_length();
        info.offset = offset;

        int32_t units, scale;
        bool success =
            buffer->meta_data()->findInt32(kKeyTimeUnits, &units);
        CHECK(success);
        success =
            buffer->meta_data()->findInt32(kKeyTimeScale, &scale);
        CHECK(success);

        // Scale (units, scale) to milliseconds: units/scale seconds * 1000.
        info.timestamp = (int64_t)units * 1000 / scale;

        mSampleInfos.push_back(info);

        buffer->release();
        buffer = NULL;
    }

    mReachedEOS = true;
}
コード例 #28
0
// Decodes a single video frame from 'source' with an OMX decoder created
// using 'flags', seeks to 'frameTimeUs' (or the track's thumbnail time when
// frameTimeUs < 0), and returns it color-converted to RGB565.
// Returns NULL on any failure; the caller owns the returned VideoFrame.
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client,
        const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source,
        uint32_t flags,
        int64_t frameTimeUs,
        int seekMode) {

    sp<MetaData> format = source->getFormat();

// NOTE(review): the brace structure of this section depends on which of
// MTK_HARDWARE / QCOM_HARDWARE are defined; verify every configuration
// actually compiles before touching it.
#ifndef MTK_HARDWARE
    // XXX:
    // Once all vendors support OMX_COLOR_FormatYUV420Planar, we can
    // remove this check and always set the decoder output color format
    // skip this check for software decoders
#ifndef QCOM_HARDWARE
    if (isYUV420PlanarSupported(client, trackMeta)) {
        format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
#else
    if (!(flags & OMXCodec::kSoftwareCodecsOnly)) {
        if (isYUV420PlanarSupported(client, trackMeta)) {
            format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
        }
#endif
    }
#endif

    sp<MediaSource> decoder =
        OMXCodec::Create(
                client->interface(), format, false, source,
                NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        ALOGV("unable to instantiate video decoder.");

        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        ALOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.

    MediaSource::ReadOptions options;
    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {

        ALOGE("Unknown seek mode: %d", seekMode);
        return NULL;
    }

    MediaSource::ReadOptions::SeekMode mode =
            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);

    // A negative frameTimeUs means "use the track's thumbnail time";
    // thumbNailTime stays >= 0 only in that case so the mismatch check
    // below can be limited to it.
    int64_t thumbNailTime;
    if (frameTimeUs < 0) {
        if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options.setSeekTo(thumbNailTime, mode);
    } else {
        thumbNailTime = -1;
        options.setSeekTo(frameTimeUs, mode);
    }

    // Only the first read carries the seek; subsequent retries read
    // sequentially until a non-empty buffer arrives.
    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            buffer->release();
            buffer = NULL;
        }
        err = decoder->read(&buffer, &options);
        options.clearSeekTo();
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        CHECK(buffer == NULL);

        ALOGV("decoding frame failed.");
        decoder->stop();

        return NULL;
    }

    ALOGV("successfully decoded video frame.");

    // Secure/protected buffers expose no pixel data we can convert.
    int32_t unreadable;
    if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        ALOGV("video frame is unreadable, decoder does not give us access "
             "to the video data.");

        buffer->release();
        buffer = NULL;

        decoder->stop();

        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));

            ALOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
                 thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    // Without an explicit crop rect, the full decoded frame is used.
    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!meta->findRect(
                kKeyCropRect,
                &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    // mSize = width * height * 2: destination is RGB565, 2 bytes/pixel.
    VideoFrame *frame = new VideoFrame;
    frame->mWidth = crop_right - crop_left + 1;
    frame->mHeight = crop_bottom - crop_top + 1;
    frame->mDisplayWidth = frame->mWidth;
    frame->mDisplayHeight = frame->mHeight;
    frame->mSize = frame->mWidth * frame->mHeight * 2;
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    int32_t displayWidth, displayHeight;
    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
        frame->mDisplayWidth = displayWidth;
    }
    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
        frame->mDisplayHeight = displayHeight;
    }

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

#ifdef MTK_HARDWARE
    // MTK decoders report buffer geometry via stride/slice-height rather
    // than width/height; substitute them for the conversion below.
    {
        int32_t Stridewidth,SliceHeight;
        CHECK(meta->findInt32(kKeyStride, &Stridewidth));
        CHECK(meta->findInt32(kKeySliceHeight, &SliceHeight));
        ALOGD("kKeyWidth=%d,kKeyHeight=%d",width,height);
        ALOGD("Stridewidth=%d,SliceHeight=%d",Stridewidth,SliceHeight);

        width=Stridewidth;
        height=SliceHeight;
    }
#endif

    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);

    if (converter.isValid()) {
        err = converter.convert(
                (const uint8_t *)buffer->data() + buffer->range_offset(),
                width, height,
                crop_left, crop_top, crop_right, crop_bottom,
                frame->mData,
                frame->mWidth,
                frame->mHeight,
                0, 0, frame->mWidth - 1, frame->mHeight - 1);
    } else {
        ALOGE("Unable to instantiate color conversion from format 0x%08x to "
              "RGB565",
              srcFormat);

        err = ERROR_UNSUPPORTED;
    }

    buffer->release();
    buffer = NULL;

    decoder->stop();

    if (err != OK) {
        ALOGE("Colorconverter failed to convert frame.");

        delete frame;
        frame = NULL;
    }

    return frame;
}

// Extracts a single decoded video frame at 'timeUs' from the currently
// set data source, or returns NULL on failure.  A software decoder is
// attempted first, with the hardware decoder as a fallback.  Embedded
// album art is cached as a side effect the first time it is seen.
VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
        int64_t timeUs, int option) {

    ALOGV("getFrameAtTime: %lld us option: %d", timeUs, option);

    if (mExtractor.get() == NULL) {
        ALOGV("no extractor.");
        return NULL;
    }

    sp<MetaData> fileMeta = mExtractor->getMetaData();
    if (fileMeta == NULL) {
        ALOGV("extractor doesn't publish metadata, failed to initialize?");
        return NULL;
    }

    // Honor DRM restrictions before touching any track data.
    int32_t drm = 0;
    if (fileMeta->findInt32(kKeyIsDRM, &drm) && drm != 0) {
        ALOGE("frame grab not allowed.");
        return NULL;
    }

    // Locate the first track whose MIME type is "video/*".
    size_t numTracks = mExtractor->countTracks();
    size_t videoTrack = 0;
    bool haveVideo = false;
    for (size_t idx = 0; idx < numTracks; ++idx) {
        sp<MetaData> trackInfo = mExtractor->getTrackMetaData(idx);

        const char *mime;
        CHECK(trackInfo->findCString(kKeyMIMEType, &mime));

        if (!strncasecmp(mime, "video/", 6)) {
            videoTrack = idx;
            haveVideo = true;
            break;
        }
    }

    if (!haveVideo) {
        ALOGV("no video track found.");
        return NULL;
    }

    sp<MetaData> trackMeta = mExtractor->getTrackMetaData(
            videoTrack, MediaExtractor::kIncludeExtensiveMetaData);

    sp<MediaSource> source = mExtractor->getTrack(videoTrack);
    if (source.get() == NULL) {
        ALOGV("unable to instantiate video track.");
        return NULL;
    }

    // Cache embedded album art the first time it is encountered.
    const void *artData;
    uint32_t artType;
    size_t artSize;
    if (fileMeta->findData(kKeyAlbumArt, &artType, &artData, &artSize)
            && mAlbumArt == NULL) {
        mAlbumArt = new MediaAlbumArt;
        mAlbumArt->mSize = artSize;
        mAlbumArt->mData = new uint8_t[artSize];
        memcpy(mAlbumArt->mData, artData, artSize);
    }

    VideoFrame *frame =
        extractVideoFrameWithCodecFlags(
#ifndef QCOM_HARDWARE
                &mClient, trackMeta, source, OMXCodec::kPreferSoftwareCodecs,
#else
                &mClient, trackMeta, source, OMXCodec::kSoftwareCodecsOnly,
#endif
                timeUs, option);

    if (frame == NULL) {
        ALOGV("Software decoder failed to extract thumbnail, "
             "trying hardware decoder.");

        frame = extractVideoFrameWithCodecFlags(&mClient, trackMeta, source, 0,
                        timeUs, option);
    }

    return frame;
}
コード例 #29
0
// Reads the next block from the Matroska stream into a newly allocated
// MediaBuffer and returns it in *out.
//
// AVC samples have their 2-byte NAL-length prefix replaced by a 4-byte
// Annex-B start code (the buffer is allocated 2 bytes larger to make
// room); AAC samples have leading junk before the 0x21 marker stripped.
// Returns ERROR_END_OF_STREAM at end of stream or on a failed block read.
status_t MatroskaSource::read(
        MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        mBlockIter.seek(seekTimeUs);
    }

    if (mBlockIter.eos()) {
        return ERROR_END_OF_STREAM;
    }

    const mkvparser::Block *block = mBlockIter.block();
    size_t size = block->GetSize();
    int64_t timeUs = mBlockIter.blockTimeUs();

    // Reserve 2 extra leading bytes so the AVC case below can widen the
    // 2-byte NAL length prefix into a 4-byte start code in place.
    MediaBuffer *buffer = new MediaBuffer(size + 2);
    buffer->meta_data()->setInt64(kKeyTime, timeUs);
    buffer->meta_data()->setInt32(kKeyIsSyncFrame, block->IsKey());

    long res = block->Read(
            mExtractor->mReader, (unsigned char *)buffer->data() + 2);

    if (res != 0) {
        // BUGFIX: release the buffer before bailing out; previously one
        // MediaBuffer was leaked on every failed block read.
        buffer->release();
        buffer = NULL;

        return ERROR_END_OF_STREAM;
    }

    buffer->set_range(2, size);

    if (mType == AVC) {
        CHECK(size >= 2);

        uint8_t *data = (uint8_t *)buffer->data();

        // Single NAL per block: its 16-bit big-endian length sits at
        // offsets 2..3 and must account for the whole payload.
        unsigned NALsize = data[2] << 8 | data[3];
        CHECK_EQ(size, NALsize + 2);

        // Overwrite the 2 spare bytes plus the 2-byte length field with
        // the 4-byte Annex-B start code.
        memcpy(data, "\x00\x00\x00\x01", 4);
        buffer->set_range(0, size + 2);
    } else if (mType == AAC) {
        // There's strange junk at the beginning...

        const uint8_t *data = (const uint8_t *)buffer->data() + 2;
        size_t offset = 0;
        while (offset < size && data[offset] != 0x21) {
            ++offset;
        }
        buffer->set_range(2 + offset, size - offset);
    }

    *out = buffer;

#if 0
    hexdump((const uint8_t *)buffer->data() + buffer->range_offset(),
            buffer->range_length());
#endif

    mBlockIter.advance();

    return OK;
}