Exemple #1
0
// Decodes and drains |source| end-to-end, driven by global command-line
// flags:
//   - gPlaybackAudio:       render the decoded audio via AudioPlayer
//   - gReproduceBug 3..5:   random-seek stress loop with sanity checks
//   - otherwise:            timed decode loop printing fps / KB/s stats
// |source| is cleared early on (ownership transfers into the decode chain),
// so the caller's reference is gone after this call.
static void playSource(OMXClient *client, sp<MediaSource> &source) {
    sp<MetaData> meta = source->getFormat();

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    // Raw PCM can be consumed directly; everything else is run through an
    // OMX decoder instance.
    sp<MediaSource> rawSource;
    if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
        rawSource = source;
    } else {
        rawSource = OMXCodec::Create(
            client->interface(), meta, false /* createEncoder */, source,
            NULL /* matchComponentName */,
            gPreferSoftwareCodec ? OMXCodec::kPreferSoftwareCodecs : 0);

        if (rawSource == NULL) {
            fprintf(stderr, "Failed to instantiate decoder for '%s'.\n", mime);
            return;
        }
    }

    // Release the caller's strong reference; rawSource keeps the chain alive.
    source.clear();

    status_t err = rawSource->start();

    if (err != OK) {
        fprintf(stderr, "rawSource returned error %d (0x%08x)\n", err, err);
        return;
    }

    if (gPlaybackAudio) {
        // Hand the decoded stream to an AudioPlayer and poll (100ms sleeps)
        // until it reports end-of-stream.
        AudioPlayer *player = new AudioPlayer(NULL);
        player->setSource(rawSource);
        rawSource.clear();

        player->start(true /* sourceAlreadyStarted */);

        status_t finalStatus;
        while (!player->reachedEOS(&finalStatus)) {
            usleep(100000ll);
        }

        delete player;
        player = NULL;

        return;
    } else if (gReproduceBug >= 3 && gReproduceBug <= 5) {
        // Seek-stress mode: read buffers, randomly seek (~10% of buffers),
        // and for bugs 4/5 verify the post-seek timestamp is sane.
        int64_t durationUs;
        CHECK(meta->findInt64(kKeyDuration, &durationUs));

        status_t err;
        MediaBuffer *buffer;
        MediaSource::ReadOptions options;
        int64_t seekTimeUs = -1;
        for (;;) {
            err = rawSource->read(&buffer, &options);
            options.clearSeekTo();

            bool shouldSeek = false;
            if (err == INFO_FORMAT_CHANGED) {
                CHECK(buffer == NULL);

                printf("format changed.\n");
                continue;
            } else if (err != OK) {
                printf("reached EOF.\n");

                // On EOF, force a seek back into the stream to keep looping.
                shouldSeek = true;
            } else {
                int64_t timestampUs;
                CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));

                bool failed = false;

                if (seekTimeUs >= 0) {
                    int64_t diff = timestampUs - seekTimeUs;

                    if (diff < 0) {
                        diff = -diff;
                    }

                    // Bug 4: decoder landed more than 0.5s away from the
                    // requested seek position. Bug 5: negative timestamp.
                    if ((gReproduceBug == 4 && diff > 500000)
                        || (gReproduceBug == 5 && timestampUs < 0)) {
                        printf("wanted: %.2f secs, got: %.2f secs\n",
                               seekTimeUs / 1E6, timestampUs / 1E6);

                        printf("ERROR: ");
                        failed = true;
                    }
                }

                printf("buffer has timestamp %lld us (%.2f secs)\n",
                       timestampUs, timestampUs / 1E6);

                buffer->release();
                buffer = NULL;

                if (failed) {
                    break;
                }

                // Seek after roughly 10% of buffers (never for bug 3).
                shouldSeek = ((double)rand() / RAND_MAX) < 0.1;

                if (gReproduceBug == 3) {
                    shouldSeek = false;
                }
            }

            seekTimeUs = -1;

            if (shouldSeek) {
                // Pick a uniformly random target within the duration.
                seekTimeUs = (rand() * (float)durationUs) / RAND_MAX;
                options.setSeekTo(seekTimeUs);

                printf("seeking to %lld us (%.2f secs)\n",
                       seekTimeUs, seekTimeUs / 1E6);
            }
        }

        rawSource->stop();

        return;
    }

    // Benchmark mode: decode the stream gNumRepetitions times, measuring
    // per-buffer decode latency and total output bytes.
    int n = 0;
    int64_t startTime = getNowUs();

    long numIterationsLeft = gNumRepetitions;
    MediaSource::ReadOptions options;

    int64_t sumDecodeUs = 0;
    int64_t totalBytes = 0;

    while (numIterationsLeft-- > 0) {
        long numFrames = 0;

        MediaBuffer *buffer;

        for (;;) {
            int64_t startDecodeUs = getNowUs();
            status_t err = rawSource->read(&buffer, &options);
            int64_t delayDecodeUs = getNowUs() - startDecodeUs;

            options.clearSeekTo();

            if (err != OK) {
                CHECK(buffer == NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    printf("format changed.\n");
                    continue;
                }

                break;
            }

            // Progress indicator: one dot per 16 non-empty buffers.
            if (buffer->range_length() > 0 && (n++ % 16) == 0) {
                printf(".");
                fflush(stdout);
            }

            sumDecodeUs += delayDecodeUs;
            totalBytes += buffer->range_length();

            buffer->release();
            buffer = NULL;

            ++numFrames;
            if (gMaxNumFrames > 0 && numFrames == gMaxNumFrames) {
                break;
            }

            // Bug 1/2 reproduction: trigger a seek after 40 frames.
            if (gReproduceBug == 1 && numFrames == 40) {
                printf("seeking past the end now.");
                options.setSeekTo(0x7fffffffL);
            } else if (gReproduceBug == 2 && numFrames == 40) {
                printf("seeking to 5 secs.");
                options.setSeekTo(5000000);
            }
        }

        printf("$");
        fflush(stdout);

        // Rewind for the next repetition.
        options.setSeekTo(0);
    }

    rawSource->stop();
    printf("\n");

    // Report stats: frames/sec for video, KB/sec throughput for audio.
    int64_t delay = getNowUs() - startTime;
    if (!strncasecmp("video/", mime, 6)) {
        printf("avg. %.2f fps\n", n * 1E6 / delay);

        printf("avg. time to decode one buffer %.2f usecs\n",
               (double)sumDecodeUs / n);

        printf("decoded a total of %d frame(s).\n", n);
    } else if (!strncasecmp("audio/", mime, 6)) {
        // Frame count makes less sense for audio, as the output buffer
        // sizes may be different across decoders.
        printf("avg. %.2f KB/sec\n", totalBytes / 1024 * 1E6 / delay);

        printf("decoded a total of %lld bytes\n", totalBytes);
    }
}
Exemple #2
0
// Writer thread main loop: pulls encoded Vorbis buffers from mSource and
// appends them to mFile until the source errors out or mDone is set.
// Maintains pause/resume bookkeeping so paused wall-clock time is
// subtracted (as a sample count) from the Ogg granule positions written.
// Returns OK on a clean end-of-stream, otherwise the source's error code.
status_t OggWriter::threadFunc()
{
    mEstimatedDurationUs = 0;
    mEstimatedSizeBytes = 0;
    bool stoppedPrematurely = true;
    int64_t previousPausedDurationUs = 0;
    int64_t maxTimestampUs = 0;
    status_t err = OK;
    int64_t ltotalSize = 0;
    int64_t timestampUs = 0;
    // Timestamps bracketing the most recent pause interval.
    int64_t tsUsPauseBeg = 0;
    int64_t tsUsPauseEnd = 0;
    //paused sample count
    int64_t smpPaused = 0;
    // Name this thread for debugging.
    prctl(PR_SET_NAME, (unsigned long)"OggWriter", 0, 0, 0);

    //struct sched_param param;
    //param.sched_priority = RTPM_PRIO_OMX_AUDIO;
    //sched_setscheduler(0, SCHED_RR, &param);
    //androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO);
    //while (!mDone) {
    while (1 == 1)
    {
        MediaBuffer *buffer = NULL;
        MediaBuffer *buffer1 = NULL;

        // NOTE(review): when mDone is set, a zero-length MediaBuffer is
        // allocated here, but 'buffer' is immediately overwritten by
        // mSource->read() below and 'buffer1' is never released -- looks
        // like a leak; confirm intended ownership with the source impl.
        if (mDone)
        {
            buffer = new MediaBuffer(0);
            buffer1 = buffer;
        }

        LOGV("OggWriter::threadFunc:mSource->read+:buffer=%p,buffer1=%p", buffer, buffer1);
        err = mSource->read(&buffer);
        LOGV("OggWriter::threadFunc:mSource->read-,err=%d,buffer=%p", err, buffer);

        if (err != OK)
        {
            break;
        }

        LOGV("OggWriter::threadFunc:buffer->range_length()=%d", buffer->range_length());

        //buffer->range_length() == 0, ogg encoder SWIP caching data
        if (mPaused || buffer->range_length() == 0)
        {
            // On the first buffer after pausing, remember where the pause
            // began so the paused interval can be measured on resume.
            if (mPaused && mPausedflag)
            {
                buffer->meta_data()->findInt64(kKeyTime, &tsUsPauseBeg);
                LOGD("OggWriter::threadFunc,pausetime=%d,tsUsPauseBeg=%lld", iPausedTime, tsUsPauseBeg);
                mPausedflag =  false;
            }

            //-
            if (buffer->range_length() > 0)
            {
                // Keep packets carrying "vorbis" at offset 29 (header
                // packets) even while paused; drop everything else.
                if (memcmp(buffer->data() + 29, "vorbis", 6) != 0)
                {
                    buffer->release();
                    buffer = NULL;
                    continue;
                }
                else
                {
                    LOGD("ogg header:buffer=%p,size=%d", buffer, buffer->range_length());
                }
            }
            else
            {
                buffer->release();
                buffer = NULL;
                continue;
            }
        }

        mEstimatedSizeBytes += buffer->range_length();

        if (exceedsFileSizeLimit())
        {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
            break;
        }

        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));

        if (timestampUs > 0)
        {
            if (timestampUs > mEstimatedDurationUs)
            {
                mEstimatedDurationUs = timestampUs;
            }

            if (mResumed)
            {
                // First buffer after resume: extend the accumulated paused
                // duration and convert it to a sample count, used below to
                // correct the written granule positions.
                buffer->meta_data()->findInt64(kKeyTime, &tsUsPauseEnd);
                LOGD("previousPausedDurationUs =%lld,pausetime=%d,tsUsPauseBeg=%lld,tsUsPauseEnd=%lld",
                     previousPausedDurationUs, iPausedTime, tsUsPauseBeg, tsUsPauseEnd);

                previousPausedDurationUs = previousPausedDurationUs + (tsUsPauseEnd - tsUsPauseBeg);
                smpPaused = previousPausedDurationUs * mSampleRate / 1000000ll;

                LOGD("previousPausedDurationUs =%lld,samplecount=%lld", previousPausedDurationUs, smpPaused);
                //previousPausedDurationUs += (timestampUs - maxTimestampUs - 20000);
                //-
                mResumed = false;
            }

            timestampUs -= previousPausedDurationUs;
            LOGV("time stamp: %lld, previous paused duration: %lld", timestampUs, previousPausedDurationUs);

            if (timestampUs > maxTimestampUs)
            {
                maxTimestampUs = timestampUs;
            }
        }

        if (exceedsFileDurationLimit())
        {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
            break;
        }

        LOGV("OggWriter::threadFunc:fwrite");
        // Patch the 64-bit little-endian granule-position field (offset 6
        // in the Ogg page header) to subtract samples "lost" while paused.
        uint8_t *ptimestamp = (uint8_t *)buffer->data() + buffer->range_offset() + 6;
        uint64_t ts = U64LE_AT(ptimestamp);

        if (smpPaused > 0)
        {
            ts -= smpPaused;
            // NOTE(review): memcpy writes 'ts' back in host byte order --
            // only correct on little-endian targets; confirm.
            memcpy(ptimestamp, &ts, sizeof(int64_t));
        }

        ssize_t n = fwrite(
                        (const uint8_t *)buffer->data() + buffer->range_offset(),
                        1,
                        buffer->range_length(),
                        mFile);

        ltotalSize += n;

        // Short write => disk full or I/O error; stop writing.
        if (n < (ssize_t)buffer->range_length())
        {
            buffer->release();
            buffer = NULL;
            break;
        }

        // XXX: How to tell it is stopped prematurely?
        if (stoppedPrematurely)
        {
            stoppedPrematurely = false;
        }

        LOGV("OggWriter::threadFunc:buffer->release:buffer=%p", buffer);
        buffer->release();
        buffer = NULL;
    }

    // No buffer was ever written: report completion with an error status.
    if (stoppedPrematurely)
    {
        notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_TRACK_INFO_COMPLETION_STATUS, UNKNOWN_ERROR);
    }

    /*
    //
        int bitrate = ltotalSize  / (timestampUs/1E6) * 8;
        LOGV("ltotalSize=%lld, timestampUs=%lld, bitrate=%d",ltotalSize, timestampUs, bitrate);
     //seek to the bitrate field
        fseek(mFile, 44, SEEK_SET);
     // max bitrate
        fwrite(&bitrate, 1, sizeof(int), mFile);
     // nominal bitrate
        fwrite(&bitrate, 1, sizeof(int), mFile);
     // min bitrate
        fwrite(&bitrate, 1, sizeof(int), mFile);
    */
    fflush(mFile);
    fclose(mFile);
    mFile = NULL;
    mReachedEOS = true;

    // End-of-stream is the normal termination condition.
    if (err == ERROR_END_OF_STREAM)
    {
        return OK;
    }

    return err;
}
// Reads exactly one AMR frame (20ms of audio) from the data source into a
// buffer acquired from mGroup.
//
// If |options| carries a seek request, mOffset/mCurrentTimeUs are first
// repositioned via the offset table (one entry per 50 frames, i.e. per
// second of audio), then the remaining frames are walked one by one.
//
// Returns OK with *out set; otherwise ERROR_END_OF_STREAM,
// ERROR_MALFORMED or ERROR_IO (*out stays NULL).
status_t AMRSource::read(
        MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
#ifdef MTK_AOSP_ENHANCEMENT
        // Clamp negative seek targets to 0 so the signed seekFrame can
        // never drive the unsigned table-index computation below negative
        // (avoids the signed/unsigned comparison hazard noted upstream).
        if (seekTimeUs < 0) {
            ALOGW("seekTimeUs:%lld < 0", seekTimeUs);
            seekTimeUs = 0;
        }
#endif
        size_t size;
        int64_t seekFrame = seekTimeUs / 20000ll;  // 20ms per frame.
        mCurrentTimeUs = seekFrame * 20000ll;

        // One offset-table entry per 50 frames (one second of audio).
        size_t index = seekFrame < 0 ? 0 : seekFrame / 50;
        if (index >= mOffsetTableLength) {
            index = mOffsetTableLength - 1;
        }

        // Skip the AMR file magic: 9 bytes for AMR-WB, 6 for AMR-NB.
        mOffset = mOffsetTable[index] + (mIsWide ? 9 : 6);

        // Advance frame-by-frame from the table entry to the target frame.
        for (size_t i = 0; i < seekFrame - index * 50; i++) {
            status_t err;
            if ((err = getFrameSizeByOffset(mDataSource, mOffset,
                            mIsWide, &size)) != OK) {
                return err;
            }
            mOffset += size;
        }
    }

    uint8_t header;
    ssize_t n = mDataSource->readAt(mOffset, &header, 1);

    if (n < 1) {
        return ERROR_END_OF_STREAM;
    }
#ifdef MTK_AOSP_ENHANCEMENT
    // Error-resilient variant: instead of failing on a corrupt frame header
    // (padding bits, mask 0x83, must be zero), scan forward byte-by-byte
    // for up to 320 bytes looking for a plausible header.
    int count = 0;
    while (header & 0x83) {
        if ((count % 10) == 0)
            SXLOGW("AMRSource::read--Frame head error,skip until to find an valid one count %d", count);
        mOffset++;
        count++;
        if (count > 320) {
            // Fix: the old message named the wrong function
            // ("getFrameSizeByOffset") and claimed a 64-byte window even
            // though the scan limit above is 320 bytes.
            SXLOGW("AMRSource::read--can not find a valid frame header within 320 bytes");
            return ERROR_END_OF_STREAM;
        }
        n = mDataSource->readAt(mOffset, &header, 1);
        if (n < 1) {
            return ERROR_END_OF_STREAM;
        }
    }
#else
    if (header & 0x83) {
        // Padding bits must be 0.
        ALOGE("padding bits must be 0, header is 0x%02x", header);

        return ERROR_MALFORMED;
    }
#endif

    // Frame type (FT) selects the per-mode frame size.
    unsigned FT = (header >> 3) & 0x0f;

    size_t frameSize = getFrameSize(mIsWide, FT);
    if (frameSize == 0) {
        return ERROR_MALFORMED;
    }

    MediaBuffer *buffer;
    status_t err = mGroup->acquire_buffer(&buffer);
    if (err != OK) {
        return err;
    }

    n = mDataSource->readAt(mOffset, buffer->data(), frameSize);

    if (n != (ssize_t)frameSize) {
        buffer->release();
        buffer = NULL;

        return ERROR_IO;
    }

    buffer->set_range(0, frameSize);
    buffer->meta_data()->setInt64(kKeyTime, mCurrentTimeUs);
    // Every AMR frame is independently decodable.
    buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);

    mOffset += frameSize;
    mCurrentTimeUs += 20000;  // Each frame is 20ms

    *out = buffer;

    return OK;
}
Exemple #4
0
// Writer thread main loop: pulls encoded AAC buffers from mSource, prefixes
// each with an ADTS header, and writes both to mFd until the source errors
// out or mDone is set. Tracks pause/resume so paused wall-clock time is
// subtracted from the timestamps used for duration accounting.
// Returns OK on clean end-of-stream, ERROR_MALFORMED if no frame was ever
// written, otherwise the first error encountered.
status_t AACWriter::threadFunc() {
    mEstimatedDurationUs = 0;
    mEstimatedSizeBytes = 0;
    int64_t previousPausedDurationUs = 0;
    int64_t maxTimestampUs = 0;
    status_t err = OK;
    // Stays true until the first frame is successfully written.
    bool stoppedPrematurely = true;

    // Name this thread for debugging.
    prctl(PR_SET_NAME, (unsigned long)"AACWriterThread", 0, 0, 0);

    while (!mDone && err == OK) {
        MediaBuffer *buffer;
        err = mSource->read(&buffer);

        if (err != OK) {
            break;
        }

        // While paused, keep draining the source but drop the data.
        if (mPaused) {
            buffer->release();
            buffer = NULL;
            continue;
        }

        mEstimatedSizeBytes += kAdtsHeaderLength + buffer->range_length();
        if (exceedsFileSizeLimit()) {
            buffer->release();
            buffer = NULL;
#ifndef ANDROID_DEFAULT_CODE
ALOGW("reach max file size limit,mMaxFileSizeLimitBytes=%lld",mMaxFileSizeLimitBytes);
#endif
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
            break;
        }

        // Codec-config buffers are metadata, not frames; drop them rather
        // than write them into the stream.
        int32_t isCodecSpecific = 0;
        if (buffer->meta_data()->findInt32(kKeyIsCodecConfig, &isCodecSpecific) && isCodecSpecific) {
            ALOGV("Drop codec specific info buffer");
            buffer->release();
            buffer = NULL;
            continue;
        }

        int64_t timestampUs;
        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));
        if (timestampUs > mEstimatedDurationUs) {
            mEstimatedDurationUs = timestampUs;
        }
        // First buffer after resume: grow the paused-time correction by the
        // gap between this timestamp and the last pre-pause frame.
        if (mResumed) {
            previousPausedDurationUs += (timestampUs - maxTimestampUs - mFrameDurationUs);
            mResumed = false;
        }
        timestampUs -= previousPausedDurationUs;
        ALOGV("time stamp: %lld, previous paused duration: %lld",
            timestampUs, previousPausedDurationUs);
        if (timestampUs > maxTimestampUs) {
            maxTimestampUs = timestampUs;
        }

        if (exceedsFileDurationLimit()) {
            buffer->release();
            buffer = NULL;
#ifndef ANDROID_DEFAULT_CODE
ALOGW("reach max file duration limit,mMaxFileDurationLimitUs=%lld",mMaxFileDurationLimitUs);
#endif
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
            break;
        }

        // Each output AAC audio frame to the file contains
        // 1. an ADTS header, followed by
        // 2. the compressed audio data.
        ssize_t dataLength = buffer->range_length();
#ifndef ANDROID_DEFAULT_CODE
        // Skip empty buffers; a header with no payload would corrupt the
        // output stream.
        if (dataLength==0)
        {
ALOGW("threadFunc, read buffer length == 0");
            buffer->release();
            buffer = NULL;
            continue;
        }
#endif
        uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset();
        if (writeAdtsHeader(kAdtsHeaderLength + dataLength) != OK ||
            dataLength != write(mFd, data, dataLength)) {
            err = ERROR_IO;
        }

        buffer->release();
        buffer = NULL;

        if (err != OK) {
            break;
        }

        if (stoppedPrematurely) {
            stoppedPrematurely = false;
        }
    }

    // Loop ended cleanly but not a single frame was written: surface it as
    // a malformed-track error.
    if ((err == OK || err == ERROR_END_OF_STREAM) && stoppedPrematurely) {
#ifndef ANDROID_DEFAULT_CODE
ALOGE("threadFunc,no frame writen to file");
#endif
        err = ERROR_MALFORMED;
    }

    close(mFd);
    mFd = -1;
    mReachedEOS = true;
    if (err == ERROR_END_OF_STREAM) {
        return OK;
    }
    return err;
}
// Reads one buffer of stereo 16-bit PCM audio.
//
// If a resampler is configured, pulls 1024 output frames through it
// (32-bit stereo internally), dithers/clamps down to 16-bit, and stamps
// the result with a timestamp derived from the accumulated output size.
// Otherwise buffers from mSource are passed through untouched.
//
// On INFO_FORMAT_CHANGED the resampler is reconfigured and the read is
// retried. Returns OK with *buffer_out owning the result,
// ERROR_END_OF_STREAM when not started / at EOS, NO_MEMORY on allocation
// failure, or the source's error code.
status_t VideoEditorSRC::read(
        MediaBuffer **buffer_out, const ReadOptions *options) {
    ALOGV("read %p(%p)", this, mSource.get());
    *buffer_out = NULL;

    if (!mStarted) {
        return ERROR_END_OF_STREAM;
    }

    if (mResampler) {
        // Store the seek parameters; they are consumed when the resampler
        // pulls input from this object.
        int64_t seekTimeUs;
        ReadOptions::SeekMode mode = ReadOptions::SEEK_PREVIOUS_SYNC;
        if (options && options->getSeekTo(&seekTimeUs, &mode)) {
            ALOGV("read Seek %lld", seekTimeUs);
            mSeekTimeUs = seekTimeUs;
            mSeekMode = mode;
        }

        // We ask for 1024 frames in output
        // resampler output is always 2 channels and 32 bits
        const size_t kOutputFrameCount = 1024;
        const size_t kBytes = kOutputFrameCount * 2 * sizeof(int32_t);
        int32_t *pTmpBuffer = (int32_t *)calloc(1, kBytes);
        if (!pTmpBuffer) {
            // Fix: kBytes is a size_t; "%d" was the wrong conversion
            // specifier (undefined behavior) -- use "%zu".
            ALOGE("calloc failed to allocate memory: %zu bytes", kBytes);
            return NO_MEMORY;
        }

        // Resample to target quality
        mResampler->resample(pTmpBuffer, kOutputFrameCount, this);

        if (mStopPending) {
            stop();
            mStopPending = false;
        }

        // Change resampler and retry if format change happened
        if (mFormatChanged) {
            mFormatChanged = false;
            checkAndSetResampler();
            free(pTmpBuffer);
            // Retry without options: any seek was already latched above.
            return read(buffer_out, NULL);
        }

        // Create a new MediaBuffer
        int32_t outBufferSize = kOutputFrameCount * 2 * sizeof(int16_t);
        MediaBuffer* outBuffer = new MediaBuffer(outBufferSize);

        // Convert back to 2 channels and 16 bits
        ditherAndClamp(
                (int32_t *)((uint8_t*)outBuffer->data() + outBuffer->range_offset()),
                pTmpBuffer, kOutputFrameCount);
        free(pTmpBuffer);

        // Compute and set the new timestamp
        sp<MetaData> to = outBuffer->meta_data();
        // Divisor: sample rate * 2 channels * 2 bytes per 16-bit sample.
        int64_t totalOutDurationUs = (mAccuOutBufferSize * 1000000) / (mOutputSampleRate * 2 * 2);
        int64_t timeUs = mInitialTimeStampUs + totalOutDurationUs;
        to->setInt64(kKeyTime, timeUs);

        // update the accumulate size
        mAccuOutBufferSize += outBufferSize;
        *buffer_out = outBuffer;
    } else {
        // Resampling not required. Read and pass-through.
        MediaBuffer *aBuffer;
        status_t err = mSource->read(&aBuffer, options);
        if (err != OK) {
            ALOGV("read returns err = %d", err);
        }

        if (err == INFO_FORMAT_CHANGED) {
            checkAndSetResampler();
            return read(buffer_out, NULL);
        }

        // EOS or some other error
        if (err != OK) {
            stop();
            *buffer_out = NULL;
            return err;
        }
        *buffer_out = aBuffer;
    }

    return OK;
}
Exemple #6
0
// Assembles the next logical Vorbis packet from the Ogg page lacing values
// and returns it in *out (caller takes ownership of the MediaBuffer).
//
// A packet may span pages: lacing values of 255 mean "continued", and the
// first lacing value < 255 terminates the packet. When the current page is
// exhausted, the next page is read; if its continuation flag
// (mFlags & 1) is clear, any partially assembled data is emitted as a
// complete packet.
//
// Returns OK with *out set, ERROR_IO on a short read, or
// ERROR_END_OF_STREAM / readPage's error when no further pages exist.
status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
    *out = NULL;

    MediaBuffer *buffer = NULL;
    int64_t timeUs = -1;

    for (;;) {
        // Sum lacing values from mNextLaceIndex until a value < 255
        // terminates the packet (or the page runs out of segments).
        size_t i;
        size_t packetSize = 0;
        bool gotFullPacket = false;
        for (i = mNextLaceIndex; i < mCurrentPage.mNumSegments; ++i) {
            uint8_t lace = mCurrentPage.mLace[i];

            packetSize += lace;

            if (lace < 255) {
                gotFullPacket = true;
                ++i;
                break;
            }
        }

        if (mNextLaceIndex < mCurrentPage.mNumSegments) {
            // File offset of this packet's payload: 27-byte page header plus
            // one lacing byte per segment, plus all preceding segments.
            off64_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments;
            for (size_t j = 0; j < mNextLaceIndex; ++j) {
                dataOffset += mCurrentPage.mLace[j];
            }

            // Grow the assembly buffer, carrying over partial data from the
            // previous page if the packet is being continued.
            size_t fullSize = packetSize;
            if (buffer != NULL) {
                fullSize += buffer->range_length();
            }
            MediaBuffer *tmp = new MediaBuffer(fullSize);
            if (buffer != NULL) {
                memcpy(tmp->data(), buffer->data(), buffer->range_length());
                tmp->set_range(0, buffer->range_length());
                buffer->release();
            } else {
                // XXX Not only is this not technically the correct time for
                // this packet, we also stamp every packet in this page
                // with the same time. This needs fixing later.

                if (mVi.rate) {
                    // Rate may not have been initialized yet if we're currently
                    // reading the configuration packets...
                    // Fortunately, the timestamp doesn't matter for those.
#ifdef MTK_AOSP_ENHANCEMENT
                    timeUs = (mCurrentPage.mGranulePosition - mCurrentPageSamples) * 1000000ll / mVi.rate;//mPrevGranulePosition
#else
                    timeUs = mCurrentPage.mGranulePosition * 1000000ll / mVi.rate;
#endif
                }
                tmp->set_range(0, 0);
            }
            buffer = tmp;

            ssize_t n = mSource->readAt(
                    dataOffset,
                    (uint8_t *)buffer->data() + buffer->range_length(),
                    packetSize);

            if (n < (ssize_t)packetSize) {
                ALOGV("failed to read %zu bytes at 0x%016llx, got %zd bytes",
                     packetSize, dataOffset, n);
                // Fix: release the partially assembled buffer before
                // bailing out -- the original leaked it on this path.
                buffer->release();
                buffer = NULL;
                return ERROR_IO;
            }

            buffer->set_range(0, fullSize);

            mNextLaceIndex = i;

            if (gotFullPacket) {
                // We've just read the entire packet.

                if (timeUs >= 0) {
                    buffer->meta_data()->setInt64(kKeyTime, timeUs);
                }

                if (mFirstPacketInPage) {
                    buffer->meta_data()->setInt32(
                            kKeyValidSamples, mCurrentPageSamples);
                    mFirstPacketInPage = false;
                }

                *out = buffer;

                return OK;
            }

            // fall through, the buffer now contains the start of the packet.
        }

        CHECK_EQ(mNextLaceIndex, mCurrentPage.mNumSegments);

        // Advance to the next page.
        mOffset += mCurrentPageSize;
        ssize_t n = readPage(mOffset, &mCurrentPage);

        if (n <= 0) {
            if (buffer) {
                buffer->release();
                buffer = NULL;
            }

            ALOGV("readPage returned %zd", n);

            return n < 0 ? n : (status_t)ERROR_END_OF_STREAM;
        }

        mCurrentPageSamples =
            mCurrentPage.mGranulePosition - mPrevGranulePosition;
        mFirstPacketInPage = true;

#ifdef MTK_AOSP_ENHANCEMENT
        // Guard against bogus (huge) granule positions poisoning the
        // per-page sample count.
        if (mPrevGranulePosition > 0xFFFFFFFFFFFF)
        {
            mCurrentPageSamples = 0;
            SXLOGD("revise the timestamp to page granule position");
        }
#endif

        mPrevGranulePosition = mCurrentPage.mGranulePosition;

        mCurrentPageSize = n;
        mNextLaceIndex = 0;

        if (buffer != NULL) {
            if ((mCurrentPage.mFlags & 1) == 0) {
                // This page does not continue the packet, i.e. the packet
                // is already complete.

                if (timeUs >= 0) {
                    buffer->meta_data()->setInt64(kKeyTime, timeUs);
                }

                buffer->meta_data()->setInt32(
                        kKeyValidSamples, mCurrentPageSamples);
                mFirstPacketInPage = false;

                *out = buffer;

                return OK;
            }
        }
    }
}
Exemple #7
0
int AudioFDK::Update()
{
	LOGTRACE("%s", __func__);
	LOGTHREAD("Audio Update Thread Running - waiting = %s", mWaiting ? "true" : "false");

	// Check to see if there is a target state on the queue.
	if (targetStateCount() > 0)
	{
		// If there is at least one target state, handle that one state, and then return if it makes sense to do so,
		// or allow it to run through the rest of the update, if that makes sense for the state.
		// Additional items on the queue will be handled the next time through.
		TargetState ts = popTargetState();
		switch (ts.state)
		{
		case STOPPED:
			if (doStop(ts.data))
			{
				LOGI("Stopped: state=%s", getStateString(mPlayState));
				return AUDIOTHREAD_CONTINUE;
			}
			break;
		case PAUSED:
			{
				// Pause the java track here so that we aren't in the middle of feeding it data when we pause.
				JNIEnv* env;
				if (gHLSPlayerSDK->GetEnv(&env))
					env->CallNonvirtualVoidMethod(mTrack, mCAudioTrack, mPause);
				SetState(PAUSED, __func__);
			}
			break;
		}
	}

	if (mWaiting) return AUDIOTHREAD_WAIT;
	if (mPlayState != PLAYING)
	{
		while (mPlayState == INITIALIZED)
		{
			LOGI("Audio Thread initialized. Waiting to start | semPause.count = %d", semPause.count);
			sem_wait(&semPause);
			return AUDIOTHREAD_CONTINUE; // Make sure we check the state queue, before continuing
		}

		if (mPlayState == PAUSED)
		{
			LOGI("Pausing Audio Thread: state = PAUSED | semPause.count = %d", semPause.count );


			sem_wait(&semPause);
			return AUDIOTHREAD_CONTINUE; // Make sure we check the state queue, before continuing
		}

		if (mPlayState == SEEKING)
		{
			LOGI("Pausing Audio Thread: state = SEEKING | semPause.count = %d", semPause.count );
			sem_wait(&semPause);
			LOGI("Resuming Audio Thread: state = %d | semPause.count = %d", mPlayState, semPause.count );
			return AUDIOTHREAD_CONTINUE; // Make sure we check the state queue, before continuing
		}

		if (mPlayState == STOPPED)
		{
			LOGI("mPlayState == STOPPED. Ending audio update thread!");
			return AUDIOTHREAD_FINISH; // We don't really want to add more stuff to the buffer
			// and potentially run past the end of buffered source data
			// if we're not actively playing
		}
	}




	JNIEnv* env;
	if (!gHLSPlayerSDK->GetEnv(&env))
		return AUDIOTHREAD_FINISH; // If we don't have a java environment at this point, something has killed it,
	// so we better kill the thread.

	AutoLock updateLocker(&updateMutex, __func__);

	MediaBuffer* mediaBuffer = NULL;

	//LOGI("Reading to the media buffer");
	status_t res = OK;

	if(mAudioSource.get())
		res = mAudioSource->read(&mediaBuffer, NULL);
	else if(mAudioSource23.get())
		res = mAudioSource23->read(&mediaBuffer, NULL);
	else
	{
		res = OK;
	}

	if (res == OK && mTrack)
	{
		//LOGI("Finished reading from the media buffer");
		RUNDEBUG( {if (mediaBuffer) mediaBuffer->meta_data()->dumpToLog();} );
Exemple #8
0
status_t APESource::read(
    MediaBuffer **out, const ReadOptions *options)
{
    *out = NULL;
    uint32_t newframe = 0 , firstbyte = 0;

    ///LOGV("APESource::read");
    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    int32_t bitrate = 0;

    if (!mMeta->findInt32(kKeyBitRate, &bitrate)
            || !mMeta->findInt32(kKeySampleRate, &mSampleRate))
    {
        LOGI("no bitrate");
        return ERROR_UNSUPPORTED;
    }

    if (options != NULL && options->getSeekTo(&seekTimeUs, &mode))
    {

        {

            int64_t duration = 0;
            int64_t seektabletime = 0;

            if ((mTotalsample > 0) && (mTableOfContents[0] > 0) && (mSamplesPerFrame > 0)
                    && mMeta->findInt64(kKeyDuration, &duration))
            {
                ape_parser_ctx_t ape_ctx;
                uint32_t filepos, blocks_to_skip;
                ape_ctx.samplerate = mSampleRate;
                ape_ctx.blocksperframe = mSamplesPerFrame;
                ape_ctx.totalframes = mTotalFrame;
                ape_ctx.seektable = mTableOfContents;
                ape_ctx.firstframe = mTableOfContents[0];

                if (ape_calc_seekpos_by_microsecond(&ape_ctx,
                                                    seekTimeUs,
                                                    &newframe,
                                                    &filepos,
                                                    &firstbyte,
                                                    &blocks_to_skip) < 0)
                {
                    LOGD("getseekto error exit");
                    return ERROR_UNSUPPORTED;
                }

                mCurrentPos = filepos;
                mCurrentTimeUs = (int64_t)newframe * mSamplesPerFrame * 1000000ll / mSampleRate;

                LOGD("getseekto seekTimeUs=%lld, Actual time%lld, filepos%x,frame %d, seekbyte %d", seekTimeUs, mCurrentTimeUs, mCurrentPos, newframe, firstbyte);

            }
            else
            {
                LOGD("getseekto parameter error exit");
                return ERROR_UNSUPPORTED;
            }


        }

    }


    if ((mFileoffset != 0)
            && (mCurrentPos >= mFileoffset))
    {
        LOGD("APESource::readAt to end filesize %x curr: %x", mFileoffset, mCurrentPos);
        return ERROR_END_OF_STREAM;
    }

    MediaBuffer *buffer;
    status_t err = mGroup->acquire_buffer(&buffer);

    if (err != OK)
    {
        LOGD("APESource::acquire_buffer fail");
        return err;
    }

    size_t frame_size;
    frame_size = kMaxFrameSize;
    ssize_t n = 0;	

#ifdef ENABLE_MMRIOTHREAD
    if (options != NULL && options->getSeekTo(&seekTimeUs, &mode))
    {
        ResetReadioPtr(mCurrentPos);        
    }
    n = ReadBitsteam(buffer->data(), frame_size);
#else    
    ///frame_size = mMaxBufferSize;
    n = mDataSource->readAt(mCurrentPos, buffer->data(), frame_size);    
#endif

    ///LOGE("APESource::readAt  %x, %x, %d, %d, %d, %d, %d", mCurrentPos, buffer->data(), buffer->size(), mTotalsample, bitrate, mSampleRate, frame_size);
    //ssize_t n = mDataSource->readAt(mCurrentPos, buffer->data(), frame_size);

    if ((mFileoffset != 0)
            && ((mCurrentPos + n) >= mFileoffset))
    {
        frame_size = mFileoffset - mCurrentPos;
        memset(buffer->data() + frame_size, 0, n - frame_size);
    }
    else if ((n < (ssize_t)frame_size)
             && (n > 0))
    {
        frame_size = n;
        off64_t fileoffset = 0;
        mDataSource->getSize(&fileoffset);
        LOGD("APESource::readAt not enough read %d frmsize %x, filepos %x, filesize %x", n, frame_size, mCurrentPos + frame_size, fileoffset);

        //if ((mCurrentPos + frame_size) >= fileoffset
        //        && (mCurrentPos + frame_size) < mTableOfContents[mTotalFrame - 1])
        if ((mCurrentPos + frame_size) >= fileoffset && (mCurrentPos + frame_size) < mTableOfContents[mSt_bound- 1])
        {
            memset(buffer->data(), 0, buffer->size());
            /// for this file is not complete error, frame buffer should not transfer to avoid decoding noise data.
            LOGD("APESource::file is not enough to end --> memset");
        }
    }
    else if (n <= 0)
    {
        buffer->release();
        buffer = NULL;
        LOGD("APESource::readAt EOS filepos %x frmsize %d", mCurrentPos, frame_size);
        return ERROR_END_OF_STREAM;
    }

    buffer->set_range(0, frame_size);

    if (options != NULL && options->getSeekTo(&seekTimeUs, &mode))
    {
        buffer->meta_data()->setInt64(kKeyTime, mCurrentTimeUs);
        buffer->meta_data()->setInt32(kKeyNemFrame, newframe);
        buffer->meta_data()->setInt32(kKeySeekByte, firstbyte);
		*mSeekbyte = firstbyte;//for ape seek on acodec
		*mNewframe = newframe;//for ape seek on acodec
    }
	buffer->meta_data()->setInt64(kKeyTime, mCurrentTimeUs);

    buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);

    mCurrentPos += frame_size;
    mCurrentTimeUs += (int64_t)(frame_size * 8000000ll) / bitrate ;
    
#ifdef ENABLE_MMRIOTHREAD    
    UpdateReadPtr(frame_size);
#endif

    *out = buffer;

    ///LOGE("APESource::kKeyTime done %x %lld", mCurrentPos, mCurrentTimeUs);
    return OK;
}
// Exercises a decoder component's seek behaviour: alternates between
// linear reads and random (or deliberately beyond-EOS) seeks, and checks
// that the decoder returns a buffer whose timestamp matches the sync
// frame an independent reference source reports for the same position.
status_t Harness::testSeek(
        const char *componentName, const char *componentRole) {
    bool isEncoder =
        !strncmp(componentRole, "audio_encoder.", 14)
        || !strncmp(componentRole, "video_encoder.", 14);

    if (isEncoder) {
        // Not testing seek behaviour for encoders.

        printf("  * Not testing seek functionality for encoders.\n");
        return OK;
    }

    const char *mime = GetMimeFromComponentRole(componentRole);

    if (!mime) {
        printf("  * Cannot perform seek test with this componentRole (%s)\n",
               componentRole);

        return OK;
    }

    sp<MediaSource> source = CreateSourceForMime(mime);

    if (source == NULL) {
        printf("  * Unable to open test content for type '%s', "
               "skipping test of componentRole %s\n",
               mime, componentRole);

        return OK;
    }

    // Second, undecoded instance of the same content, used only to find
    // out where the nearest sync frame for a requested position lies.
    sp<MediaSource> seekSource = CreateSourceForMime(mime);
    // NOTE(review): 'source' was already NULL-checked above, so only the
    // seekSource half of this condition can still fire here.
    if (source == NULL || seekSource == NULL) {
        return UNKNOWN_ERROR;
    }

    CHECK_EQ(seekSource->start(), (status_t)OK);

    sp<MediaSource> codec = OMXCodec::Create(
            mOMX, source->getFormat(), false /* createEncoder */,
            source, componentName);

    CHECK(codec != NULL);

    CHECK_EQ(codec->start(), (status_t)OK);

    int64_t durationUs;
    CHECK(source->getFormat()->findInt64(kKeyDuration, &durationUs));

    ALOGI("stream duration is %lld us (%.2f secs)",
         durationUs, durationUs / 1E6);

    static const int32_t kNumIterations = 5000;

    // We are always going to seek beyond EOS in the first iteration (i == 0)
    // followed by a linear read for the second iteration (i == 1).
    // After that it's all random.
    for (int32_t i = 0; i < kNumIterations; ++i) {
        int64_t requestedSeekTimeUs;
        int64_t actualSeekTimeUs;
        MediaSource::ReadOptions options;

        double r = uniform_rand();

        if ((i == 1) || (i > 0 && r < 0.5)) {
            // 50% chance of just continuing to decode from last position.

            // A negative requested time encodes "linear read" below.
            requestedSeekTimeUs = -1;

            ALOGI("requesting linear read");
        } else {
            if (i == 0 || r < 0.55) {
                // 5% chance of seeking beyond end of stream.
                // (we only reach this branch when r >= 0.5, so r < 0.55
                // amounts to 5% overall)

                requestedSeekTimeUs = durationUs;

                ALOGI("requesting seek beyond EOF");
            } else {
                requestedSeekTimeUs =
                    (int64_t)(uniform_rand() * durationUs);

                ALOGI("requesting seek to %lld us (%.2f secs)",
                     requestedSeekTimeUs, requestedSeekTimeUs / 1E6);
            }

            // Ask the reference source where this seek actually lands;
            // actualSeekTimeUs stays -1 if the seek goes past EOS.
            MediaBuffer *buffer = NULL;
            options.setSeekTo(
                    requestedSeekTimeUs, MediaSource::ReadOptions::SEEK_NEXT_SYNC);

            if (seekSource->read(&buffer, &options) != OK) {
                CHECK(buffer == NULL);
                actualSeekTimeUs = -1;
            } else {
                CHECK(buffer != NULL);
                CHECK(buffer->meta_data()->findInt64(kKeyTime, &actualSeekTimeUs));
                CHECK(actualSeekTimeUs >= 0);

                buffer->release();
                buffer = NULL;
            }

            ALOGI("nearest keyframe is at %lld us (%.2f secs)",
                 actualSeekTimeUs, actualSeekTimeUs / 1E6);
        }

        status_t err;
        MediaBuffer *buffer;
        // Read from the decoder under test ('options' still carries the
        // seek request in the non-linear case), skipping format-change
        // notifications and spurious zero-length buffers.
        for (;;) {
            err = codec->read(&buffer, &options);
            options.clearSeekTo();
            if (err == INFO_FORMAT_CHANGED) {
                CHECK(buffer == NULL);
                continue;
            }
            if (err == OK) {
                CHECK(buffer != NULL);
                if (buffer->range_length() == 0) {
                    buffer->release();
                    buffer = NULL;
                    continue;
                }
            } else {
                CHECK(buffer == NULL);
            }

            break;
        }

        if (requestedSeekTimeUs < 0) {
            // Linear read.
            if (err != OK) {
                CHECK(buffer == NULL);
            } else {
                CHECK(buffer != NULL);
                buffer->release();
                buffer = NULL;
            }
        } else if (actualSeekTimeUs < 0) {
            // Seek went beyond EOS: the decoder must report end-of-stream.
            EXPECT(err != OK,
                   "We attempted to seek beyond EOS and expected "
                   "ERROR_END_OF_STREAM to be returned, but instead "
                   "we got a valid buffer.");
            EXPECT(err == ERROR_END_OF_STREAM,
                   "We attempted to seek beyond EOS and expected "
                   "ERROR_END_OF_STREAM to be returned, but instead "
                   "we found some other error.");
            CHECK_EQ(err, (status_t)ERROR_END_OF_STREAM);
            CHECK(buffer == NULL);
        } else {
            // Normal seek: the decoded buffer's timestamp must be close
            // to the sync-frame position the reference source reported.
            EXPECT(err == OK,
                   "Expected a valid buffer to be returned from "
                   "OMXCodec::read.");
            CHECK(buffer != NULL);

            int64_t bufferTimeUs;
            CHECK(buffer->meta_data()->findInt64(kKeyTime, &bufferTimeUs));
            if (!CloseEnough(bufferTimeUs, actualSeekTimeUs)) {
                printf("\n  * Attempted seeking to %lld us (%.2f secs)",
                       requestedSeekTimeUs, requestedSeekTimeUs / 1E6);
                printf("\n  * Nearest keyframe is at %lld us (%.2f secs)",
                       actualSeekTimeUs, actualSeekTimeUs / 1E6);
                printf("\n  * Returned buffer was at %lld us (%.2f secs)\n\n",
                       bufferTimeUs, bufferTimeUs / 1E6);

                buffer->release();
                buffer = NULL;

                CHECK_EQ(codec->stop(), (status_t)OK);

                return UNKNOWN_ERROR;
            }

            buffer->release();
            buffer = NULL;
        }
    }

    CHECK_EQ(codec->stop(), (status_t)OK);

    return OK;
}
// Pulls the next decoded video buffer from mVideoSource (optionally
// seeking first) and converts it into aFrame. Returns true only when the
// read itself succeeded; a buffer with no usable metadata or a failed
// conversion yields false.
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs)
{
  MOZ_ASSERT(aSeekTimeUs >= -1);

  if (!mVideoSource.get())
    return false;

  // Drop the previously held buffer before requesting a new one.
  ReleaseVideoBuffer();

  status_t err;
  if (aSeekTimeUs == -1) {
    err = mVideoSource->read(&mVideoBuffer);
  } else {
    MediaSource::ReadOptions readOpts;
    readOpts.setSeekTo(aSeekTimeUs);
    err = mVideoSource->read(&mVideoBuffer, &readOpts);
  }

  if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info and retry the read.
    LOG("mVideoSource INFO_FORMAT_CHANGED");
    return SetVideoFormat() ? ReadVideo(aFrame, aSeekTimeUs) : false;
  }

  if (err != OK) {
    if (err == ERROR_END_OF_STREAM) {
      LOG("mVideoSource END_OF_STREAM");
    } else {
      LOG("mVideoSource ERROR %#x", err);
    }
    return false;
  }

  // Empty buffers are tolerated: the read succeeded, so report true.
  if (mVideoBuffer->range_length() > 0) {
    int64_t timeUs;
    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
      LOG("no frame time");
      return false;
    }

    if (timeUs < 0) {
      LOG("frame time %lld must be nonnegative", timeUs);
      return false;
    }

    // Missing sync-frame metadata is treated as "not a keyframe".
    int32_t keyFrame;
    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
      keyFrame = 0;
    }

    char *frameData =
        reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
    size_t frameLength = mVideoBuffer->range_length();

    if (!ToVideoFrame(aFrame, timeUs, frameData, frameLength, keyFrame)) {
      return false;
    }
  }

  return true;
}
void NuPlayer::GenericSource::readBuffer(
        bool audio, int64_t seekTimeUs, int64_t *actualTimeUs) {
    Track *track = audio ? &mAudioTrack : &mVideoTrack;
    CHECK(track->mSource != NULL);

    if (actualTimeUs) {
        *actualTimeUs = seekTimeUs;
    }

    MediaSource::ReadOptions options;

    bool seeking = false;

    if (seekTimeUs >= 0) {
        options.setSeekTo(seekTimeUs);
        seeking = true;
    }

    for (;;) {
        MediaBuffer *mbuf;
        status_t err = track->mSource->read(&mbuf, &options);

        options.clearSeekTo();

        if (err == OK) {
            size_t outLength = mbuf->range_length();

            if (audio && mAudioIsVorbis) {
                outLength += sizeof(int32_t);
            }

            sp<ABuffer> buffer = new ABuffer(outLength);

            memcpy(buffer->data(),
                   (const uint8_t *)mbuf->data() + mbuf->range_offset(),
                   mbuf->range_length());

            if (audio && mAudioIsVorbis) {
                int32_t numPageSamples;
                if (!mbuf->meta_data()->findInt32(
                            kKeyValidSamples, &numPageSamples)) {
                    numPageSamples = -1;
                }

                memcpy(buffer->data() + mbuf->range_length(),
                       &numPageSamples,
                       sizeof(numPageSamples));
            }

            int64_t timeUs;
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));

            buffer->meta()->setInt64("timeUs", timeUs);

            if (actualTimeUs) {
                *actualTimeUs = timeUs;
            }

            mbuf->release();
            mbuf = NULL;

            if (seeking) {
                track->mPackets->queueDiscontinuity(
                        ATSParser::DISCONTINUITY_SEEK, NULL);
            }

            track->mPackets->queueAccessUnit(buffer);
            break;
        } else if (err == INFO_FORMAT_CHANGED) {
#if 0
            track->mPackets->queueDiscontinuity(
                    ATSParser::DISCONTINUITY_FORMATCHANGE, NULL);
#endif
        } else {
            track->mPackets->signalEOS(err);
            break;
        }
    }
}
Exemple #12
0
/**
 * Pulls 16-bit PCM buffers from the media source, encodes them to MP3
 * with LAME, and writes the compressed stream (including the encoder
 * flush output and an ID3v1 tag) to the audio sink.
 *
 * @param pMediaSource_in  started-on-demand PCM source (interleaved int16)
 * @param pAudioSink_out   sink receiving the encoded MP3 byte stream
 * @return OK on success, an error code otherwise
 */
int
CEncoderLame::encode(const sp<IMediaSource>& pMediaSource_in, const sp<IAudioSink>& pAudioSink_out)
{
    AUTO_LOG();

    CHECK_PTR_EXT(m_pGobalFlags, BAD_VALUE);
    CHECK_PTR_EXT(pMediaSource_in, BAD_VALUE);
    CHECK_PTR_EXT(pAudioSink_out, BAD_VALUE);

    sp<MetaData>  pMeta = pMediaSource_in->getFormat();
    CHECK_PTR_EXT(pMeta,  BAD_VALUE);

    int32_t iChannelNum = 0;
    bool chk = pMeta->findInt32(kKeyChannelCount, &iChannelNum);
    CHECK_IS_EXT((true == chk), UNKNOWN_ERROR);
    int  ret = lame_set_num_channels(m_pGobalFlags, iChannelNum);
    CHECK_IS_EXT((ret == OK), ret);

    // only support one channel and two channels
    CHECK_IS_EXT(((1 == iChannelNum) || (2 == iChannelNum)), INVALID_OPERATION);

    ret = pMediaSource_in->start();
    CHECK_IS_EXT((ret == OK), ret);

    MediaBuffer* pBuf = NULL;

    while (OK == (pMediaSource_in->read(&pBuf, NULL))) {
        if (pBuf == NULL) {
            break;
        }

        if ((pBuf->data() == NULL) || (pBuf->range_length() == 0)) {
            pBuf->release();
            pBuf = NULL;
            continue;
        }

        // Deinterleave by walking the buffer backwards from its end.
        int16_t *pOrg = (int16_t *)((const char *)pBuf->data() + pBuf->range_offset());
        ssize_t iSamplesRead = pBuf->range_length() / 2;  // bytes -> int16 samples
        pOrg += iSamplesRead;
        iSamplesRead /= iChannelNum;                      // samples -> frames per channel

        CHECK_NE(iSamplesRead, 0);

        // Worst-case MP3 output size per the LAME docs: 1.25*n + 7200.
        int iMP3BufSize = 1.25 * iSamplesRead + 7200;
        short int*     pPCMBufL = new short int[iSamplesRead];
        short int*     pPCMBufR = new short int[iSamplesRead];
        unsigned char* pMP3Buf  = new unsigned char[iMP3BufSize];

        if (iChannelNum == 2) {
            for (ssize_t i = iSamplesRead; --i >= 0;) {
                pPCMBufR[i] = *--pOrg;
                pPCMBufL[i] = *--pOrg;
            }
        } else if (iChannelNum == 1) {
            memset(pPCMBufR, 0, iSamplesRead * sizeof(short int));

            for (ssize_t i = iSamplesRead; --i >= 0;) {
                pPCMBufL[i] = *--pOrg;
            }
        } else {
            // EMPTY (channel count already validated above)
        }

        int iOutSize = lame_encode_buffer(m_pGobalFlags, pPCMBufL, pPCMBufR, iSamplesRead, pMP3Buf, iMP3BufSize);
        int iWriteSize = iOutSize;

        if (iOutSize > 0) {
            iWriteSize = pAudioSink_out->write(pMP3Buf, iOutSize);
        }

        // BUGFIX: these were allocated with new[], so they must be
        // released with delete[] (plain delete is undefined behavior).
        delete[] pMP3Buf;
        delete[] pPCMBufR;
        delete[] pPCMBufL;

        pBuf->release();
        pBuf = NULL;

        CHECK_IS_EXT((iOutSize == iWriteSize), UNKNOWN_ERROR);
    }

    // last frame may remain
    {
        unsigned char* pMP3Buf  = new unsigned char[LAME_MAXMP3BUFFER];
        // BUGFIX: sizeof(pMP3Buf) is the size of a *pointer* (4/8 bytes),
        // not the buffer capacity — pass the real capacity instead.
        int iOutSize = lame_encode_flush(m_pGobalFlags, pMP3Buf, LAME_MAXMP3BUFFER);
        int iWriteSize = iOutSize;

        if (iOutSize > 0) {
            iWriteSize = pAudioSink_out->write(pMP3Buf, iOutSize);
        }

        delete[] pMP3Buf;  // BUGFIX: delete[] for new[]

        CHECK_IS_EXT((iOutSize == iWriteSize), UNKNOWN_ERROR);
    }

    // write the ID3v1 tag (fixed 128-byte structure)
    {
        const size_t kId3v1TagSize = 128;
        unsigned char* pMP3Buf = new unsigned char[kId3v1TagSize];
        // BUGFIX: same sizeof-on-a-pointer bug — with sizeof(pMP3Buf) the
        // size check below could never pass, so the tag was never written.
        int iOutSize = lame_get_id3v1_tag(m_pGobalFlags, pMP3Buf, kId3v1TagSize);
        int iWriteSize = iOutSize;

        if ((iOutSize > 0) && (((size_t)iOutSize) <= kId3v1TagSize)) {
            iWriteSize = pAudioSink_out->write(pMP3Buf, iOutSize);
        }

        delete[] pMP3Buf;  // BUGFIX: delete[] for new[]

        CHECK_IS_EXT((iOutSize == iWriteSize), UNKNOWN_ERROR);
    }

    RETURN(OK);
}
Exemple #13
0
// Blocking read: waits until an access unit is queued (or EOS), then
// hands the oldest one back as a MediaBuffer, reporting queued
// discontinuities and format changes to the caller.
// NOTE(review): the '#if' matching the '#else' below precedes the
// visible region; the first signature is compiled when that condition
// holds, the second (which ignores its ReadOptions argument) otherwise.
status_t AnotherPacketSource::read(
    MediaBuffer **out, const ReadOptions *options) {
#else
status_t AnotherPacketSource::read(
    MediaBuffer **out, const ReadOptions *) {
#endif
    *out = NULL;

    // Block until a buffer is queued or EOS has been signalled.
    Mutex::Autolock autoLock(mLock);
    while (mEOSResult == OK && mBuffers.empty()) {
        mCondition.wait(mLock);
    }

    if (!mBuffers.empty()) {
        const sp<ABuffer> buffer = *mBuffers.begin();

#ifndef ANDROID_DEFAULT_CODE
        // Record the ADU sequence number of the buffer being consumed.
        m_uiNextAduSeqNum = buffer->int32Data();
#endif
        mBuffers.erase(mBuffers.begin());

        // A discontinuity marker is returned to the caller instead of
        // data; a format-change discontinuity also drops the cached format.
        int32_t discontinuity;
        if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
            if (wasFormatChange(discontinuity)) {
                mFormat.clear();
            }

            return INFO_DISCONTINUITY;
        }

        sp<RefBase> object;
        if (buffer->meta()->findObject("format", &object)) {
            mFormat = static_cast<MetaData*>(object.get());
        }

        int64_t timeUs;
        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

        // Wrap the ABuffer in a MediaBuffer for the caller.
        MediaBuffer *mediaBuffer = new MediaBuffer(buffer);

        mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs);
#ifndef ANDROID_DEFAULT_CODE
        // Propagate the "invalid timestamp" marker, if present.
        int32_t fgInvalidtimeUs=false;
        if(buffer->meta()->findInt32("invt", &fgInvalidtimeUs))
        {
            mediaBuffer->meta_data()->setInt32(kInvalidKeyTime, fgInvalidtimeUs);
        }

        // Forward a pending seek target so downstream can trim output.
        int64_t seekTimeUs;
        ReadOptions::SeekMode seekMode;
        if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
            mediaBuffer->meta_data()->setInt64(kKeyTargetTime, seekTimeUs);
        }
#endif

        *out = mediaBuffer;
        return OK;
    }

    // Queue drained and EOS signalled: report the recorded final status.
    return mEOSResult;
}
Exemple #14
0
/**
 * Decoder worker thread: pulls decoded buffers from the stagefright
 * decoder, wraps them into AVFrames, and queues them on the bounded
 * output frame queue until the decoder reports an unknown status or a
 * stop is requested via stopDecoderThread.
 *
 * @param arg  the AVCodecContext whose priv_data is a StagefrightContext
 * @return always 0; sets decoderThreadExited on the way out
 */
void* decoderThread(void *arg)
{
    AVCodecContext *avctx = (AVCodecContext*)arg;
    StagefrightContext *stagefrightContext = (StagefrightContext*)avctx->priv_data;
    Frame* frame;
    MediaBuffer *mediaBuffer;
    int32_t width, height;
    int status;
    int64_t frameTimestamp = 0;

    do {
        mediaBuffer = NULL;
        frame = NULL;

        status = stagefrightContext->decoder->read(&mediaBuffer);

        switch(status) {
        case OK: {
            // Query the decoder's current output dimensions.
            sp<MetaData> imageFormat = stagefrightContext->decoder->getFormat();
            imageFormat->findInt32(kKeyWidth , &width);
            imageFormat->findInt32(kKeyHeight, &height);

            frame = allocFrame(0);
            if (!frame) {
                av_log(avctx, AV_LOG_ERROR, "decoderThread: Can't allocate frame in decoder thread\n");
                // BUGFIX: release the decoded buffer on this error path
                // (it was previously leaked).
                mediaBuffer->release();
                break;
            }
            frame->ffmpegFrame = av_frame_alloc();
            if(frame->ffmpegFrame == NULL) {
                av_log(avctx, AV_LOG_ERROR, "decoderThread: Can't allocate AVFrame in decoder thread\n");
                freeFrame(frame, false);
                mediaBuffer->release();  // BUGFIX: was leaked on this path
                break;
            }

            // The OMX.SEC decoder doesn't signal the modified width/height
            if (stagefrightContext->decoderName                             &&
                    !strncmp(stagefrightContext->decoderName, "OMX.SEC", 7) &&
                    (width & 15 || height & 15)                             &&
                    ((width + 15)&~15) * ((height + 15)&~15) * 3/2 == mediaBuffer->range_length()) {

                width = (width + 15)&~15;
                height = (height + 15)&~15;
            }

            if (!avctx->width || !avctx->height || avctx->width > width || avctx->height > height) {
                avctx->width  = width;
                avctx->height = height;
            }

            // NOTE(review): avpicture_fill points the AVFrame's data
            // planes into mediaBuffer's storage, yet mediaBuffer is
            // released below while the frame stays queued. Confirm the
            // consumer copies the pixels before the buffer is recycled.
            if(avpicture_fill((AVPicture *)(frame->ffmpegFrame), (uint8_t*)mediaBuffer->data(), avctx->pix_fmt, width, height) < 0) {
                av_log(avctx, AV_LOG_ERROR, "decoderThread: Can't do avpicture_fill\n");
                freeFrame(frame, false);
                mediaBuffer->release();  // BUGFIX: was leaked on this path
                break;
            }

            frame->ffmpegFrame->format = avctx->pix_fmt;
            frame->ffmpegFrame->height = height;
            frame->ffmpegFrame->width = width;
            frame->ffmpegFrame->channel_layout = 0;

            // Restore the pts/reordered_opaque recorded when the packet
            // was submitted, keyed by the buffer's kKeyTime value.
            if (mediaBuffer->meta_data()->findInt64(kKeyTime, &frameTimestamp) &&
                    stagefrightContext->frameIndexToTimestampMap->count(frameTimestamp) > 0) {

                frame->ffmpegFrame->pts = (*(stagefrightContext->frameIndexToTimestampMap))[frameTimestamp].pts;
                frame->ffmpegFrame->reordered_opaque = (*stagefrightContext->frameIndexToTimestampMap)[frameTimestamp].reordered_opaque;
                stagefrightContext->frameIndexToTimestampMap->erase(frameTimestamp);
            }

            // Poll for a free slot in the bounded output queue; exit the
            // loop with the mutex held once a slot is available.
            while (true) {
                pthread_mutex_lock(&stagefrightContext->outputQueueMutex);
                if (stagefrightContext->outputFrameQueue->size() >= 10) {
                    pthread_mutex_unlock(&stagefrightContext->outputQueueMutex);
                    usleep(10000);
                    continue;
                }
                break;
            }
            stagefrightContext->outputFrameQueue->push_back(frame);
            pthread_mutex_unlock(&stagefrightContext->outputQueueMutex);
            mediaBuffer->release();
            break;
        }
        case INFO_FORMAT_CHANGED:
            // Output format changed: drop the buffer and read again.
            if(mediaBuffer) mediaBuffer->release();
            freeFrame(frame, false);
            continue;
        default: {
            // Any other status ends the decode loop.
            if(mediaBuffer) mediaBuffer->release();
            freeFrame(frame, false);
            goto decoder_exit;
        }
        }
    } while (!stagefrightContext->stopDecoderThread);
decoder_exit:
    stagefrightContext->decoderThreadExited = true;
    return 0;
}
Exemple #15
0
// Reads one AMR frame from the data source into a MediaBuffer.
// Seeks are resolved by walking the table of (frame count, frame size)
// runs to convert a frame index into a byte offset.
status_t AMRSource::read(
    MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        uint64_t seekFrame = seekTimeUs / 20000ll;  // 20ms per frame.
        mCurrentTimeUs = seekFrame * 20000ll;
        uint32_t framesize=0;
        uint64_t offset = 0, numframes = 0;
        // Convert the 0-based frame index to a 1-based count for the
        // table walk below.
        seekFrame = seekFrame + 1; //why seekframe+1, since the array starts from zero
        LOGI("seekframe %lld", seekFrame);
        // Accumulate byte offsets run by run until the run containing
        // the target frame is reached.
        for (List<AMRFrameTableEntry>::iterator it = mAMRFrameTableEntries.begin();
                it != mAMRFrameTableEntries.end(); ++it) {

            numframes = it->mNumFrames;
            framesize = it->mFrameSize;
            if(seekFrame >= mTotalFrames)
            {
                LOGE("seek beyond EOF");
                return ERROR_OUT_OF_RANGE;
            }

            if(seekFrame > numframes)
            {
                // Target lies beyond this run: skip the whole run.
                offset = offset + (numframes * framesize);
                seekFrame = seekFrame - numframes;
                LOGV("> offset %lld seekFrame %lld numframes %lld framesize %d", offset, seekFrame, numframes, framesize);
            }
            else
            {
                // Target lies within this run.
                offset = offset + (seekFrame * framesize);
                LOGV("!> offset %lld numframes %lld framesize %d", offset, numframes, framesize);
                break;
            }
        }
        mOffset = offset;
    }

    // Read the 1-byte frame header to learn the frame type (and size).
    uint8_t header;
    ssize_t n = mDataSource->readAt(mOffset, &header, 1);

    if (n < 1) {
        return ERROR_END_OF_STREAM;
    }

    if (header & 0x83) {
        // Padding bits must be 0.

        LOGE("padding bits must be 0, header is 0x%02x", header);

        return ERROR_MALFORMED;
    }

    unsigned FT = (header >> 3) & 0x0f;

    if (FT > MAX_AMRMODE) {

        LOGE("illegal AMR frame type %d", FT);

        return ERROR_MALFORMED;
    }

    size_t frameSize = getFrameSize(mIsWide, FT);

    MediaBuffer *buffer;
    status_t err = mGroup->acquire_buffer(&buffer);
    if (err != OK) {
        return err;
    }

    // Read the whole frame (header included) into the buffer.
    n = mDataSource->readAt(mOffset, buffer->data(), frameSize);

    if (n != (ssize_t)frameSize) {
        buffer->release();
        buffer = NULL;

        return ERROR_IO;
    }

    buffer->set_range(0, frameSize);
    buffer->meta_data()->setInt64(kKeyTime, mCurrentTimeUs);
    buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);

    mOffset += frameSize;
    mCurrentTimeUs += 20000;  // Each frame is 20ms

    *out = buffer;

    return OK;
}
/**
 *******************************************************************************
 * @brief   Gets an access unit (AU) from the stream handler source.
 * @note    AU is the smallest possible amount of data to be decoded by decoder
 *
 * @param   context:       (IN) Context of the reader
 * @param   pStreamHandler (IN) The stream handler of the stream to make jump
 * @param   pAccessUnit    (I/O)Pointer to an access unit to fill with read data
 * @return    M4NO_ERROR        there is no error
 * @return    M4ERR_PARAMETER   at least one parameter is not properly set
 * @returns   M4ERR_ALLOC       memory allocation failed
 * @returns   M4WAR_NO_MORE_AU  there are no more access unit in the stream
 *******************************************************************************
*/
M4OSA_ERR VideoEditorMp3Reader_getNextAu(M4OSA_Context context,
        M4_StreamHandler *pStreamHandler, M4_AccessUnit *pAccessUnit) {
    VideoEditorMp3Reader_Context *pReaderContext =
        (VideoEditorMp3Reader_Context*)context;
    M4OSA_ERR err = M4NO_ERROR;
    M4SYS_AccessUnit* pAu;
    // BUGFIX: initialize to NULL. The return status of read() below is
    // not checked; if read() fails without writing its out-parameter,
    // the NULL test further down would read an uninitialized pointer.
    MediaBuffer *mAudioBuffer = NULL;
    MediaSource::ReadOptions options;

    ALOGV("VideoEditorMp3Reader_getNextAu start");
    M4OSA_DEBUG_IF1((pReaderContext == 0), M4ERR_PARAMETER,
        "VideoEditorMp3Reader_getNextAu: invalid context");
    M4OSA_DEBUG_IF1((pStreamHandler == 0), M4ERR_PARAMETER,
        "VideoEditorMp3Reader_getNextAu: invalid pointer to M4_StreamHandler");
    M4OSA_DEBUG_IF1((pAccessUnit == 0),    M4ERR_PARAMETER,
        "VideoEditorMp3Reader_getNextAu: invalid pointer to M4_AccessUnit");

    // Only the audio stream handler is supported by this reader.
    if (pStreamHandler == (M4_StreamHandler*)pReaderContext->\
        mAudioStreamHandler) {
        pAu = &pReaderContext->mAudioAu;
    } else {
        ALOGV("VideoEditorMp3Reader_getNextAu: StreamHandler is not known\n");
        return M4ERR_PARAMETER;
    }

    // Apply a pending seek request, if any.
    if (pReaderContext->mSeeking) {
        options.setSeekTo(pReaderContext->mSeekTime);
    }

    pReaderContext->mMediaSource->read(&mAudioBuffer, &options);

    if (mAudioBuffer != NULL) {
        // (Re)allocate the AU buffer if the incoming data does not fit,
        // rounding the size up to a 4-byte multiple.
        if ((pAu->dataAddress == NULL) ||
            (pAu->size < mAudioBuffer->range_length())) {
            if (pAu->dataAddress != NULL) {
                free((M4OSA_Int32*)pAu->dataAddress);
                pAu->dataAddress = NULL;
            }
            pAu->dataAddress = (M4OSA_Int32*)M4OSA_32bitAlignedMalloc(
                (mAudioBuffer->range_length() + 3) & ~0x3,
                M4READER_MP3, (M4OSA_Char*)"pAccessUnit->m_dataAddress" );

            if (pAu->dataAddress == NULL) {
                ALOGV("VideoEditorMp3Reader_getNextAu malloc failed");
                pReaderContext->mMediaSource->stop();
                pReaderContext->mMediaSource.clear();
                pReaderContext->mDataSource.clear();

                return M4ERR_ALLOC;
            }
        }
        pAu->size = mAudioBuffer->range_length();
        memcpy((M4OSA_MemAddr8)pAu->dataAddress,
            (const char *)mAudioBuffer->data() + mAudioBuffer->range_offset(),
            mAudioBuffer->range_length());

        mAudioBuffer->meta_data()->findInt64(kKeyTime, (int64_t*)&pAu->CTS);


        pAu->CTS = pAu->CTS / 1000; /*converting the microsec to millisec */
        pAu->DTS  = pAu->CTS;
        pAu->attribute = M4SYS_kFragAttrOk;
        mAudioBuffer->release();

        ALOGV("VideoEditorMp3Reader_getNextAu AU CTS = %ld",pAu->CTS);

        pAccessUnit->m_dataAddress = (M4OSA_Int8*) pAu->dataAddress;
        pAccessUnit->m_size = pAu->size;
        pAccessUnit->m_CTS = pAu->CTS;
        pAccessUnit->m_DTS = pAu->DTS;
        pAccessUnit->m_attribute = pAu->attribute;
    } else {
        // No buffer delivered: treat as end of stream.
        ALOGV("VideoEditorMp3Reader_getNextAu EOS reached.");
        pAccessUnit->m_size=0;
        err = M4WAR_NO_MORE_AU;
    }
    pAu->nbFrag = 0;

    // The seek request has been consumed (or was absent); clear it.
    options.clearSeekTo();
    pReaderContext->mSeeking = M4OSA_FALSE;
    mAudioBuffer = NULL;
    ALOGV("VideoEditorMp3Reader_getNextAu end");

    return err;
}
// Reads one AMR frame from the data source into a MediaBuffer.
// A pending seek is resolved via the sparse offset table (one entry per
// 50 frames) plus a frame-by-frame walk to the exact target frame.
status_t AMRSource::read(
        MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;

    int64_t seekTimeUs;
    ReadOptions::SeekMode seekMode;
    if (options != NULL && options->getSeekTo(&seekTimeUs, &seekMode)) {
        int64_t targetFrame = seekTimeUs / 20000ll;  // 20ms per frame.
        mCurrentTimeUs = targetFrame * 20000ll;

        // The offset table records the position of every 50th frame.
        size_t tableIndex = targetFrame < 0 ? 0 : targetFrame / 50;
        if (tableIndex >= mOffsetTableLength) {
            tableIndex = mOffsetTableLength - 1;
        }

        // Skip the file header (9 bytes for AMR-WB, 6 for AMR-NB).
        mOffset = mOffsetTable[tableIndex] + (mIsWide ? 9 : 6);

        // Walk frame-by-frame from the table entry to the target frame.
        for (size_t i = 0; i < targetFrame - tableIndex * 50; i++) {
            size_t size;
            status_t walkErr =
                getFrameSizeByOffset(mDataSource, mOffset, mIsWide, &size);
            if (walkErr != OK) {
                return walkErr;
            }
            mOffset += size;
        }
    }

    // Read the 1-byte frame header to determine the frame type.
    uint8_t header;
    if (mDataSource->readAt(mOffset, &header, 1) < 1) {
        return ERROR_END_OF_STREAM;
    }

    if (header & 0x83) {
        // The padding bits of a valid frame header must be zero.
        ALOGE("padding bits must be 0, header is 0x%02x", header);

        return ERROR_MALFORMED;
    }

    unsigned frameType = (header >> 3) & 0x0f;

    size_t frameSize = getFrameSize(mIsWide, frameType);
    if (frameSize == 0) {
        return ERROR_MALFORMED;
    }

    MediaBuffer *buffer;
    status_t err = mGroup->acquire_buffer(&buffer);
    if (err != OK) {
        return err;
    }

    // Read the whole frame (header included) into the buffer.
    ssize_t bytesRead = mDataSource->readAt(mOffset, buffer->data(), frameSize);
    if (bytesRead != (ssize_t)frameSize) {
        buffer->release();
        buffer = NULL;

        return ERROR_IO;
    }

    buffer->set_range(0, frameSize);
    buffer->meta_data()->setInt64(kKeyTime, mCurrentTimeUs);
    buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);

    mOffset += frameSize;
    mCurrentTimeUs += 20000;  // Each frame is 20ms

    *out = buffer;

    return OK;
}
Exemple #18
0
// Decodes a single video frame (a thumbnail, or the frame nearest
// 'frameTimeUs') using an OMX software/hardware decoder, converts it to
// RGB565 and returns it as a newly-allocated VideoFrame.
//
// Returns NULL on any failure (decoder instantiation/start, decode,
// unreadable frame, color conversion).  The caller owns the returned frame.
static VideoFrame *extractVideoFrameWithCodecFlags(
        OMXClient *client,
        const sp<MetaData> &trackMeta,
        const sp<MediaSource> &source,
        uint32_t flags,
        int64_t frameTimeUs,
        int seekMode) {
    // Validate the requested seek mode *before* instantiating and starting
    // the decoder; the previous code path returned after decoder->start()
    // without a matching stop(), leaking a running codec instance.
    if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
        seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {

        ALOGE("Unknown seek mode: %d", seekMode);
        return NULL;
    }

    MediaSource::ReadOptions::SeekMode mode =
            static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);

    sp<MediaSource> decoder =
        OMXCodec::Create(
                client->interface(), source->getFormat(), false, source,
                NULL, flags | OMXCodec::kClientNeedsFramebuffer);

    if (decoder.get() == NULL) {
        ALOGV("unable to instantiate video decoder.");

        return NULL;
    }

    status_t err = decoder->start();
    if (err != OK) {
        ALOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err);
        return NULL;
    }

    // Read one output buffer, ignore format change notifications
    // and spurious empty buffers.

    MediaSource::ReadOptions options;

    int64_t thumbNailTime;
    if (frameTimeUs < 0) {
        // No explicit time requested: use the track's designated thumbnail
        // time if present, else the very beginning.
        if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options.setSeekTo(thumbNailTime, mode);
    } else {
        thumbNailTime = -1;
        options.setSeekTo(frameTimeUs, mode);
    }

    MediaBuffer *buffer = NULL;
    do {
        if (buffer != NULL) {
            buffer->release();
            buffer = NULL;
        }
        err = decoder->read(&buffer, &options);
        // Only the first read should seek.
        options.clearSeekTo();
    } while (err == INFO_FORMAT_CHANGED
             || (buffer != NULL && buffer->range_length() == 0));

    if (err != OK) {
        CHECK_EQ(buffer, NULL);

        ALOGV("decoding frame failed.");
        decoder->stop();

        return NULL;
    }

    ALOGV("successfully decoded video frame.");

    // Secure/DRM decoders may produce buffers we are not allowed to read.
    int32_t unreadable;
    if (buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)
            && unreadable != 0) {
        ALOGV("video frame is unreadable, decoder does not give us access "
             "to the video data.");

        buffer->release();
        buffer = NULL;

        decoder->stop();

        return NULL;
    }

    int64_t timeUs;
    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
    if (thumbNailTime >= 0) {
        if (timeUs != thumbNailTime) {
            // Diagnostic only: the decoder delivered a nearby frame rather
            // than the exact thumbnail timestamp.
            const char *mime;
            CHECK(trackMeta->findCString(kKeyMIMEType, &mime));

            ALOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
                 thumbNailTime, timeUs, mime);
        }
    }

    sp<MetaData> meta = decoder->getFormat();

    int32_t width, height;
    CHECK(meta->findInt32(kKeyWidth, &width));
    CHECK(meta->findInt32(kKeyHeight, &height));

    // Without an explicit crop rect, the full decoded frame is visible.
    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!meta->findRect(
                kKeyCropRect,
                &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0;  // By default, no rotation
    }

    VideoFrame *frame = new VideoFrame;
    frame->mWidth = crop_right - crop_left + 1;
    frame->mHeight = crop_bottom - crop_top + 1;
    frame->mDisplayWidth = frame->mWidth;
    frame->mDisplayHeight = frame->mHeight;
    frame->mSize = frame->mWidth * frame->mHeight * 2;  // RGB565: 2 bytes/px
    frame->mData = new uint8_t[frame->mSize];
    frame->mRotationAngle = rotationAngle;

    int32_t displayWidth, displayHeight;
    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
        frame->mDisplayWidth = displayWidth;
    }
    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
        frame->mDisplayHeight = displayHeight;
    }

    int32_t srcFormat;
    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat));

    ColorConverter converter(
            (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);

#ifdef QCOM_HARDWARE
    // QCOM builds tolerate unsupported source formats and report an error
    // instead of aborting.
    if (converter.isValid()) {
        err = converter.convert(
            (const uint8_t *)buffer->data() + buffer->range_offset(),
            width, height,
            crop_left, crop_top, crop_right, crop_bottom,
            frame->mData,
            frame->mWidth,
            frame->mHeight,
            0, 0, frame->mWidth - 1, frame->mHeight - 1);
    }
    else {
        err = ERROR_UNSUPPORTED;
    }
#else
    CHECK(converter.isValid());

    err = converter.convert(
            (const uint8_t *)buffer->data() + buffer->range_offset(),
            width, height,
            crop_left, crop_top, crop_right, crop_bottom,
            frame->mData,
            frame->mWidth,
            frame->mHeight,
            0, 0, frame->mWidth - 1, frame->mHeight - 1);
#endif

    buffer->release();
    buffer = NULL;

    decoder->stop();

    if (err != OK) {
        ALOGE("Colorconverter failed to convert frame.");

        delete frame;
        frame = NULL;
    }

    return frame;
}
Exemple #19
0
// Initializes the extractor: reads and verifies the three mandatory Vorbis
// header packets, records where audio data begins, and (when the source is
// cheaply seekable) derives the exact duration from the final page's
// granule position.
//
// Returns OK on success, or the first error from packet reading /
// header verification.
status_t MyVorbisExtractor::init() {
    mMeta = new MetaData;
#ifdef MTK_AOSP_ENHANCEMENT
    if(mMeta.get() == NULL) return NO_MEMORY;
#endif
    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS);

    // The three mandatory Vorbis headers (identification, comment, setup)
    // carry packet-type bytes 1, 3 and 5 respectively; read and verify each
    // in turn, releasing every packet regardless of outcome.
    static const uint8_t kHeaderTypes[] = { 1, 3, 5 };
    for (size_t i = 0; i < sizeof(kHeaderTypes) / sizeof(kHeaderTypes[0]); ++i) {
        MediaBuffer *packet;
        status_t err;
        if ((err = readNextPacket(&packet)) != OK) {
            return err;
        }
        ALOGV("read packet of size %zu\n", packet->range_length());
        err = verifyHeader(packet, kHeaderTypes[i]);
        packet->release();
        packet = NULL;
        if (err != OK) {
            return err;
        }
    }

    // Audio data starts on the page following the last header page.
    mFirstDataOffset = mOffset + mCurrentPageSize;

    off64_t size;
    uint64_t lastGranulePosition;
    if (!(mSource->flags() & DataSource::kIsCachingDataSource)
            && mSource->getSize(&size) == OK
            && findPrevGranulePosition(size, &lastGranulePosition) == OK) {
        // Let's assume it's cheap to seek to the end.
        // The granule position of the final page in the stream will
        // give us the exact duration of the content, something that
        // we can only approximate using avg. bitrate if seeking to
        // the end is too expensive or impossible (live streaming).

        int64_t durationUs = lastGranulePosition * 1000000ll / mVi.rate;

        mMeta->setInt64(kKeyDuration, durationUs);

#ifndef MTK_AOSP_ENHANCEMENT
        buildTableOfContents();//move toc build to start()
#else
        mFileSize = size;
#endif

    }

    return OK;
}
// Decodes one 20ms AMR-NB frame from the upstream source into
// kNumSamplesPerFrame 16-bit PCM samples.
//
// On a seek request the partially-consumed input buffer is dropped and the
// output sample counter reset; the next buffer read from the source must
// then carry a fresh timestamp.  Returns OK with *out set, or propagates
// the source's error / ERROR_MALFORMED on inconsistent frame sizes.
status_t AMRNBDecoder::read(
        MediaBuffer **out, const ReadOptions *options) {
    status_t err;

    *out = NULL;

    int64_t seekTimeUs;
    if (options && options->getSeekTo(&seekTimeUs)) {
        CHECK(seekTimeUs >= 0);

        mNumSamplesOutput = 0;

        // Discard any input left over from before the seek.
        if (mInputBuffer) {
            mInputBuffer->release();
            mInputBuffer = NULL;
        }
    } else {
        seekTimeUs = -1;
    }

    if (mInputBuffer == NULL) {
        // 'options' is forwarded so the source performs the actual seek.
        err = mSource->read(&mInputBuffer, options);

        if (err != OK) {
            return err;
        }

        int64_t timeUs;
        if (mInputBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
            // New timestamp: re-anchor output timing.
            mAnchorTimeUs = timeUs;
            mNumSamplesOutput = 0;
        } else {
            // We must have a new timestamp after seeking.
            CHECK(seekTimeUs < 0);
        }
    }

    MediaBuffer *buffer;
    CHECK_EQ(mBufferGroup->acquire_buffer(&buffer), OK);

    const uint8_t *inputPtr =
        (const uint8_t *)mInputBuffer->data() + mInputBuffer->range_offset();

    // Byte 0 carries the frame type in bits 3..6; the payload follows.
    size_t numBytesRead =
        AMRDecode(mState,
          (Frame_Type_3GPP)((inputPtr[0] >> 3) & 0x0f),
          (UWord8 *)&inputPtr[1],
          static_cast<int16_t *>(buffer->data()),
          MIME_IETF);

    ++numBytesRead;  // Include the frame type header byte.

    buffer->set_range(0, kNumSamplesPerFrame * sizeof(int16_t));

    // NOTE: this check runs *after* AMRDecode already consumed the input;
    // it can only detect the over-read, not prevent it.
    if (numBytesRead > mInputBuffer->range_length()) {
        // This is bad, should never have happened, but did. Abort now.

        buffer->release();
        buffer = NULL;

        return ERROR_MALFORMED;
    }

    // Advance past the consumed frame; keep the buffer if data remains.
    mInputBuffer->set_range(
            mInputBuffer->range_offset() + numBytesRead,
            mInputBuffer->range_length() - numBytesRead);

    if (mInputBuffer->range_length() == 0) {
        mInputBuffer->release();
        mInputBuffer = NULL;
    }

    // Output timestamp = anchor + samples emitted since the anchor.
    buffer->meta_data()->setInt64(
            kKeyTime,
            mAnchorTimeUs
                + (mNumSamplesOutput * 1000000) / kSampleRate);

    mNumSamplesOutput += kNumSamplesPerFrame;

    *out = buffer;

    return OK;
}
 // Binder proxy: requests up to maxNumBuffers buffers from the remote
 // MediaSource in a single READMULTIPLE transaction and unmarshals the
 // reply into MediaBuffer objects appended to 'buffers'.
 // Returns the batch status trailing the reply, or a transport error.
 virtual status_t readMultiple(
         Vector<MediaBuffer *> *buffers, uint32_t maxNumBuffers, const ReadOptions *options) {
     ALOGV("readMultiple");
     // The caller must supply an empty vector to receive the buffers.
     if (buffers == NULL || !buffers->isEmpty()) {
         return BAD_VALUE;
     }
     Parcel data, reply;
     data.writeInterfaceToken(BpMediaSource::getInterfaceDescriptor());
     data.writeUint32(maxNumBuffers);
     // ReadOptions travels as a flat byte array; absence means defaults.
     if (options != nullptr) {
         data.writeByteArray(sizeof(*options), (uint8_t*) options);
     }
     status_t ret = remote()->transact(READMULTIPLE, data, &reply);
     // Drop cached shared-memory entries the remote has retired.
     mMemoryCache.gc();
     if (ret != NO_ERROR) {
         return ret;
     }
     // wrap the returned data in a vector of MediaBuffers
     int32_t buftype;
     uint32_t bufferCount = 0;
     // The reply is a sequence of typed buffer records terminated by
     // NULL_BUFFER, followed by a trailing status word.
     while ((buftype = reply.readInt32()) != NULL_BUFFER) {
         LOG_ALWAYS_FATAL_IF(bufferCount >= maxNumBuffers,
                 "Received %u+ buffers and requested %u buffers",
                 bufferCount + 1, maxNumBuffers);
         MediaBuffer *buf;
         if (buftype == SHARED_BUFFER || buftype == SHARED_BUFFER_INDEX) {
             uint64_t index = reply.readUint64();
             ALOGV("Received %s index %llu",
                     buftype == SHARED_BUFFER ? "SHARED_BUFFER" : "SHARED_BUFFER_INDEX",
                     (unsigned long long) index);
             sp<IMemory> mem;
             if (buftype == SHARED_BUFFER) {
                 // First use of this shared region: the binder object
                 // travels with it; cache it under the remote's index.
                 sp<IBinder> binder = reply.readStrongBinder();
                 mem = interface_cast<IMemory>(binder);
                 LOG_ALWAYS_FATAL_IF(mem.get() == nullptr,
                         "Received NULL IMemory for shared buffer");
                 mMemoryCache.insert(index, mem);
             } else {
                 // SHARED_BUFFER_INDEX refers to a region sent earlier.
                 mem = mMemoryCache.lookup(index);
                 LOG_ALWAYS_FATAL_IF(mem.get() == nullptr,
                         "Received invalid IMemory index for shared buffer: %llu",
                         (unsigned long long)index);
             }
             size_t offset = reply.readInt32();
             size_t length = reply.readInt32();
             buf = new RemoteMediaBufferWrapper(mem);
             buf->set_range(offset, length);
             buf->meta_data()->updateFromParcel(reply);
         } else { // INLINE_BUFFER
             // Small buffers are copied inline into the parcel.
             int32_t len = reply.readInt32();
             ALOGV("INLINE_BUFFER status %d and len %d", ret, len);
             buf = new MediaBuffer(len);
             reply.read(buf->data(), len);
             buf->meta_data()->updateFromParcel(reply);
         }
         buffers->push_back(buf);
         ++bufferCount;
         ++mBuffersSinceStop;
     }
     // Trailing status for the whole batch (e.g. OK, end-of-stream).
     ret = reply.readInt32();
     ALOGV("readMultiple status %d, bufferCount %u, sinceStop %u",
             ret, bufferCount, mBuffersSinceStop);
     return ret;
 }
// Releases the currently-held video buffer, if any, and clears the member
// pointer so a stale buffer is never released twice.
void OmxDecoder::ReleaseVideoBuffer() {
  if (!mVideoBuffer) {
    return;
  }
  mVideoBuffer->release();
  mVideoBuffer = NULL;
}
// Central message handler for the Converter: feeds access units pulled by
// the MediaPuller into the encoder, drives encoder output draining, and
// services control messages (IDR requests, shutdown, frame dropping,
// suspend).  Runs on the Converter's looper thread.
void Converter::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatMediaPullerNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            // If the encoder is already gone (and we're not in the
            // PCM-audio passthrough mode that needs no encoder), just
            // detach any media buffer carried by the message and bail.
            if (!mIsPCMAudio && mEncoder == NULL) {
                ALOGV("got msg '%s' after encoder shutdown.",
                      msg->debugString().c_str());

                if (what == MediaPuller::kWhatAccessUnit) {
                    sp<ABuffer> accessUnit;
                    CHECK(msg->findBuffer("accessUnit", &accessUnit));

                    accessUnit->setMediaBufferBase(NULL);
                }
                break;
            }

            if (what == MediaPuller::kWhatEOS) {
                // NULL in the input queue marks end-of-stream.
                mInputBufferQueue.push_back(NULL);

                feedEncoderInputBuffers();

                scheduleDoMoreWork();
            } else {
                CHECK_EQ(what, MediaPuller::kWhatAccessUnit);

                sp<ABuffer> accessUnit;
                CHECK(msg->findBuffer("accessUnit", &accessUnit));

                // Drop the frame if we owe dropped frames or encoding is
                // suspended; detach the media buffer so it gets released.
                if (mNumFramesToDrop > 0 || mEncodingSuspended) {
                    if (mNumFramesToDrop > 0) {
                        --mNumFramesToDrop;
                        ALOGI("dropping frame.");
                    }

                    accessUnit->setMediaBufferBase(NULL);
                    break;
                }

#if 0
                MediaBuffer *mbuf =
                    (MediaBuffer *)(accessUnit->getMediaBufferBase());
                if (mbuf != NULL) {
                    ALOGI("queueing mbuf %p", mbuf);
                    mbuf->release();
                }
#endif

#if ENABLE_SILENCE_DETECTION
                // Audio-only: after 10s of continuous silence, stop
                // forwarding buffers until non-silent audio returns.
                if (!mIsVideo) {
                    if (IsSilence(accessUnit)) {
                        if (mInSilentMode) {
                            break;
                        }

                        int64_t nowUs = ALooper::GetNowUs();

                        if (mFirstSilentFrameUs < 0ll) {
                            mFirstSilentFrameUs = nowUs;
                        } else if (nowUs >= mFirstSilentFrameUs + 10000000ll) {
                            mInSilentMode = true;
                            ALOGI("audio in silent mode now.");
                            break;
                        }
                    } else {
                        if (mInSilentMode) {
                            ALOGI("audio no longer in silent mode.");
                        }
                        mInSilentMode = false;
                        mFirstSilentFrameUs = -1ll;
                    }
                }
#endif

                mInputBufferQueue.push_back(accessUnit);

                feedEncoderInputBuffers();

                scheduleDoMoreWork();
            }
            break;
        }

        case kWhatEncoderActivity:
        {
#if 0
            int64_t whenUs;
            if (msg->findInt64("whenUs", &whenUs)) {
                int64_t nowUs = ALooper::GetNowUs();
                ALOGI("[%s] kWhatEncoderActivity after %lld us",
                      mIsVideo ? "video" : "audio", nowUs - whenUs);
            }
#endif

            // The pending work notification has now been consumed.
            mDoMoreWorkPending = false;

            if (mEncoder == NULL) {
                break;
            }

            status_t err = doMoreWork();

            if (err != OK) {
                notifyError(err);
            } else {
                scheduleDoMoreWork();
            }
            break;
        }

        case kWhatRequestIDRFrame:
        {
            if (mEncoder == NULL) {
                break;
            }

            // IDR frames only make sense for video encoders.
            if (mIsVideo) {
                ALOGV("requesting IDR frame");
                mEncoder->requestIDRFrame();
            }
            break;
        }

        case kWhatShutdown:
        {
            ALOGI("shutting down %s encoder", mIsVideo ? "video" : "audio");

            releaseEncoder();

            AString mime;
            CHECK(mOutputFormat->findString("mime", &mime));
            ALOGI("encoder (%s) shut down.", mime.c_str());

            // Tell the owner shutdown has completed.
            sp<AMessage> notify = mNotify->dup();
            notify->setInt32("what", kWhatShutdownCompleted);
            notify->post();
            break;
        }

        case kWhatDropAFrame:
        {
            // Consumed by the next kWhatAccessUnit above.
            ++mNumFramesToDrop;
            break;
        }

        case kWhatReleaseOutputBuffer:
        {
            if (mEncoder != NULL) {
                size_t bufferIndex;
                CHECK(msg->findInt32("bufferIndex", (int32_t*)&bufferIndex));
                CHECK(bufferIndex < mEncoderOutputBuffers.size());
                mEncoder->releaseOutputBuffer(bufferIndex);
            }
            break;
        }

        case kWhatSuspendEncoding:
        {
            int32_t suspend;
            CHECK(msg->findInt32("suspend", &suspend));

            mEncodingSuspended = suspend;

            // Surface-input encoders drop frames at the source instead of
            // in the input queue.
            if (mFlags & FLAG_USE_SURFACE_INPUT) {
                sp<AMessage> params = new AMessage;
                params->setInt32("drop-input-frames",suspend);
                mEncoder->setParameters(params);
            }
            break;
        }

        default:
            TRESPASS();
    }
}
// Releases the currently-held audio buffer, if any, and clears the member
// pointer so a stale buffer is never released twice.
void OmxDecoder::ReleaseAudioBuffer() {
  if (!mAudioBuffer) {
    return;
  }
  mAudioBuffer->release();
  mAudioBuffer = NULL;
}
// Test harness: records 10 seconds of audio from the default input (or a
// sine source when the #if is flipped), encodes it to AMR-NB/AMR-WB via an
// OMX encoder and writes the result to /sdcard/out.amr.  The disabled
// alternative paths decode and play back / drain the stream instead.
int main() {
    // We only have an AMR-WB encoder on sholes...
    static bool outputWBAMR = false;
    static const int32_t kSampleRate = outputWBAMR ? 16000 : 8000;
    static const int32_t kNumChannels = 1;

    android::ProcessState::self()->startThreadPool();

    OMXClient client;
    CHECK_EQ(client.connect(), (status_t)OK);

#if 0
    sp<MediaSource> source = new SineSource(kSampleRate, kNumChannels);
#else
    sp<MediaSource> source = new AudioSource(
            AUDIO_SOURCE_DEFAULT,
            kSampleRate,
            audio_channel_in_mask_from_count(kNumChannels));
#endif

    // Output format requested from the encoder.
    sp<MetaData> meta = new MetaData;

    meta->setCString(
            kKeyMIMEType,
            outputWBAMR ? MEDIA_MIMETYPE_AUDIO_AMR_WB
                        : MEDIA_MIMETYPE_AUDIO_AMR_NB);

    meta->setInt32(kKeyChannelCount, kNumChannels);
    meta->setInt32(kKeySampleRate, kSampleRate);

    // Propagate the source's max input size hint if it has one.
    int32_t maxInputSize;
    if (source->getFormat()->findInt32(kKeyMaxInputSize, &maxInputSize)) {
        meta->setInt32(kKeyMaxInputSize, maxInputSize);
    }

    sp<MediaSource> encoder = OMXCodec::Create(
            client.interface(),
            meta, true /* createEncoder */,
            source);

#if 1
    // Record for a fixed 10 seconds, then stop the writer.
    sp<AMRWriter> writer = new AMRWriter("/sdcard/out.amr");
    writer->addSource(encoder);
    writer->start();
    sleep(10);
    writer->stop();
#else
    sp<MediaSource> decoder = OMXCodec::Create(
            client.interface(),
            meta, false /* createEncoder */,
            encoder);

#if 0
    AudioPlayer *player = new AudioPlayer(NULL);
    player->setSource(decoder);

    player->start();

    sleep(10);

    player->stop();

    delete player;
    player = NULL;
#elif 0
    CHECK_EQ(decoder->start(), (status_t)OK);

    MediaBuffer *buffer;
    while (decoder->read(&buffer) == OK) {
        // do something with buffer

        putchar('.');
        fflush(stdout);

        buffer->release();
        buffer = NULL;
    }

    CHECK_EQ(decoder->stop(), (status_t)OK);
#endif
#endif

    return 0;
}
// Reads one decoded video frame into *aFrame, optionally seeking first
// (aSeekTimeUs == -1 means no seek).  Returns false on end of stream,
// missing timestamp metadata, or conversion failure; returns true both on
// success and on recoverable conditions (format change handled, empty
// buffer, non-fatal read errors).
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs)
{
  if (!mVideoSource.get())
    return false;

  // Drop the previously returned buffer before asking for the next one.
  ReleaseVideoBuffer();

  status_t err;

  if (aSeekTimeUs != -1) {
    MediaSource::ReadOptions options;
    options.setSeekTo(aSeekTimeUs);
    err = mVideoSource->read(&mVideoBuffer, &options);
  } else {
    err = mVideoSource->read(&mVideoBuffer);
  }

  if (err == OK && mVideoBuffer->range_length() > 0) {
    int64_t timeUs;
    int32_t unreadable;
    int32_t keyFrame;

    // A frame without a timestamp cannot be presented.
    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs) ) {
      LOG("no key time");
      return false;
    }

    // Missing metadata defaults: not a keyframe, readable.
    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
       keyFrame = 0;
    }

    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)) {
      unreadable = 0;
    }

    LOG("data: %p size: %u offset: %u length: %u unreadable: %d",
        mVideoBuffer->data(),
        mVideoBuffer->size(),
        mVideoBuffer->range_offset(),
        mVideoBuffer->range_length(),
        unreadable);

    char *data = reinterpret_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
    size_t length = mVideoBuffer->range_length();

    if (unreadable) {
      LOG("video frame is unreadable");
    }

    if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) {
      return false;
    }
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    return SetVideoFormat();
  }
  else if (err == ERROR_END_OF_STREAM) {
    return false;
  }

  // NOTE: other read errors and zero-length buffers fall through to
  // 'true' here without producing a frame — the caller retries.
  return true;
}
// Writer thread body: pulls buffers from mSource and appends their payload
// to mFd until stopped, end-of-stream, an I/O error, or a configured
// file-size/duration limit is reached.
//
// Returns OK on clean end-of-stream, ERROR_MALFORMED if the source ended
// before a single frame was written, or the first error encountered.
status_t AMRWriter::threadFunc() {
    mEstimatedDurationUs = 0;
    mEstimatedSizeBytes = 0;
    bool stoppedPrematurely = true;  // true until one frame has been written
    int64_t previousPausedDurationUs = 0;
    int64_t maxTimestampUs = 0;
    status_t err = OK;

    prctl(PR_SET_NAME, (unsigned long)"AMRWriter", 0, 0, 0);
    while (!mDone) {
        MediaBuffer *buffer;
        err = mSource->read(&buffer);

        if (err != OK) {
            break;
        }

        // While paused, keep draining the source but discard the data.
        if (mPaused) {
            buffer->release();
            buffer = NULL;
            continue;
        }

        mEstimatedSizeBytes += buffer->range_length();
        if (exceedsFileSizeLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, 0);
            break;
        }

        int64_t timestampUs;
        CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));
        if (timestampUs > mEstimatedDurationUs) {
            mEstimatedDurationUs = timestampUs;
        }
        if (mResumed) {
            // Fold the paused interval out of the timeline; 20000us is one
            // AMR frame duration, so the resumed stream continues exactly
            // one frame after the last written timestamp.
            previousPausedDurationUs += (timestampUs - maxTimestampUs - 20000);
            mResumed = false;
        }
        timestampUs -= previousPausedDurationUs;
        ALOGV("time stamp: %lld, previous paused duration: %lld",
                timestampUs, previousPausedDurationUs);
        if (timestampUs > maxTimestampUs) {
            maxTimestampUs = timestampUs;
        }

        if (exceedsFileDurationLimit()) {
            buffer->release();
            buffer = NULL;
            notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
            break;
        }
        ssize_t n = write(mFd,
                        (const uint8_t *)buffer->data() + buffer->range_offset(),
                        buffer->range_length());

        if (n < (ssize_t)buffer->range_length()) {
            // Short write (or write failure): treat as an I/O error.
            buffer->release();
            buffer = NULL;
            err = ERROR_IO;
            break;
        }

        // (Removed a dead 'if (err != OK) break;' here: err is provably OK
        // at this point — it was checked right after read() and is only
        // modified on paths that break out of the loop.)

        // At least one buffer has now been written successfully.
        stoppedPrematurely = false;

        buffer->release();
        buffer = NULL;
    }

    if ((err == OK || err == ERROR_END_OF_STREAM) && stoppedPrematurely) {
        // The source ended before producing a single frame.
        err = ERROR_MALFORMED;
    }

    close(mFd);
    mFd = -1;
    mReachedEOS = true;
    if (err == ERROR_END_OF_STREAM) {
        return OK;
    }
    return err;
}
// Reads up to mBufferSize bytes of raw PCM starting at mCurrentPos and
// returns them as 16-bit signed samples, converting 8-bit unsigned and
// 24-bit signed source data in place.
//
// A seek request repositions mCurrentPos assuming 16-bit frames (the
// seek-position math uses a fixed 2 bytes per sample).
status_t PCMSource::read(
        MediaBuffer **out, const ReadOptions *options) {
    *out = NULL;
    int64_t seekTimeUs;
    ReadOptions::SeekMode seek = ReadOptions::SEEK_CLOSEST_SYNC;
    if (options != NULL && options->getSeekTo(&seekTimeUs,&seek)) {
        int64_t pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * 2;
        if (pos > mSize) {
            pos = mSize;
        }
        mCurrentPos = pos + mOffset;
    }

    MediaBuffer *buffer;
    status_t err = mGroup->acquire_buffer(&buffer);
    if (err != OK) {
        return err;
    }

    ssize_t n = mDataSource->readAt(
            mCurrentPos, buffer->data(), mBufferSize);
    if (n <= 0) {
        buffer->release();
        buffer = NULL;
        return ERROR_END_OF_STREAM;
    }

    // Capture the byte offset of the *start* of this buffer before
    // advancing the read position.  The previous code computed the
    // timestamp from the post-advance position, stamping every buffer
    // with the time of its end rather than its beginning (so the first
    // buffer did not start at 0).
    int64_t frameStartBytes = mCurrentPos - mOffset;

    mCurrentPos += n;

    buffer->set_range(0, n);

    if (mBitsPerSample == 8) {
        // Convert 8-bit unsigned samples to 16-bit signed.

        MediaBuffer *tmp;
        CHECK_EQ(mGroup->acquire_buffer(&tmp), OK);

        // The new buffer holds the sample number of samples, but each
        // one is 2 bytes wide.
        tmp->set_range(0, 2 * n);

        int16_t *dst = (int16_t *)tmp->data();
        const uint8_t *src = (const uint8_t *)buffer->data();
        while (n-- > 0) {
            // Recenter around 0 and scale to the 16-bit range.
            *dst++ = ((int16_t)(*src) - 128) * 256;
            ++src;
        }

        buffer->release();
        buffer = tmp;
    } else if (mBitsPerSample == 24) {
        // Convert 24-bit signed samples to 16-bit signed (in place: the
        // destination never overtakes the 3-bytes-per-sample source).

        const uint8_t *src =
            (const uint8_t *)buffer->data() + buffer->range_offset();
        int16_t *dst = (int16_t *)src;

        size_t numSamples = buffer->range_length() / 3;
        for (size_t i = 0; i < numSamples; ++i) {
            int32_t x = (int32_t)(src[0] | src[1] << 8 | src[2] << 16);
            x = (x << 8) >> 8;  // sign extension

            // Drop the least significant byte to narrow to 16 bits.
            x = x >> 8;
            *dst++ = (int16_t)x;
            src += 3;
        }

        buffer->set_range(buffer->range_offset(), 2 * numSamples);
    }

    size_t bytesPerSample = mBitsPerSample >> 3;

    // Timestamp = starting frame index / sample rate.
    buffer->meta_data()->setInt64(
            kKeyTime,
            1000000LL * frameStartBytes
                / (mNumChannels * bytesPerSample) / mSampleRate);

    *out = buffer;

    return OK;
}
// Test harness: sets up an ARTP session from a compile-time-selected SDP
// description, optionally replays recorded RTP/RTCP packet dumps into it,
// decodes the single resulting track and prints a line per decoded frame.
int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    DataSource::RegisterDefaultSniffers();

    const char *rtpFilename = NULL;
    const char *rtcpFilename = NULL;

    // Either no arguments (live session) or exactly two dump files.
    if (argc == 3) {
        rtpFilename = argv[1];
        rtcpFilename = argv[2];
    } else if (argc != 1) {
        fprintf(stderr, "usage: %s [ rtpFilename rtcpFilename ]\n", argv[0]);
        return 1;
    }

#if 0
    static const uint8_t kSPS[] = {
        0x67, 0x42, 0x80, 0x0a, 0xe9, 0x02, 0x83, 0xe4, 0x20, 0x00, 0x00, 0x7d, 0x00, 0x00, 0x0e, 0xa6, 0x00, 0x80
    };
    static const uint8_t kPPS[] = {
        0x68, 0xce, 0x3c, 0x80
    };
    AString out1, out2;
    encodeBase64(kSPS, sizeof(kSPS), &out1);
    encodeBase64(kPPS, sizeof(kPPS), &out2);
    printf("params=%s,%s\n", out1.c_str(), out2.c_str());
#endif

    sp<ALooper> looper = new ALooper;

    sp<UDPPusher> rtp_pusher;
    sp<UDPPusher> rtcp_pusher;

    // When dump files are given, replay them into the session's RTP/RTCP
    // ports (5434/5435).
    if (rtpFilename != NULL) {
        rtp_pusher = new UDPPusher(rtpFilename, 5434);
        looper->registerHandler(rtp_pusher);

        rtcp_pusher = new UDPPusher(rtcpFilename, 5435);
        looper->registerHandler(rtcp_pusher);
    }

    sp<ARTPSession> session = new ARTPSession;
    looper->registerHandler(session);

    // One of several hard-coded SDP descriptions is compiled in below;
    // only the branch selected by the preprocessor is used.
#if 0
    // My H264 SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=0-315\r\n"
        "a=isma-compliance:2,2.0,2\r\n"
        "m=video 5434 RTP/AVP 97\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:30\r\n"
        "a=rtpmap:97 H264/90000\r\n"
        "a=fmtp:97 packetization-mode=1;profile-level-id=42000C;"
          "sprop-parameter-sets=Z0IADJZUCg+I,aM44gA==\r\n"
        "a=mpeg4-esid:201\r\n"
        "a=cliprect:0,0,240,320\r\n"
        "a=framesize:97 320-240\r\n";
#elif 0
    // My H263 SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=0-315\r\n"
        "a=isma-compliance:2,2.0,2\r\n"
        "m=video 5434 RTP/AVP 97\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:30\r\n"
        "a=rtpmap:97 H263-1998/90000\r\n"
        "a=cliprect:0,0,240,320\r\n"
        "a=framesize:97 320-240\r\n";
#elif 0
    // My AMR SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=0-315\r\n"
        "a=isma-compliance:2,2.0,2\r\n"
        "m=audio 5434 RTP/AVP 97\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:30\r\n"
        "a=rtpmap:97 AMR/8000/1\r\n"
        "a=fmtp:97 octet-align\r\n";
#elif 1
    // GTalk's H264 SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=now-\r\n"
        "m=video 5434 RTP/AVP 96\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:320000\r\n"
        "a=rtpmap:96 H264/90000\r\n"
        "a=fmtp:96 packetization-mode=1;profile-level-id=42001E;"
          "sprop-parameter-sets=Z0IAHpZUBaHogA==,aM44gA==\r\n"
        "a=cliprect:0,0,480,270\r\n"
        "a=framesize:96 720-480\r\n";
#else
    // sholes H264 SDP
    static const char *raw =
        "v=0\r\n"
        "o=- 64 233572944 IN IP4 127.0.0.0\r\n"
        "s=QuickTime\r\n"
        "t=0 0\r\n"
        "a=range:npt=now-\r\n"
        "m=video 5434 RTP/AVP 96\r\n"
        "c=IN IP4 127.0.0.1\r\n"
        "b=AS:320000\r\n"
        "a=rtpmap:96 H264/90000\r\n"
        "a=fmtp:96 packetization-mode=1;profile-level-id=42001E;"
          "sprop-parameter-sets=Z0KACukCg+QgAAB9AAAOpgCA,aM48gA==\r\n"
        "a=cliprect:0,0,240,320\r\n"
        "a=framesize:96 320-240\r\n";
#endif

    sp<ASessionDescription> desc = new ASessionDescription;
    CHECK(desc->setTo(raw, strlen(raw)));

    CHECK_EQ(session->setup(desc), (status_t)OK);

    if (rtp_pusher != NULL) {
        rtp_pusher->start();
    }

    if (rtcp_pusher != NULL) {
        rtcp_pusher->start();
    }

    looper->start(false /* runOnCallingThread */);

    // The SDPs above each describe exactly one media track.
    CHECK_EQ(session->countTracks(), 1u);
    sp<MediaSource> source = session->trackAt(0);

    sp<MediaSource> decoder = SimpleDecodingSource::Create(
            source, 0 /* flags: ACodec::kPreferSoftwareCodecs */);
    CHECK(decoder != NULL);

    CHECK_EQ(decoder->start(), (status_t)OK);

    // Drain decoded frames until the decoder reports an error, handling
    // format changes in-line.
    for (;;) {
        MediaBuffer *buffer;
        status_t err = decoder->read(&buffer);

        if (err != OK) {
            if (err == INFO_FORMAT_CHANGED) {
                int32_t width, height;
                CHECK(decoder->getFormat()->findInt32(kKeyWidth, &width));
                CHECK(decoder->getFormat()->findInt32(kKeyHeight, &height));
                printf("INFO_FORMAT_CHANGED %d x %d\n", width, height);
                continue;
            }

            ALOGE("decoder returned error 0x%08x", err);
            break;
        }

#if 1
        if (buffer->range_length() != 0) {
            int64_t timeUs;
            CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));

            printf("decoder returned frame of size %zu at time %.2f secs\n",
                   buffer->range_length(), timeUs / 1E6);
        }
#endif

        buffer->release();
        buffer = NULL;
    }

    CHECK_EQ(decoder->stop(), (status_t)OK);

    looper->stop();

    return 0;
}
Exemple #30
0
status_t FLACDecoder::read(MediaBuffer **out, const ReadOptions* options) {
    // Decode one FLAC frame pulled from mSource into a PCM MediaBuffer.
    //
    // @param out      On success, receives a buffer of interleaved 16-bit PCM
    //                 stamped with kKeyTime; set to NULL on entry and left
    //                 NULL on failure.
    // @param options  Optional read options; a seek request resets output
    //                 frame accounting and is forwarded to the parser.
    // @return OK on success, NO_INIT if the decoder never initialized, or
    //         the parser's error code (e.g. EOS) propagated from mSource.
    *out = NULL;

    if (!mInitStatus) {
        return NO_INIT;
    }

    int err = 0;
    // Initialize all decoder-output variables so nothing below ever consumes
    // garbage if the decode call is skipped or fails to fill them.
    uint32 blockSize = 0, usedBitstream = 0, availLength = 0;
    uint32 flacOutputBufSize = FLAC_OUTPUT_BUFFER_SIZE;
    int *status = 0;

    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        ALOGD("qti_flac: Seek to %lld", (long long)seekTimeUs);
        CHECK(seekTimeUs >= 0);
        // Restart output timing from the seek point.
        mNumFramesOutput = 0;
        // Note: no release of mInputBuffer here — the unconditional release
        // below always discards any buffer held over from the previous call.
    }
    else {
        // Sentinel: "not seeking"; checked against untimestamped input below.
        seekTimeUs = -1;
    }

    // Drop any input buffer left over from the previous call before asking
    // the parser for a fresh one.
    if (mInputBuffer) {
        mInputBuffer->release();
        mInputBuffer = NULL;
    }

    err = mSource->read(&mInputBuffer, options);
    if (err != OK) {
        ALOGE("qti_flac: Parser returned %d", err);
        return err;
    }

    int64_t timeUs;
    if (mInputBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
        // A timestamped input buffer re-anchors the output timeline.
        mAnchorTimeUs = timeUs;
        mNumFramesOutput = 0;
        ALOGVV("qti_flac: mAnchorTimeUs %lld", (long long)mAnchorTimeUs);
    }
    else {
        // Untimestamped input is only acceptable when we are not seeking.
        CHECK(seekTimeUs < 0);
    }

    if (mInputBuffer) {
        ALOGVV("qti_flac: Parser filled %d bytes", mInputBuffer->range_length());
        availLength = mInputBuffer->range_length();
        // Decode one frame: consumes usedBitstream bytes of input and
        // produces blockSize samples per channel (planar) in mOutBuffer.
        status = (*mProcessData)(&pFlacDecState,
                                 (uint8*)mInputBuffer->data(),
                                 availLength,
                                 mOutBuffer,
                                 &flacOutputBufSize,
                                 &usedBitstream,
                                 &blockSize);
    }

    ALOGVV("qti_flac: status %d, availLength %d, usedBitstream %d, blockSize %d",
            (int)status, availLength, usedBitstream, blockSize);

    MediaBuffer *buffer;
    CHECK_EQ(mBufferGroup->acquire_buffer(&buffer), (status_t)OK);

    // 2 bytes per 16-bit sample, blockSize samples per channel.
    buffer->set_range(0, blockSize*mNumChannels*2);

    uint16_t *ptr = (uint16_t *) mOutBuffer;

    // The decoder emits planar output (all of channel 0, then channel 1, ...);
    // interleave into mTmpBuf for multichannel clips before copying out.
    if (mNumChannels > 1) {
        for (uint32_t k = 0; k < blockSize; k++) {
            for (uint32_t i = k, j = mNumChannels*k; i < blockSize*mNumChannels; i += blockSize, j++) {
                mTmpBuf[j] = ptr[i];
            }
        }
        memcpy((uint16_t *)buffer->data(), mTmpBuf, blockSize*mNumChannels*2);
    }
    else {
        // Mono: decoder output is already in the right layout.
        memcpy((uint16_t *)buffer->data(), mOutBuffer, blockSize*mNumChannels*2);
    }

    // Timestamp = anchor from the parser plus frames already emitted since
    // that anchor, converted to microseconds.
    int64_t time = 0;
    time = mAnchorTimeUs + (mNumFramesOutput*1000000)/mSampleRate;
    buffer->meta_data()->setInt64(kKeyTime, time);
    mNumFramesOutput += blockSize;
    ALOGVV("qti_flac: time = %lld", (long long)time);

    *out = buffer;

    return OK;
}