Example #1
void NuPlayer::HTTPLiveSource::onSessionNotify_l(const sp<AMessage> &msg) {
    int32_t what;
    CHECK(msg->findInt32("what", &what));

    if (what == LiveSession::kWhatPicture) {
        // Album art embedded in the stream: cache it in mMetaData and notify the player.
        sp<ABuffer> metabuffer;
        CHECK(msg->findBuffer("buffer", &metabuffer));

        AString mimeType;
        sp<ABuffer> buffer;
        if (metabuffer->meta()->findString("mime", &mimeType) &&
                metabuffer->meta()->findBuffer("pictureBuffer", &buffer)) {
            if (mMetaData == NULL) {
                mMetaData = new MetaData;
            }
            mMetaData->setCString(kKeyAlbumArtMIME, mimeType.c_str());
            mMetaData->setData(kKeyAlbumArt, MetaData::TYPE_NONE, buffer->data(), buffer->size());
            ALOGI("kKeyAlbumArt set data: %s, datasize: %zu", mimeType.c_str(), buffer->size());

            sp<AMessage> notify = dupNotify();
            notify->setInt32("what", NuPlayer::Source::kWhatPicture);
            notify->post();
        }
    } else if (what == LiveSession::kWhatBufferingStart) {
        sp<AMessage> notify = dupNotify();
        notify->setInt32("what", kWhatBufferingStart);
        notify->post();
    } else if (what == LiveSession::kWhatBufferingEnd) {
        sp<AMessage> notify = dupNotify();
        notify->setInt32("what", kWhatBufferingEnd);
        notify->post();
    }
}
void NuPlayer::RTSPSource::prepareAsync() {
    if (mLooper == NULL) {
        mLooper = new ALooper;
        mLooper->setName("rtsp");
        mLooper->start();

        mLooper->registerHandler(this);
    }

    CHECK(mHandler == NULL);
    CHECK(mSDPLoader == NULL);

    sp<AMessage> notify = new AMessage(kWhatNotify, id());

    CHECK_EQ(mState, (int)DISCONNECTED);
    mState = CONNECTING;

    if (mIsSDP) {
        mSDPLoader = new SDPLoader(notify,
                (mFlags & kFlagIncognito) ? SDPLoader::kFlagIncognito : 0,
                mHTTPService);

        mSDPLoader->load(
                mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);
    } else {
        mHandler = new MyHandler(mURL.c_str(), notify, mUIDValid, mUID);
        mLooper->registerHandler(mHandler);

        mHandler->connect();
    }

    sp<AMessage> notifyStart = dupNotify();
    notifyStart->setInt32("what", kWhatBufferingStart);
    notifyStart->post();
}
void NuPlayer::RTSPSource::startBufferingIfNecessary() {
    Mutex::Autolock _l(mBufferingLock);

    if (!mBuffering) {
        mBuffering = true;

        sp<AMessage> notify = dupNotify();
        notify->setInt32("what", kWhatPauseOnBufferingStart);
        notify->post();
    }
}
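
All three functions above share one notification idiom: the source duplicates its notify template with dupNotify(), stores the inner event code in an int32 field named "what", and posts the message to the player's looper thread. The sketch below shows, in simplified and hypothetical form, what the receiving AHandler side of that idiom can look like; ExampleReceiver and kWhatSourceNotify are illustrative names only, and the older AMessage(what, handler-id) constructor used throughout these examples is assumed.

#define LOG_TAG "ExampleReceiver"
#include <utils/Log.h>

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

struct ExampleReceiver : public AHandler {
    enum {
        kWhatSourceNotify = 'srcN',  // outer "what" carried by the AMessage itself
    };

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg) {
        switch (msg->what()) {
            case kWhatSourceNotify:
            {
                // The inner "what" (kWhatBufferingStart, kWhatBufferingEnd, ...)
                // travels as an int32 payload, exactly as the sources above set it.
                int32_t what;
                CHECK(msg->findInt32("what", &what));
                ALOGV("source notification: %d", what);
                break;
            }
            default:
                break;
        }
    }
};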
Example #4
void NuPlayer::HTTPLiveSource::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSessionNotify:
        {
            onSessionNotify(msg);
            break;
        }

        case kWhatFetchSubtitleData:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mFetchSubtitleDataGeneration) {
                // stale
                break;
            }

            sp<ABuffer> buffer;
            if (mLiveSession->dequeueAccessUnit(
                    LiveSession::STREAMTYPE_SUBTITLES, &buffer) == OK) {
                sp<AMessage> notify = dupNotify();
                notify->setInt32("what", kWhatSubtitleData);
                notify->setBuffer("buffer", buffer);
                notify->post();

                int64_t timeUs, baseUs, durationUs, delayUs;
                CHECK(buffer->meta()->findInt64("baseUs", &baseUs));
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
                CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
                delayUs = baseUs + timeUs - ALooper::GetNowUs();

                msg->post(delayUs > 0ll ? delayUs : 0ll);
            } else {
                // try again in 1 second
                msg->post(1000000ll);
            }

            break;
        }

        default:
            Source::onMessageReceived(msg);
            break;
    }
}
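The generation test at the top of kWhatFetchSubtitleData is a recurring foundation-message idiom: every self-posted fetch message carries the generation value that was current when it was posted, so bumping the counter quietly invalidates anything still sitting in the looper's queue. A minimal, hypothetical restatement of that bookkeeping follows; GenerationedFetch and the 'ftch' message code are not from the original sources, and the older AMessage(what, handler-id) constructor is assumed.

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

struct GenerationedFetch {
    GenerationedFetch() : mGeneration(0) {}

    // Every fetch message snapshots the generation that was current when it was posted.
    sp<AMessage> newFetchMessage(const sp<AHandler> &handler) {
        sp<AMessage> msg = new AMessage('ftch', handler->id());
        msg->setInt32("generation", mGeneration);
        return msg;
    }

    // Bumping the counter makes every in-flight fetch message stale.
    void cancelPendingFetches() {
        ++mGeneration;
    }

    // Mirrors the "generation != mFetchSubtitleDataGeneration" check above.
    bool isStale(const sp<AMessage> &msg) const {
        int32_t generation;
        CHECK(msg->findInt32("generation", &generation));
        return generation != mGeneration;
    }

private:
    int32_t mGeneration;
};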
status_t NuPlayer::HTTPLiveSource::dequeueAccessUnit(
        bool audio, sp<ABuffer> *accessUnit) {
    status_t err = mLiveSession->dequeueAccessUnit(
            audio ? LiveSession::STREAMTYPE_AUDIO
                  : LiveSession::STREAMTYPE_VIDEO,
            accessUnit);
    if (err == OK && audio) {
        sp<AMessage> format;
        if (mLiveSession->getStreamFormat(LiveSession::STREAMTYPE_VIDEO, &format) != OK) {
            // Detect the image in audio only clip
            sp<AMessage> notify = dupNotify();
            notify->setInt32("what", kWhatShowImage);
            ExtendedUtils::detectAndPostImage(*accessUnit, notify);
        }
    }
    return err;
}
bool NuPlayer::RTSPSource::stopBufferingIfNecessary() {
    Mutex::Autolock _l(mBufferingLock);

    if (mBuffering) {
        if (!haveSufficientDataOnAllTracks()) {
            return false;
        }

        mBuffering = false;

        sp<AMessage> notify = dupNotify();
        notify->setInt32("what", kWhatResumeOnBufferingEnd);
        notify->post();
    }

    return true;
}
Example #7
void NuPlayer::HTTPLiveSource::pollForRawData(
        const sp<AMessage> &msg, int32_t currentGeneration,
        LiveSession::StreamType fetchType, int32_t pushWhat) {

    int32_t generation;
    CHECK(msg->findInt32("generation", &generation));

    if (generation != currentGeneration) {
        return;
    }

    sp<ABuffer> buffer;
    while (mLiveSession->dequeueAccessUnit(fetchType, &buffer) == OK) {

        sp<AMessage> notify = dupNotify();
        notify->setInt32("what", pushWhat);
        notify->setBuffer("buffer", buffer);

        int64_t timeUs, baseUs, delayUs;
        CHECK(buffer->meta()->findInt64("baseUs", &baseUs));
        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
        delayUs = baseUs + timeUs - ALooper::GetNowUs();

        if (fetchType == LiveSession::STREAMTYPE_SUBTITLES) {
            notify->post();
            msg->post(delayUs > 0ll ? delayUs : 0ll);
            return;
        } else if (fetchType == LiveSession::STREAMTYPE_METADATA) {
            if (delayUs < -1000000ll) { // 1 second
                continue;
            }
            notify->post();
            // push all currently available metadata buffers in each invocation of pollForRawData
            // continue;
        } else {
            TRESPASS();
        }
    }

    // try again in 1 second
    msg->post(1000000ll);
}
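Both the subtitle path in example #4 and pollForRawData above schedule delivery with the same arithmetic: a buffer stamped with a media time "timeUs" relative to the real-time anchor "baseUs" is due at baseUs + timeUs, the re-post delay is clamped at zero once that instant has passed, and metadata that is already more than a second overdue is dropped. A small stand-alone sketch of that arithmetic, with hypothetical helper names:

#include <media/stagefright/foundation/ALooper.h>

using namespace android;

// Delay until the buffer is due; never negative, so msg->post(delayUs) fires
// immediately when the due time has already passed.
static int64_t repostDelayUs(int64_t baseUs, int64_t timeUs) {
    int64_t delayUs = baseUs + timeUs - ALooper::GetNowUs();
    return delayUs > 0ll ? delayUs : 0ll;
}

// Metadata buffers that are already more than a second overdue are skipped
// (the "delayUs < -1000000ll" check above).
static bool tooLateForMetadata(int64_t baseUs, int64_t timeUs) {
    return (baseUs + timeUs - ALooper::GetNowUs()) < -1000000ll;
}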
Example #8
void NuPlayer::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) {
    int32_t what;
    CHECK(msg->findInt32("what", &what));

    switch (what) {
        case LiveSession::kWhatPrepared:
        {
            // notify the current size here if we have it, otherwise report an initial size of (0,0)
            sp<AMessage> format = getFormat(false /* audio */);
            int32_t width;
            int32_t height;
            if (format != NULL &&
                    format->findInt32("width", &width) && format->findInt32("height", &height)) {
                notifyVideoSizeChanged(format);
            } else {
                notifyVideoSizeChanged();
            }

#ifdef MTK_AOSP_ENHANCEMENT
            uint32_t flags = 0;
#else            
            uint32_t flags = FLAG_CAN_PAUSE;
#endif
            if (mLiveSession->isSeekable()) {
#ifdef MTK_AOSP_ENHANCEMENT
                flags |= FLAG_CAN_PAUSE;
#endif                
                flags |= FLAG_CAN_SEEK;
                flags |= FLAG_CAN_SEEK_BACKWARD;
                flags |= FLAG_CAN_SEEK_FORWARD;
            }

            if (mLiveSession->hasDynamicDuration()) {
                flags |= FLAG_DYNAMIC_DURATION;
            }

            notifyFlagsChanged(flags);

            notifyPrepared();
            break;
        }

        case LiveSession::kWhatPreparationFailed:
        {
            status_t err;
            CHECK(msg->findInt32("err", &err));

            notifyPrepared(err);
            break;
        }

        case LiveSession::kWhatStreamsChanged:
        {
            uint32_t changedMask;
            CHECK(msg->findInt32(
                        "changedMask", (int32_t *)&changedMask));

            bool audio = changedMask & LiveSession::STREAMTYPE_AUDIO;
            bool video = changedMask & LiveSession::STREAMTYPE_VIDEO;
#ifdef MTK_AOSP_ENHANCEMENT
			ALOGI("receive LiveSession::kWhatStreamsChanged,queue Decoder Shutdown for %s,%s",\
				audio?"audio":"",video?"video":"");
#endif
            sp<AMessage> reply;
            CHECK(msg->findMessage("reply", &reply));

            sp<AMessage> notify = dupNotify();
            notify->setInt32("what", kWhatQueueDecoderShutdown);
            notify->setInt32("audio", audio);
            notify->setInt32("video", video);
            notify->setMessage("reply", reply);
            notify->post();
            break;
        }

        case LiveSession::kWhatError:
        {
            break;
        }
#ifdef MTK_AOSP_ENHANCEMENT
        case LiveSession::kWhatPicture:
        case LiveSession::kWhatBufferingStart:
        case LiveSession::kWhatBufferingEnd:
        {
            onSessionNotify_l(msg);
            break;
        }
#endif
        default:
            TRESPASS();
    }
}
Example #9
status_t NuPlayer::RTSPSource::dequeueAccessUnit(
        bool audio, sp<ABuffer> *accessUnit) {
    if (mBuffering) {
        if (!haveSufficientDataOnAllTracks()) {
            return -EWOULDBLOCK;
        }

        mBuffering = false;

        sp<AMessage> notify = dupNotify();
        notify->setInt32("what", kWhatBufferingEnd);
        notify->post();
    }

    sp<AnotherPacketSource> source = getSource(audio);

    if (source == NULL) {
        return -EWOULDBLOCK;
    }

    status_t finalResult;
    if (!source->hasBufferAvailable(&finalResult)) {
        if (finalResult == OK) {
            int64_t mediaDurationUs = 0;
            getDuration(&mediaDurationUs);
            sp<AnotherPacketSource> otherSource = getSource(!audio);
            status_t otherFinalResult;

            // If other source already signaled EOS, this source should also signal EOS
            if (otherSource != NULL &&
                    !otherSource->hasBufferAvailable(&otherFinalResult) &&
                    otherFinalResult == ERROR_END_OF_STREAM) {
                source->signalEOS(ERROR_END_OF_STREAM);
                return ERROR_END_OF_STREAM;
            }

            // If this source has detected near end, give it some time to retrieve more
            // data before signaling EOS
            if (source->isFinished(mediaDurationUs)) {
                int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
                if (eosTimeout == 0) {
                    setEOSTimeout(audio, ALooper::GetNowUs());
                } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
                    setEOSTimeout(audio, 0);
                    source->signalEOS(ERROR_END_OF_STREAM);
                    return ERROR_END_OF_STREAM;
                }
                return -EWOULDBLOCK;
            }

            if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
                // We should not enter buffering mode
                // if any of the sources already have detected EOS.
                mBuffering = true;

                sp<AMessage> notify = dupNotify();
                notify->setInt32("what", kWhatBufferingStart);
                notify->post();
            }

            return -EWOULDBLOCK;
        }
        return finalResult;
    }

    setEOSTimeout(audio, 0);

    return source->dequeueAccessUnit(accessUnit);
}
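The near-EOS branch above is a small two-state timer: the first starved dequeue after isFinished() records the current time, subsequent starved dequeues keep returning -EWOULDBLOCK, and EOS is only signaled once kNearEOSTimeoutUs has elapsed since that first miss. A hypothetical restatement of just that timer (shouldSignalEOS does not exist in the original sources; *eosTimeoutUs stands in for mEOSTimeoutAudio / mEOSTimeoutVideo):

#include <media/stagefright/foundation/ALooper.h>

using namespace android;

// *eosTimeoutUs is 0 while the clock is not running; otherwise it holds the
// timestamp of the first starved dequeue.
static bool shouldSignalEOS(int64_t *eosTimeoutUs, int64_t nearEOSTimeoutUs) {
    int64_t nowUs = ALooper::GetNowUs();
    if (*eosTimeoutUs == 0) {
        *eosTimeoutUs = nowUs;   // first starved dequeue: start the clock
        return false;            // caller keeps returning -EWOULDBLOCK
    }
    if (nowUs - *eosTimeoutUs > nearEOSTimeoutUs) {
        *eosTimeoutUs = 0;       // reset the clock and signal EOS
        return true;
    }
    return false;                // still within the grace period
}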
Example #10
void NuPlayer::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) {
    int32_t what;
    CHECK(msg->findInt32("what", &what));

    switch (what) {
        case LiveSession::kWhatPrepared:
        {
            // notify the current size here if we have it, otherwise report an initial size of (0,0)
            sp<AMessage> format = getFormat(false /* audio */);
            int32_t width;
            int32_t height;
            if (format != NULL &&
                    format->findInt32("width", &width) && format->findInt32("height", &height)) {
                notifyVideoSizeChanged(format);
            } else {
                notifyVideoSizeChanged();
            }

            uint32_t flags = 0;
            if (mLiveSession->isSeekable()) {
                flags |= FLAG_CAN_PAUSE;
                flags |= FLAG_CAN_SEEK;
                flags |= FLAG_CAN_SEEK_BACKWARD;
                flags |= FLAG_CAN_SEEK_FORWARD;
            }

            if (mLiveSession->hasDynamicDuration()) {
                flags |= FLAG_DYNAMIC_DURATION;
            }

            notifyFlagsChanged(flags);

            notifyPrepared();
            break;
        }

        case LiveSession::kWhatPreparationFailed:
        {
            status_t err;
            CHECK(msg->findInt32("err", &err));

            notifyPrepared(err);
            break;
        }

        case LiveSession::kWhatStreamsChanged:
        {
            uint32_t changedMask;
            CHECK(msg->findInt32(
                        "changedMask", (int32_t *)&changedMask));

            bool audio = changedMask & LiveSession::STREAMTYPE_AUDIO;
            bool video = changedMask & LiveSession::STREAMTYPE_VIDEO;

            sp<AMessage> reply;
            CHECK(msg->findMessage("reply", &reply));

            sp<AMessage> notify = dupNotify();
            notify->setInt32("what", kWhatQueueDecoderShutdown);
            notify->setInt32("audio", audio);
            notify->setInt32("video", video);
            notify->setMessage("reply", reply);
            notify->post();
            break;
        }

        case LiveSession::kWhatBufferingStart:
        {
            sp<AMessage> notify = dupNotify();
            notify->setInt32("what", kWhatPauseOnBufferingStart);
            notify->post();
            break;
        }

        case LiveSession::kWhatBufferingEnd:
        {
            sp<AMessage> notify = dupNotify();
            notify->setInt32("what", kWhatResumeOnBufferingEnd);
            notify->post();
            break;
        }

        case LiveSession::kWhatBufferingUpdate:
        {
            sp<AMessage> notify = dupNotify();
            int32_t percentage;
            CHECK(msg->findInt32("percentage", &percentage));
            notify->setInt32("what", kWhatBufferingUpdate);
            notify->setInt32("percentage", percentage);
            notify->post();
            break;
        }

        case LiveSession::kWhatMetadataDetected:
        {
            if (!mHasMetadata) {
                mHasMetadata = true;

                sp<AMessage> notify = dupNotify();
                // notification without buffer triggers MEDIA_INFO_METADATA_UPDATE
                notify->setInt32("what", kWhatTimedMetaData);
                notify->post();
            }
            break;
        }

        case LiveSession::kWhatError:
        {
            break;
        }

        default:
            TRESPASS();
    }
}
Example #11
void NuPlayer::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) {
    int32_t what;
    CHECK(msg->findInt32("what", &what));

    switch (what) {
        case LiveSession::kWhatPrepared:
        {
            // notify the current size here if we have it, otherwise report an initial size of (0,0)
            sp<AMessage> format = getFormat(false /* audio */);
            int32_t width;
            int32_t height;
            if (format != NULL &&
                    format->findInt32("width", &width) && format->findInt32("height", &height)) {
                notifyVideoSizeChanged(width, height);
            } else {
#ifdef ANDROID_DEFAULT_CODE
                notifyVideoSizeChanged(0, 0);
#endif                
            }
#ifdef ANDROID_DEFAULT_CODE
            uint32_t flags = FLAG_CAN_PAUSE;
#else            
            uint32_t flags = 0;
#endif
            if (mLiveSession->isSeekable()) {
#ifndef ANDROID_DEFAULT_CODE
                flags |= FLAG_CAN_PAUSE;
#endif                
                flags |= FLAG_CAN_SEEK;
                flags |= FLAG_CAN_SEEK_BACKWARD;
                flags |= FLAG_CAN_SEEK_FORWARD;
            }

            if (mLiveSession->hasDynamicDuration()) {
                flags |= FLAG_DYNAMIC_DURATION;
            }

            notifyFlagsChanged(flags);

            notifyPrepared();
            break;
        }

        case LiveSession::kWhatPreparationFailed:
        {
            status_t err;
            CHECK(msg->findInt32("err", &err));

            notifyPrepared(err);
            break;
        }

        case LiveSession::kWhatStreamsChanged:
        {
            uint32_t changedMask;
            CHECK(msg->findInt32(
                        "changedMask", (int32_t *)&changedMask));

            bool audio = changedMask & LiveSession::STREAMTYPE_AUDIO;
            bool video = changedMask & LiveSession::STREAMTYPE_VIDEO;

            sp<AMessage> reply;
            CHECK(msg->findMessage("reply", &reply));

            sp<AMessage> notify = dupNotify();
            notify->setInt32("what", kWhatQueueDecoderShutdown);
            notify->setInt32("audio", audio);
            notify->setInt32("video", video);
            notify->setMessage("reply", reply);
            notify->post();
            break;
        }

        case LiveSession::kWhatError:
        {
            break;
        }
#ifndef ANDROID_DEFAULT_CODE
        case LiveSession::kWhatPicture:
        {
            // Album art embedded in the stream: cache it in mMetaData and notify the player.
            sp<ABuffer> metabuffer;
            CHECK(msg->findBuffer("buffer", &metabuffer));

            AString mimeType;
            sp<ABuffer> buffer;
            if (metabuffer->meta()->findString("mime", &mimeType) &&
                    metabuffer->meta()->findBuffer("pictureBuffer", &buffer)) {
                if (mMetaData == NULL) {
                    mMetaData = new MetaData;
                }
                mMetaData->setCString(kKeyAlbumArtMIME, mimeType.c_str());
                mMetaData->setData(kKeyAlbumArt, MetaData::TYPE_NONE, buffer->data(), buffer->size());
                ALOGI("kKeyAlbumArt set data: %s, datasize: %zu", mimeType.c_str(), buffer->size());

                sp<AMessage> notify = dupNotify();
                notify->setInt32("what", NuPlayer::Source::kWhatPicture);
                notify->post();
            }
            break;
        }
        case LiveSession::kWhatBufferingStart:
        {
            sp<AMessage> notify = dupNotify();
            notify->setInt32("what", kWhatBufferingStart);
            notify->post();
            break;
        }
        case LiveSession::kWhatBufferingEnd:
        {
            sp<AMessage> notify = dupNotify();
            notify->setInt32("what", kWhatBufferingEnd);
            notify->post();
            break;
        }
#endif
        default:
            TRESPASS();
    }
}