// Refresh this track's crypto parameters from |aMetaData|.  The track is
// flagged valid only when the crypto mode, the default IV size and the key
// material are all present; lookups stop at the first missing field.
void CryptoTrack::Update(sp<MetaData>& aMetaData) {
  valid = false;
  if (!aMetaData->findInt32(kKeyCryptoMode, &mode)) {
    return;
  }
  if (!aMetaData->findInt32(kKeyCryptoDefaultIVSize, &iv_size)) {
    return;
  }
  if (!FindData(aMetaData, kKeyCryptoKey, &key)) {
    return;
  }
  valid = true;
}
// Redirect tracks that the stock OMX decoders cannot handle to the
// FFMPEG-backed software components by rewriting |componentName| in place.
// Only decode paths are overridden (the audio checks require !isEncoder).
void FFMPEGSoftCodec::overrideComponentName(
        uint32_t /*quirks*/, const sp<AMessage> &msg,
        AString* componentName, AString* mime, int32_t isEncoder) {
    // WMV: a version key of 1 marks a variant the default decoder rejects.
    int32_t wmvVersion = 0;
    if (!strncasecmp(mime->c_str(), MEDIA_MIMETYPE_VIDEO_WMV,
            strlen(MEDIA_MIMETYPE_VIDEO_WMV)) &&
            msg->findInt32(ExtendedCodec::getMsgKey(kKeyWMVVersion), &wmvVersion)) {
        ALOGD("Found WMV version key %d", wmvVersion);
        if (wmvVersion == 1) {
            ALOGD("Use FFMPEG for unsupported WMV track");
            componentName->setTo("OMX.ffmpeg.wmv.decoder");
        }
    }
    // WMA: note the findInt32 result is NEGATED here, unlike the WMV and
    // AAC checks — a track with no encode-options key falls back to FFMPEG.
    // NOTE(review): presumably "no encode options" marks an unsupported
    // profile; confirm against the extractor that sets kKeyWMAEncodeOpt.
    int32_t encodeOptions = 0;
    if (!isEncoder &&
            !strncasecmp(mime->c_str(), MEDIA_MIMETYPE_AUDIO_WMA,
                strlen(MEDIA_MIMETYPE_AUDIO_WMA)) &&
            !msg->findInt32(ExtendedCodec::getMsgKey(kKeyWMAEncodeOpt), &encodeOptions)) {
        ALOGD("Use FFMPEG for unsupported WMA track");
        componentName->setTo("OMX.ffmpeg.wma.decoder");
    }
    // Google's decoder doesn't support MAIN profile
    int32_t aacProfile = 0;
    if (!isEncoder &&
            !strncasecmp(mime->c_str(), MEDIA_MIMETYPE_AUDIO_AAC,
                strlen(MEDIA_MIMETYPE_AUDIO_AAC)) &&
            msg->findInt32(ExtendedCodec::getMsgKey(kKeyAACAOT), &aacProfile)) {
        if (aacProfile == OMX_AUDIO_AACObjectMain) {
            ALOGD("Use FFMPEG for AAC MAIN profile");
            componentName->setTo("OMX.ffmpeg.aac.decoder");
        }
    }
}
// Configure the FFMPEG video OMX component on |nodeID| with the codec id
// and frame dimensions carried in |msg|.  Returns the OMX error code from
// get/setParameter, OK on success.  Missing message keys are fatal (CHECK).
status_t FFMPEGSoftCodec::setFFmpegVideoFormat(
        const sp<AMessage> &msg, sp<IOMX> OMXhandle, IOMX::node_id nodeID) {
    int32_t codec_id = 0;
    int32_t width = 0;
    int32_t height = 0;
    OMX_VIDEO_PARAM_FFMPEGTYPE param;

    ALOGD("setFFmpegVideoFormat");

    CHECK(msg->findInt32(ExtendedCodec::getMsgKey(kKeyCodecId), &codec_id));
    CHECK(msg->findInt32(ExtendedCodec::getMsgKey(kKeyWidth), &width));
    CHECK(msg->findInt32(ExtendedCodec::getMsgKey(kKeyHeight), &height));

    // BUG FIX: "&param" had been mangled to "¶m" (HTML-entity corruption
    // of "&para"), which does not compile; restored throughout.
    InitOMXParams(&param);
    param.nPortIndex = kPortIndexInput;

    // Read-modify-write the component's current settings.
    status_t err = OMXhandle->getParameter(
            nodeID, OMX_IndexParamVideoFFmpeg, &param, sizeof(param));
    if (err != OK)
        return err;

    param.eCodecId = codec_id;
    param.nWidth   = width;
    param.nHeight  = height;

    err = OMXhandle->setParameter(
            nodeID, OMX_IndexParamVideoFFmpeg, &param, sizeof(param));
    return err;
}
// Push the offload-relevant audio metadata (sample rate, channel mask, bit
// rate, encoder delay/padding) from |aMeta| down to the HAL via |aSink|.
// Keys absent from the metadata are simply not forwarded.
void AudioOffloadPlayer::SendMetaDataToHal(sp<AudioSink>& aSink,
                                           const sp<MetaData>& aMeta) {
  CHECK(aSink.get());

  int32_t sampleRate = 0;
  int32_t bitRate = 0;
  int32_t channelMask = 0;
  int32_t delaySamples = 0;
  int32_t paddingSamples = 0;

  AudioParameter param = AudioParameter();

  // Copy one int from the metadata into the outgoing parameter set, if set.
  auto forward = [&](uint32_t metaKey, const char* halKey, int32_t* value) {
    if (aMeta->findInt32(metaKey, value)) {
      param.addInt(String8(halKey), *value);
    }
  };

  forward(kKeySampleRate, AUDIO_OFFLOAD_CODEC_SAMPLE_RATE, &sampleRate);
  forward(kKeyChannelMask, AUDIO_OFFLOAD_CODEC_NUM_CHANNEL, &channelMask);
  forward(kKeyBitRate, AUDIO_OFFLOAD_CODEC_AVG_BIT_RATE, &bitRate);
  forward(kKeyEncoderDelay, AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES, &delaySamples);
  forward(kKeyEncoderPadding, AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES,
          &paddingSamples);

  AUDIO_OFFLOAD_LOG(PR_LOG_DEBUG, ("SendMetaDataToHal: bitRate %d,"
      " sampleRate %d, chanMask %d, delaySample %d, paddingSample %d",
      bitRate, sampleRate, channelMask, delaySamples, paddingSamples));

  aSink->SetParameters(param.toString());
}
// Configure the DTS audio OMX component on |nodeID| with the channel count
// and sample rate from |msg|, after programming the raw-audio output path.
// Returns the first error encountered, OK on success.
status_t FFMPEGSoftCodec::setDTSFormat(
        const sp<AMessage> &msg, sp<IOMX> OMXhandle, IOMX::node_id nodeID) {
    int32_t numChannels = 0;
    int32_t sampleRate = 0;
    // (unused "bitsPerSample" local removed)
    OMX_AUDIO_PARAM_DTSTYPE param;

    CHECK(msg->findInt32(ExtendedCodec::getMsgKey(kKeyChannelCount), &numChannels));
    CHECK(msg->findInt32(ExtendedCodec::getMsgKey(kKeySampleRate), &sampleRate));

    ALOGV("Channels: %d, SampleRate: %d", numChannels, sampleRate);

    status_t err = setRawAudioFormat(msg, OMXhandle, nodeID);
    if (err != OK)
        return err;

    // BUG FIX: "&param" had been mangled to "¶m" (HTML-entity corruption);
    // restored throughout.
    InitOMXParams(&param);
    param.nPortIndex = kPortIndexInput;

    err = OMXhandle->getParameter(
            nodeID, OMX_IndexParamAudioDts, &param, sizeof(param));
    if (err != OK)
        return err;

    param.nChannels = numChannels;
    param.nSamplingRate = sampleRate;

    return OMXhandle->setParameter(
            nodeID, OMX_IndexParamAudioDts, &param, sizeof(param));
}
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) { int32_t audio; CHECK(msg->findInt32("audio", &audio)); if (dropBufferWhileFlushing(audio, msg)) { return; } int32_t finalResult; CHECK(msg->findInt32("finalResult", &finalResult)); QueueEntry entry; entry.mOffset = 0; entry.mFinalResult = finalResult; if (audio) { if (mAudioQueue.empty() && mSyncQueues) { syncQueuesDone(); } mAudioQueue.push_back(entry); postDrainAudioQueue(); } else { if (mVideoQueue.empty() && mSyncQueues) { syncQueuesDone(); } mVideoQueue.push_back(entry); postDrainVideoQueue(); } }
void ARTSPConnection::onReconnect(const sp<AMessage> &msg) { ALOGV("onReconnect"); sp<AMessage> reply; CHECK(msg->findMessage("reply", &reply)); int32_t connectionID; CHECK(msg->findInt32("connection-id", &connectionID)); if ((connectionID != mConnectionID) || mState != CONNECTING) { // While we were attempting to connect, the attempt was // cancelled. reply->setInt32("result", -ECONNABORTED); reply->post(); if (mAddrHeader != NULL) { freeaddrinfo((struct addrinfo *)mAddrHeader); mAddrHeader = NULL; } return; } int32_t port; CHECK(msg->findInt32("port", &port)); if (!createSocketAndConnect(mAddrHeader, port, reply)) { ALOGV("Failed to reconnect"); reply->setInt32("result", -errno); mState = DISCONNECTED; mSocket = -1; reply->post(); freeaddrinfo((struct addrinfo *)mAddrHeader); mAddrHeader = NULL; } }
// Software renderer targeting |nativeWindow|.  Reads the color format,
// dimensions and optional rotation from |meta| and programs the native
// window (usage flags, scaling mode, buffer geometry, transform).
CedarXSoftwareRenderer::CedarXSoftwareRenderer(
        const sp<ANativeWindow> &nativeWindow, const sp<MetaData> &meta)
    : mYUVMode(None),
      mNativeWindow(nativeWindow) {
    int32_t tmp;
    CHECK(meta->findInt32(kKeyColorFormat, &tmp));
    mColorFormat = (OMX_COLOR_FORMATTYPE)tmp;

    //CHECK(meta->findInt32(kKeyScreenID, &screenID));
    //CHECK(meta->findInt32(kKeyColorFormat, &halFormat));
    CHECK(meta->findInt32(kKeyWidth, &mWidth));
    CHECK(meta->findInt32(kKeyHeight, &mHeight));

    // Rotation is optional; default to upright.
    int32_t rotationDegrees;
    if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
        rotationDegrees = 0;
    }

    int halFormat;
    size_t bufWidth, bufHeight;

    // NOTE(review): the stored source color format is not consulted here —
    // window buffers are always requested as YV12 at the stream dimensions.
    halFormat = HAL_PIXEL_FORMAT_YV12;
    bufWidth = mWidth;
    bufHeight = mHeight;

    CHECK(mNativeWindow != NULL);
    CHECK_EQ(0, native_window_set_usage(
            mNativeWindow.get(),
            GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
            | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP));

    CHECK_EQ(0, native_window_set_scaling_mode(
            mNativeWindow.get(),
            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW));

    // Width must be multiple of 32???
    CHECK_EQ(0, native_window_set_buffers_geometry(
            mNativeWindow.get(),
            bufWidth,
            bufHeight,
            halFormat));

    // Map rotation degrees onto the HAL transform flags; unknown values
    // fall back to no transform.
    uint32_t transform;
    switch (rotationDegrees) {
        case 0: transform = 0; break;
        case 90: transform = HAL_TRANSFORM_ROT_90; break;
        case 180: transform = HAL_TRANSFORM_ROT_180; break;
        case 270: transform = HAL_TRANSFORM_ROT_270; break;
        default: transform = 0; break;
    }

    if (transform) {
        CHECK_EQ(0, native_window_set_buffers_transform(
                mNativeWindow.get(), transform));
    }
}
// Convert |meta| into an AMessage codec format, carrying over the DASH /
// DRM specific flags and collecting any codec-specific-data ("csd-N")
// buffers into mCSD.  Returns the constructed format message.
sp<AMessage> DashPlayer::Decoder::makeFormat(const sp<MetaData> &meta) {
    CHECK(mCSD.isEmpty());

    sp<AMessage> msg;
    uint32_t type;
    const void *data;
    size_t size;
    CHECK_EQ(convertMetaDataToMessage(meta, &msg), (status_t)OK);

    int32_t value;
    if (meta->findInt32(kKeySmoothStreaming, &value)) {
        msg->setInt32("smooth-streaming", value);
    }
    // NOTE(review): mere presence of these keys — not their value — switches
    // on secure mode; confirm the extractor never sets them to 0.
    if (meta->findInt32(kKeyIsDRM, &value)) {
        msg->setInt32("secure-op", 1);
    }
    if (meta->findInt32(kKeyRequiresSecureBuffers, &value)) {
        msg->setInt32("requires-secure-buffers", 1);
    }
    if (meta->findInt32(kKeyEnableDecodeOrder, &value)) {
        msg->setInt32("decodeOrderEnable", value);
    }

    if (meta->findData(kKeyAacCodecSpecificData, &type, &data, &size)) {
        if (size > 0 && data != NULL) {
            sp<ABuffer> buffer = new ABuffer(size);
            if (buffer != NULL) {
                memcpy(buffer->data(), data, size);
                buffer->meta()->setInt32("csd", true);
                buffer->meta()->setInt64("timeUs", 0);
                msg->setBuffer("csd-0", buffer);
            } else {
                ALOGE("kKeyAacCodecSpecificData ABuffer Allocation failed");
            }
        } else {
            ALOGE("Not a valid data pointer or size == 0");
        }
    }

    mCSDIndex = 0;
    for (size_t i = 0;; ++i) {
        sp<ABuffer> csd;
        // BUG FIX: |i| is size_t but was formatted with %d (undefined
        // behavior on LP64); cast keeps the generated keys correct.
        if (!msg->findBuffer(
                StringPrintf("csd-%d", static_cast<int>(i)).c_str(), &csd)) {
            break;
        }
        mCSD.push(csd);
    }

    return msg;
}
void SimpleSoftOMXComponent::onMessageReceived(const sp<AMessage> &msg) { Mutex::Autolock autoLock(mLock); uint32_t msgType = msg->what(); ALOGV("msgType = %d", msgType); switch (msgType) { case kWhatSendCommand: { int32_t cmd, param; CHECK(msg->findInt32("cmd", &cmd)); CHECK(msg->findInt32("param", ¶m)); onSendCommand((OMX_COMMANDTYPE)cmd, (OMX_U32)param); break; } case kWhatEmptyThisBuffer: case kWhatFillThisBuffer: { OMX_BUFFERHEADERTYPE *header; CHECK(msg->findPointer("header", (void **)&header)); CHECK(mState == OMX_StateExecuting && mTargetState == mState); bool found = false; size_t portIndex = (kWhatEmptyThisBuffer == msgType)? header->nInputPortIndex: header->nOutputPortIndex; PortInfo *port = &mPorts.editItemAt(portIndex); for (size_t j = 0; j < port->mBuffers.size(); ++j) { BufferInfo *buffer = &port->mBuffers.editItemAt(j); if (buffer->mHeader == header) { CHECK(!buffer->mOwnedByUs); buffer->mOwnedByUs = true; CHECK((msgType == kWhatEmptyThisBuffer && port->mDef.eDir == OMX_DirInput) || (port->mDef.eDir == OMX_DirOutput)); port->mQueue.push_back(buffer); onQueueFilled(portIndex); found = true; break; } } CHECK(found); break; } default: TRESPASS(); break; } }
// static void ColorUtils::getColorConfigFromFormat( const sp<AMessage> &format, int32_t *range, int32_t *standard, int32_t *transfer) { if (!format->findInt32("color-range", range)) { *range = kColorRangeUnspecified; } if (!format->findInt32("color-standard", standard)) { *standard = kColorStandardUnspecified; } if (!format->findInt32("color-transfer", transfer)) { *transfer = kColorTransferUnspecified; } }
// Open camera 0, configure its preview size/format/framerate from
// |videoMetaData|, attach |previewSurface|, and record the driver's
// supported preview frame rates in mSupportedFps.
// Returns OK on success, UNKNOWN_ERROR when the camera cannot be opened.
status_t CameraController::setupCamera(const sp<MetaData> &videoMetaData,
                                       const sp<Surface> &previewSurface) {
    F_LOG;

    // Zero-initialize so a missing metadata key cannot leave garbage.
    int width = 0, height = 0, fps = 0;
    videoMetaData->findInt32(kKeyWidth, &width);
    videoMetaData->findInt32(kKeyHeight, &height);
    videoMetaData->findInt32(kKeySampleRate, &fps);

    mCamera = android::Camera::connect(0);
    LOGD("After Camera::connect ");
    if (mCamera == NULL) {
        LOGE("************************* Failed to open camera ************************* ");
        return UNKNOWN_ERROR;
    }

    android::String8 s = mCamera->getParameters();
    mCameraParams = new android::CameraParameters(s);
    LOGV("Getting camera parameters");

    char buf[50];
    // snprintf instead of sprintf: never overruns buf.
    snprintf(buf, sizeof(buf), "%ux%u", width, height);
    mCameraParams->set("video-size", buf);
    mCameraParams->set("preview-format", "yuv420sp");
    mCameraParams->setPreviewSize(width, height);
    mCameraParams->setPreviewFrameRate(fps);
    LOGV("Setting camera params preview_size:%dx%d FPS:%d", width, height, fps);
    mCamera->setParameters(mCameraParams->flatten());
    mCameraSource = android::CameraSource::CreateFromCamera(mCamera);
    LOGV("Setting preview");
    mCamera->setPreviewDisplay(previewSurface);

    // Get supported preview frame rates from camera driver
    memset(mSupportedFps, 0, sizeof(mSupportedFps));
    const char *fpsValues =
            mCameraParams->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
    if (fpsValues != NULL) {  // guard: key may be absent from the driver
        LOGV("Supported camera preview framerates: %s", fpsValues);
        // BUG FIX: the original allocated strlen(fpsValues) bytes — one
        // short of what strcpy needs for the NUL — and leaked the buffer.
        char *tokenString = new char[strlen(fpsValues) + 1];
        strcpy(tokenString, fpsValues);
        for (char *fpsToken = strtok(tokenString, ","); fpsToken != NULL;
                fpsToken = strtok(NULL, ",")) {
            int fpsValue = atoi(fpsToken);
            // Reject negative/out-of-range values before indexing.
            if (fpsValue >= 0 && fpsValue < MAX_FRAME_RATE_VALUES) {
                mSupportedFps[fpsValue] = 1;
            }
        }
        delete[] tokenString;
    }

    mInitialized = true;
    setFramerate(fps);
    return OK;
}
// static void ColorUtils::copyColorConfig(const sp<AMessage> &source, sp<AMessage> &target) { // 0 values are unspecified int32_t value; if (source->findInt32("color-range", &value)) { target->setInt32("color-range", value); } if (source->findInt32("color-standard", &value)) { target->setInt32("color-standard", value); } if (source->findInt32("color-transfer", &value)) { target->setInt32("color-transfer", value); } }
void RTSPSource::onDisconnected(const sp<AMessage> &msg) { status_t err; CHECK(msg != NULL); CHECK(msg->findInt32("result", &err)); CHECK_NE(err, (status_t)OK); CHECK(mLooper != NULL); CHECK(mHandler != NULL); mLooper->unregisterHandler(mHandler->id()); mHandler.clear(); mState = DISCONNECTED; mFinalResult = err; if (mDisconnectReplyID != 0) { finishDisconnectIfPossible(); } if (mListener) { // err is always set to UNKNOWN_ERROR from // Android right now, rename err to NS_ERROR_NET_TIMEOUT. mListener->OnDisconnected(0, NS_ERROR_NET_TIMEOUT); } mAudioTrack = NULL; mVideoTrack = NULL; mTracks.clear(); }
void DirectRenderer::onDecoderNotify(const sp<AMessage> &msg) { size_t trackIndex; CHECK(msg->findSize("trackIndex", &trackIndex)); int32_t what; CHECK(msg->findInt32("what", &what)); switch (what) { case DecoderContext::kWhatOutputBufferReady: { size_t index; CHECK(msg->findSize("index", &index)); int64_t timeUs; CHECK(msg->findInt64("timeUs", &timeUs)); sp<ABuffer> buffer; CHECK(msg->findBuffer("buffer", &buffer)); queueOutputBuffer(trackIndex, index, timeUs, buffer); break; } default: TRESPASS(); } }
//video status_t FFMPEGSoftCodec::setWMVFormat( const sp<AMessage> &msg, sp<IOMX> OMXhandle, IOMX::node_id nodeID) { int32_t version = 0; OMX_VIDEO_PARAM_WMVTYPE paramWMV; if (!msg->findInt32(ExtendedCodec::getMsgKey(kKeyWMVVersion), &version)) { ALOGE("WMV version not detected"); return ERROR_UNSUPPORTED; } InitOMXParams(¶mWMV); paramWMV.nPortIndex = kPortIndexInput; status_t err = OMXhandle->getParameter( nodeID, OMX_IndexParamVideoWmv, ¶mWMV, sizeof(paramWMV)); if (err != OK) return err; if (version == kTypeWMVVer_7) { paramWMV.eFormat = OMX_VIDEO_WMVFormat7; } else if (version == kTypeWMVVer_8) { paramWMV.eFormat = OMX_VIDEO_WMVFormat8; } else if (version == kTypeWMVVer_9) { paramWMV.eFormat = OMX_VIDEO_WMVFormat9; } err = OMXhandle->setParameter( nodeID, OMX_IndexParamVideoWmv, ¶mWMV, sizeof(paramWMV)); return err; }
// The downstream consumer returned output buffer |buffer-id| to us.
// Reclaims ownership and, when the filter is running and the buffer is
// current, makes it available for further processing.
void MediaFilter::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    // NOTE(review): findBufferByID is dereferenced unchecked below —
    // confirm it can never return NULL for an ID we handed out.
    BufferInfo *info = findBufferByID(kPortIndexOutput, bufferID);

    if (mState != STARTED) {
        // we're not running, so we'll just keep that buffer...
        info->mStatus = BufferInfo::OWNED_BY_US;
        return;
    }

    if (info->mGeneration != mGeneration) {
        ALOGV("Caught a stale output buffer [ID %d]", bufferID);
        // buffer is stale (taken before a flush/shutdown) - keep it
        CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US);
        return;
    }

    // Normal path: buffer comes back from upstream and rejoins the pool.
    CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM);
    info->mStatus = BufferInfo::OWNED_BY_US;
    mAvailableOutputBuffers.push_back(info);

    processBuffers();

    ALOGV("Handled kWhatOutputBufferDrained. [ID %u]", bufferID);
}
void NuPlayer::RTSPSource::onDisconnected(const sp<AMessage> &msg) { if (mState == DISCONNECTED) { return; } status_t err; CHECK(msg->findInt32("result", &err)); CHECK_NE(err, (status_t)OK); mLooper->unregisterHandler(mHandler->id()); mHandler.clear(); if (mState == CONNECTING) { // We're still in the preparation phase, signal that it // failed. notifyPrepared(err); } mState = DISCONNECTED; setError(err); if (mDisconnectReplyID != 0) { finishDisconnectIfPossible(); } }
// Handle a "free buffer" message for output slot |index|.  With the worker
// thread running, |reuse| == 1 re-arms the slot with its existing graphic
// buffer and |reuse| == 0 swaps in a freshly dequeued one; after the
// thread has quit the buffer is cancelled back to the native window.
void NuPlayerVPPProcessor::onFreeBuffer(const sp<AMessage> &msg) {
    int32_t reuse = 0, index = 0;
    CHECK(msg->findInt32("reuse", &reuse));
    CHECK(msg->findInt32("index", &index));
    /*
     * flushShutdown quits threads
     * and there are still some rendering buffer not processed, process here
     */
    if (!mThreadRunning) {
        // Thread already gone: return the buffer to the native window and
        // clear the slot instead of recycling it.
        ACodec::BufferInfo * info = findBufferByGraphicBuffer(mOutput[index].mGraphicBuffer);
        if (info != NULL) {
            LOGV("cancel buffer after thread quit ; graphicBuffer = %p", mOutput[index].mGraphicBuffer.get());
            cancelBufferToNativeWindow(info);
            mOutput[index].resetBuffer(NULL);
        }
    } else {
        if (reuse == 1) {
            // Keep the same graphic buffer attached to this slot.
            mOutput[index].resetBuffer(mOutput[index].mGraphicBuffer);
        } else if (reuse == 0) {
            // Replace with a new buffer from the native window; failure to
            // dequeue is fatal.
            ACodec::BufferInfo *info = dequeueBufferFromNativeWindow();
            CHECK(info != NULL);
            mOutput[index].resetBuffer(info->mGraphicBuffer);
        }
        // NOTE(review): any |reuse| value other than 0/1 is silently ignored.
    }
}
bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const { if (targetFormat == NULL) { return true; } AString mime; if (!targetFormat->findString("mime", &mime)) { return false; } if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) { // field-by-field comparison const char * keys[] = { "channel-count", "sample-rate", "is-adts" }; for (unsigned int i = 0; i < sizeof(keys) / sizeof(keys[0]); i++) { int32_t oldVal, newVal; if (!mFormat->findInt32(keys[i], &oldVal) || !targetFormat->findInt32(keys[i], &newVal) || oldVal != newVal) { return false; } } sp<ABuffer> oldBuf, newBuf; if (mFormat->findBuffer("csd-0", &oldBuf) && targetFormat->findBuffer("csd-0", &newBuf)) { if (oldBuf->size() != newBuf->size()) { return false; } return !memcmp(oldBuf->data(), newBuf->data(), oldBuf->size()); } } return false; }
// Configure the RealVideo OMX component on |nodeID| according to the RV
// version carried in |msg| (missing version is fatal via CHECK).
// Returns the OMX error code, OK on success.
status_t FFMPEGSoftCodec::setRVFormat(
        const sp<AMessage> &msg, sp<IOMX> OMXhandle, IOMX::node_id nodeID) {
    int32_t version = 0;
    OMX_VIDEO_PARAM_RVTYPE paramRV;

    CHECK(msg->findInt32(ExtendedCodec::getMsgKey(kKeyRVVersion), &version));

    // BUG FIX: "&paramRV" had been mangled to "¶mRV" (HTML-entity
    // corruption); restored throughout.
    InitOMXParams(&paramRV);
    paramRV.nPortIndex = kPortIndexInput;

    status_t err = OMXhandle->getParameter(
            nodeID, OMX_IndexParamVideoRv, &paramRV, sizeof(paramRV));
    if (err != OK)
        return err;

    if (version == kTypeRVVer_G2) {
        paramRV.eFormat = OMX_VIDEO_RVFormatG2;
    } else if (version == kTypeRVVer_8) {
        paramRV.eFormat = OMX_VIDEO_RVFormat8;
    } else if (version == kTypeRVVer_9) {
        paramRV.eFormat = OMX_VIDEO_RVFormat9;
    }
    // Unrecognized versions keep the eFormat returned by getParameter.

    err = OMXhandle->setParameter(
            nodeID, OMX_IndexParamVideoRv, &paramRV, sizeof(paramRV));
    return err;
}
void NuPlayer::HTTPLiveSource::onSessionNotify_l(const sp<AMessage> &msg) { int32_t what; CHECK(msg->findInt32("what", &what)); if(what == LiveSession::kWhatPicture) { sp<ABuffer> metabuffer; CHECK(msg->findBuffer("buffer", &metabuffer)); AString mimeType; sp<ABuffer> buffer; if(((metabuffer)->meta()->findString("mime", &mimeType)) && ((metabuffer)->meta()->findBuffer("pictureBuffer", &buffer))) { if (mMetaData == NULL) { mMetaData = new MetaData; } mMetaData->setCString(kKeyAlbumArtMIME, mimeType.c_str()); mMetaData->setData(kKeyAlbumArt, MetaData::TYPE_NONE, buffer->data(), buffer->size()); ALOGI("kKeyAlbumArt set Data :%s, datasize:%d", mimeType.c_str(), buffer->size()); sp<AMessage> notify = dupNotify(); notify->setInt32("what", NuPlayer::Source::kWhatPicture); notify->post(); } } else if (what == LiveSession::kWhatBufferingStart) { sp<AMessage> notify = dupNotify(); notify->setInt32("what", kWhatBufferingStart); notify->post(); } else if (what == LiveSession::kWhatBufferingEnd) { sp<AMessage> notify = dupNotify(); notify->setInt32("what", kWhatBufferingEnd); notify->post(); } }
void RTSPSource::onDisconnected(const sp<AMessage> &msg) { status_t err; CHECK(msg != NULL); CHECK(msg->findInt32("result", &err)); if ((mLooper != NULL) && (mHandler != NULL)) { mLooper->unregisterHandler(mHandler->id()); mHandler.clear(); } mState = DISCONNECTED; mFinalResult = err; if (mDisconnectReplyID != 0) { finishDisconnectIfPossible(); } if (mListener) { nsresult reason = (err == OK) ? NS_OK : NS_ERROR_NET_TIMEOUT; mListener->OnDisconnected(0, reason); // Break the cycle reference between RtspController and us. mListener = nullptr; } mAudioTrack = NULL; mVideoTrack = NULL; mTracks.clear(); }
void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { case kWhatRepeatLastFrame: { Mutex::Autolock autoLock(mMutex); int32_t generation; CHECK(msg->findInt32("generation", &generation)); if (generation != mRepeatLastFrameGeneration) { // stale break; } if (!mExecuting || mNumFramesAvailable > 0) { break; } bool success = repeatLatestBuffer_l(); if (success) { ALOGV("repeatLatestBuffer_l SUCCESS"); } else { ALOGV("repeatLatestBuffer_l FAILURE"); mRepeatBufferDeferred = true; } break; } default: TRESPASS(); } }
// Flush the audio or video queue — selected by the "audio" int32 in |msg|
// — then clear the matching flushing/drain-pending state and notify.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    syncQueuesDone();

    if (audio) {
        flushQueue(&mAudioQueue);

        // mFlushLock is held from here to the end of this branch.
        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        // Bumping the generation makes already-posted drain messages stale.
        ++mAudioQueueGeneration;
    } else {
        flushQueue(&mVideoQueue);

        // mFlushLock is held from here to the end of this branch.
        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;
    }

    notifyFlushComplete(audio);
}
// Second phase of an asynchronous connect: polls the socket for
// writability and inspects SO_ERROR to decide between success and failure,
// posting the outcome on |reply|.  A timeout re-posts |msg| to try again.
void ARTSPConnection::onCompleteConnection(const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    int32_t connectionID;
    CHECK(msg->findInt32("connection-id", &connectionID));

    if ((connectionID != mConnectionID) || mState != CONNECTING) {
        // While we were attempting to connect, the attempt was
        // cancelled.
        reply->setInt32("result", -ECONNABORTED);
        reply->post();
        return;
    }

    struct timeval tv;
    tv.tv_sec = 0;
    tv.tv_usec = kSelectTimeoutUs;

    fd_set ws;
    FD_ZERO(&ws);
    FD_SET(mSocket, &ws);

    // A socket that becomes writable has finished its connect() attempt
    // (successfully or not).
    int res = select(mSocket + 1, NULL, &ws, NULL, &tv);
    CHECK_GE(res, 0);

    if (res == 0) {
        // Timed out. Not yet connected.
        msg->post();  // re-arm this same completion check
        return;
    }

    // SO_ERROR tells us how the connect() attempt ended.
    int err;
    socklen_t optionLen = sizeof(err);
    CHECK_EQ(getsockopt(mSocket, SOL_SOCKET, SO_ERROR, &err, &optionLen), 0);
    CHECK_EQ(optionLen, (socklen_t)sizeof(err));

    if (err != 0) {
        LOGE("err = %d (%s)", err, strerror(err));

        reply->setInt32("result", -err);

        mState = DISCONNECTED;
        if (mUIDValid) {
            HTTPBase::UnRegisterSocketUserTag(mSocket);
        }
        close(mSocket);
        mSocket = -1;
    } else {
        reply->setInt32("result", OK);
        mState = CONNECTED;
        mNextCSeq = 1;

        postReceiveReponseEvent();
    }

    reply->post();
}
// Fetch the int32 stored under |mKey| in |mMetaData|, defaulting to 0
// when the key is absent.
static int32_t FindInt32(sp<MetaData>& mMetaData, uint32_t mKey) {
  int32_t value = 0;
  return mMetaData->findInt32(mKey, &value) ? value : 0;
}
void Serializer::onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { case kWhatAddSource: { ssize_t index = onAddSource(msg); sp<AMessage> response = new AMessage; if (index < 0) { response->setInt32("err", index); } else { response->setSize("index", index); } uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); break; } case kWhatStart: case kWhatStop: { status_t err = (msg->what() == kWhatStart) ? onStart() : onStop(); sp<AMessage> response = new AMessage; response->setInt32("err", err); uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); break; } case kWhatPoll: { int32_t generation; CHECK(msg->findInt32("generation", &generation)); if (generation != mPollGeneration) { break; } int64_t delayUs = onPoll(); if (delayUs >= 0ll) { schedulePoll(delayUs); } break; } default: TRESPASS(); } }
// (Re)configure the encoder directly from |aFormat|.  A running codec is
// stopped first, reconfigured, then restarted.  Returns NS_OK on success,
// NS_ERROR_INVALID_ARG for bad dimensions/framerate, NS_ERROR_FAILURE for
// codec errors.
nsresult OMXVideoEncoder::ConfigureDirect(sp<AMessage>& aFormat,
                                          BlobFormat aBlobFormat) {
    // We now allow re-configuration to handle resolution/framerate/etc changes
    if (mStarted) {
        Stop();
    }
    MOZ_ASSERT(!mStarted, "OMX Stop() failed?");

    int width = 0;
    int height = 0;
    int frameRate = 0;
    // findInt32 return values are ignored on purpose: the NS_ENSURE_TRUE
    // below rejects any key that was absent (left at its 0 default).
    aFormat->findInt32("width", &width);
    aFormat->findInt32("height", &height);
    aFormat->findInt32("frame-rate", &frameRate);
    NS_ENSURE_TRUE(width > 0 && height > 0 && frameRate > 0,
                   NS_ERROR_INVALID_ARG);

    // Limitation of soft AVC/H.264 encoder running on emulator in stagefright.
    static bool emu = IsRunningOnEmulator();
    if (emu) {
        if (width > 352 || height > 288) {
            CODEC_ERROR("SoftAVCEncoder doesn't support resolution larger than CIF");
            return NS_ERROR_INVALID_ARG;
        }
        // Emulator path: force a low AVC level and variable bitrate.
        aFormat->setInt32("level", OMX_VIDEO_AVCLevel2);
        aFormat->setInt32("bitrate-mode", OMX_Video_ControlRateVariable);
    }

    status_t result = mCodec->configure(aFormat, nullptr, nullptr,
                                        MediaCodec::CONFIGURE_FLAG_ENCODE);
    NS_ENSURE_TRUE(result == OK, NS_ERROR_FAILURE);

    // Remember the accepted geometry/output format only after configure
    // succeeded.
    mWidth = width;
    mHeight = height;
    mBlobFormat = aBlobFormat;

    result = Start();

    return result == OK ? NS_OK : NS_ERROR_FAILURE;
}
void NuPlayer::DashMpdSource::onSessionNotify(const sp<AMessage> &msg) { int32_t what; CHECK(msg->findInt32("what", &what)); switch (what) { case DashSession::kWhatPrepared: { notifyVideoSizeChanged(0, 0); uint32_t flags = FLAG_CAN_PAUSE; if (mDashSession->isSeekable()) { flags |= FLAG_CAN_SEEK; flags |= FLAG_CAN_SEEK_BACKWARD; flags |= FLAG_CAN_SEEK_FORWARD; } if (mDashSession->hasDynamicDuration()) { flags |= FLAG_DYNAMIC_DURATION; } notifyFlagsChanged(flags); notifyPrepared(); break; }; case DashSession::kWhatPreparationFailed: { status_t err; CHECK(msg->findInt32("err", &err)); notifyPrepared(err); break; }; default: TRESPASS(); }; };