// Performs a pending deferred texture conversion, if one was requested.
// Returns NO_ERROR when nothing is pending, BAD_VALUE on a bad slot setup,
// and OK when either buffer is missing (nothing to convert) or after
// delegating to the two-buffer convert() overload.
status_t SurfaceTexture::convert() {
    if (!mNeedsConversion)
        return NO_ERROR;

    // Validate both slot indices before touching the slot arrays.
    const bool bltSlotBad = (mConversionBltSlot < 0) ||
            (mConversionBltSlot >= BufferQueue::NUM_BLIT_BUFFER_SLOTS);
    const bool srcSlotBad = (mConversionSrcSlot < 0) ||
            (mConversionSrcSlot >= BufferQueue::NUM_BUFFER_SLOTS);
    if (bltSlotBad || srcSlotBad) {
        ALOGE_IF(STE_DEFERDBG, "%s: Incorrect setup for deferred "
                "texture conversion:\n"
                "mConversionSrcSlot=%d mConversionBltSlot=%d",
                __FUNCTION__, mConversionSrcSlot, mConversionBltSlot);
        return BAD_VALUE;
    }

    // A missing source or destination buffer is not an error:
    // there is simply nothing to convert yet.
    if (mEglSlots[mConversionSrcSlot].mGraphicBuffer == NULL) {
        ALOGI_IF(STE_DEFERDBG, "%s: NULL source for deferred texture conversion.",
                __FUNCTION__);
        return OK;
    }
    if (mBlitSlots[mConversionBltSlot].mGraphicBuffer == NULL) {
        ALOGI_IF(STE_DEFERDBG, "%s: NULL destination for deferred "
                "texture conversion.", __FUNCTION__);
        return OK;
    }

    return convert(mEglSlots[mConversionSrcSlot].mGraphicBuffer,
                   mBlitSlots[mConversionBltSlot].mGraphicBuffer);
}
// This function reads the sysfs node to read MDP capabilities // and parses and updates information accordingly. bool MDPVersion::updateSplitInfo() { if(mMDPVersion >= MDSS_V5) { char split[64] = {0}; FILE* fp = fopen("/sys/class/graphics/fb0/msm_fb_split", "r"); if(fp){ //Format "left right" space as delimiter if(fread(split, sizeof(char), 64, fp)) { split[sizeof(split) - 1] = '\0'; mSplit.mLeft = atoi(split); ALOGI_IF(mSplit.mLeft, "Left Split=%d", mSplit.mLeft); char *rght = strpbrk(split, " "); if(rght) mSplit.mRight = atoi(rght + 1); ALOGI_IF(mSplit.mRight, "Right Split=%d", mSplit.mRight); } } else { ALOGE("Failed to open mdss_fb_split node"); return false; } if(fp) fclose(fp); } return true; }
/*
 * Enabling or disabling kernel driver for Proximity Sensor.
 *
 * Writes '1' or '0' to the sensor's sysfs "value_now" node. Returns 0 on
 * success, -1 if the node could not be opened or written. The cached
 * mEnabled state is updated regardless, matching the original behavior
 * callers rely on.
 */
int ProximitySensor::enable (int32_t handle, int en)
{
    int newState = en ? 1 : 0;
    int err = 0;

    if (newState != mEnabled) {
        if (!mEnabled && dev_name != NULL) {
            open_device();
        }

        char sysfs [PATH_MAX];
        // FIX: snprintf instead of strcpy/strcat — bounded and always
        // NUL-terminated even if I2C is unexpectedly long.
        snprintf(sysfs, sizeof(sysfs), "%s%s", I2C, "value_now");
        ALOGI_IF (DEBUG, "proximitysensor enable.open(%s), en (%d)", sysfs, en);

        int fd = open (sysfs, O_RDWR);
        if (fd < 0) {
            ALOGE ("proximitysensor couldn't open '%s' input device", sysfs);
            err = -1;
        } else {
            // sensor enabling in sysfs
            char buf [2];
            buf [0] = newState ? '1' : '0';
            buf [1] = '\0';
            // FIX: the write() result was silently discarded; report a
            // failed sysfs write to the caller instead.
            if (write (fd, buf, sizeof(buf)) < 0) {
                ALOGE ("proximitysensor couldn't write to '%s'", sysfs);
                err = -1;
            }
            close (fd);
            setInitialState();
        }
    }

    mEnabled = newState;
    mHasPendingEvent = true;

    if (!mEnabled && dev_name != NULL) {
        close_device();
    }
    return err;
}
// Starts PCM audio playback: starts the source if necessary, primes the
// first buffer (tolerating an initial INFO_FORMAT_CHANGED), resolves the
// sample rate / channel configuration, then opens either the AudioSink or
// a raw AudioTrack. On any failure the first buffer is released and the
// source is stopped again if this function started it.
status_t AudioPlayer::start(bool sourceAlreadyStarted) {
    CHECK(!mStarted);
    CHECK(mSource != NULL);

    status_t err;
    if (!sourceAlreadyStarted) {
        err = mSource->start();
        if (err != OK) {
            return err;
        }
    }

    // We allow an optional INFO_FORMAT_CHANGED at the very beginning
    // of playback, if there is one, getFormat below will retrieve the
    // updated format, if there isn't, we'll stash away the valid buffer
    // of data to be used on the first audio callback.
    CHECK(mFirstBuffer == NULL);

    MediaSource::ReadOptions options;
    if (mSeeking) {
        options.setSeekTo(mSeekTimeUs);
        mSeeking = false;
    }

    mFirstBufferResult = mSource->read(&mFirstBuffer, &options);
    if (mFirstBufferResult == INFO_FORMAT_CHANGED) {
        ALOGV("INFO_FORMAT_CHANGED!!!");
        CHECK(mFirstBuffer == NULL);
        mFirstBufferResult = OK;
        mIsFirstBuffer = false;
    } else {
        mIsFirstBuffer = true;
    }

    sp<MetaData> format = mSource->getFormat();
    const char *mime;
    bool success = format->findCString(kKeyMIMEType, &mime);
    CHECK(success);
    // This player only accepts raw PCM input.
    CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));

    success = format->findInt32(kKeySampleRate, &mSampleRate);
    CHECK(success);

    int32_t numChannels, channelMask;
    success = format->findInt32(kKeyChannelCount, &numChannels);
    CHECK(success);

    if (!format->findInt32(kKeyChannelMask, &channelMask)) {
        // log only when there's a risk of ambiguity of channel mask selection
        ALOGI_IF(numChannels > 2,
                "source format didn't specify channel mask, using (%d) channel order",
                numChannels);
        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
    }

    if (mAudioSink.get() != NULL) {
        // FIX: use the function-scope err rather than shadowing it with a
        // second local status_t declaration.
        err = mAudioSink->open(
                mSampleRate, numChannels, channelMask, AUDIO_FORMAT_PCM_16_BIT,
                DEFAULT_AUDIOSINK_BUFFERCOUNT,
                &AudioPlayer::AudioSinkCallback,
                this,
                (mAllowDeepBuffering ?
                        AUDIO_OUTPUT_FLAG_DEEP_BUFFER :
                        AUDIO_OUTPUT_FLAG_NONE));
        if (err != OK) {
            if (mFirstBuffer != NULL) {
                mFirstBuffer->release();
                mFirstBuffer = NULL;
            }

            if (!sourceAlreadyStarted) {
                mSource->stop();
            }

            return err;
        }

        mLatencyUs = (int64_t)mAudioSink->latency() * 1000;
        mFrameSize = mAudioSink->frameSize();

        mAudioSink->start();
    } else {
        // playing to an AudioTrack, set up mask if necessary
        audio_channel_mask_t audioMask = channelMask == CHANNEL_MASK_USE_CHANNEL_ORDER ?
                audio_channel_out_mask_from_count(numChannels) : channelMask;
        if (0 == audioMask) {
            return BAD_VALUE;
        }

        mAudioTrack = new AudioTrack(
                AUDIO_STREAM_MUSIC, mSampleRate, AUDIO_FORMAT_PCM_16_BIT, audioMask,
                0, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this, 0);

        if ((err = mAudioTrack->initCheck()) != OK) {
            delete mAudioTrack;
            mAudioTrack = NULL;

            if (mFirstBuffer != NULL) {
                mFirstBuffer->release();
                mFirstBuffer = NULL;
            }

            if (!sourceAlreadyStarted) {
                mSource->stop();
            }

            return err;
        }

        mLatencyUs = (int64_t)mAudioTrack->latency() * 1000;
        mFrameSize = mAudioTrack->frameSize();

        mAudioTrack->start();
    }

    mStarted = true;
    mPinnedTimeUs = -1ll;

    return OK;
}
// Starts tunneled audio playback: starts the source if necessary, spawns
// the extractor thread, primes the first buffer, maps the mime type to a
// tunnel-capable audio format, then opens a direct/tunnel AudioSink
// session and wakes the extractor.
status_t TunnelPlayer::start(bool sourceAlreadyStarted) {
    CHECK(!mStarted);
    CHECK(mSource != NULL);

    ALOGV("start: sourceAlreadyStarted %d", sourceAlreadyStarted);
    //Check if the source is started, start it
    status_t err;
    if (!sourceAlreadyStarted) {
        err = mSource->start();
        if (err != OK) {
            return err;
        }
    }

    //Create extractor thread, read and initialize all the
    //mutexes and coditional variables
    createThreads();
    ALOGV("Thread Created.");

    // We allow an optional INFO_FORMAT_CHANGED at the very beginning
    // of playback, if there is one, getFormat below will retrieve the
    // updated format, if there isn't, we'll stash away the valid buffer
    // of data to be used on the first audio callback.
    CHECK(mFirstBuffer == NULL);

    MediaSource::ReadOptions options;
    if (mSeeking) {
        options.setSeekTo(mSeekTimeUs);
        mSeeking = false;
    }

    mFirstBufferResult = mSource->read(&mFirstBuffer, &options);
    if (mFirstBufferResult == INFO_FORMAT_CHANGED) {
        ALOGV("INFO_FORMAT_CHANGED!!!");
        CHECK(mFirstBuffer == NULL);
        mFirstBufferResult = OK;
        mIsFirstBuffer = false;
    } else {
        mIsFirstBuffer = true;
    }

    sp<MetaData> format = mSource->getFormat();
    const char *mime;
    bool success = format->findCString(kKeyMIMEType, &mime);
    // FIX: validate the metadata lookup *before* dereferencing mime.
    // Previously mime was passed to strcasecmp() first, so a failed
    // lookup meant reading an uninitialized pointer.
    CHECK(success);

    if (!strcasecmp(mime,MEDIA_MIMETYPE_AUDIO_AAC)) {
        mFormat = AUDIO_FORMAT_AAC;
    } else if (!strcasecmp(mime,MEDIA_MIMETYPE_AUDIO_MPEG)) {
        mFormat = AUDIO_FORMAT_MP3;
        ALOGD("TunnelPlayer::start AUDIO_FORMAT_MP3");
    } else {
        // Unsupported mime types are logged but playback is still
        // attempted with whatever mFormat currently holds.
        ALOGE("TunnelPlayer::UNSUPPORTED");
    }

    success = format->findInt32(kKeySampleRate, &mSampleRate);
    CHECK(success);

    success = format->findInt32(kKeyChannelCount, &numChannels);
    CHECK(success);

    if (!format->findInt32(kKeyChannelMask, &mChannelMask)) {
        // log only when there's a risk of ambiguity of channel mask selection
        ALOGI_IF(numChannels > 2,
                "source format didn't specify channel mask, using (%d) channel order",
                numChannels);
        mChannelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
    }

    audio_output_flags_t flags = (audio_output_flags_t) (AUDIO_OUTPUT_FLAG_TUNNEL |
            AUDIO_OUTPUT_FLAG_DIRECT);
    ALOGV("mAudiosink->open() mSampleRate %d, numChannels %d, mChannelMask %d, flags %d",
            mSampleRate, numChannels, mChannelMask, flags);
    err = mAudioSink->open(
            mSampleRate, numChannels, mChannelMask, mFormat,
            DEFAULT_AUDIOSINK_BUFFERCOUNT,
            &TunnelPlayer::AudioSinkCallback,
            this,
            flags,
            NULL);
    if (err != OK) {
        if (mFirstBuffer != NULL) {
            mFirstBuffer->release();
            mFirstBuffer = NULL;
        }

        if (!sourceAlreadyStarted) {
            mSource->stop();
        }

        ALOGE("Opening a routing session failed");
        return err;
    }

    mIsAudioRouted = true;
    mStarted = true;
    mAudioSink->start();

    mLock.lock();
    ALOGV("Waking up extractor thread");
    mExtractorCv.signal();
    mLock.unlock();

    return OK;
}
// Starts audio playback with optional compress-offload support: starts the
// source if necessary, primes the first buffer (tolerating an initial
// INFO_FORMAT_CHANGED), resolves the output format / channel mask, then
// opens either the AudioSink (possibly with deep-buffer or offload flags)
// or a raw AudioTrack. On failure the first buffer is released and the
// source is stopped again if this function started it.
status_t AudioPlayer::start(bool sourceAlreadyStarted) {
    CHECK(!mStarted);
    CHECK(mSource != NULL);

    status_t err;
    if (!sourceAlreadyStarted) {
        mSourcePaused = false;
        err = mSource->start();
        if (err != OK) {
            return err;
        }
    }

    // We allow an optional INFO_FORMAT_CHANGED at the very beginning
    // of playback, if there is one, getFormat below will retrieve the
    // updated format, if there isn't, we'll stash away the valid buffer
    // of data to be used on the first audio callback.
    CHECK(mFirstBuffer == NULL);

    MediaSource::ReadOptions options;
    if (mSeeking) {
        options.setSeekTo(mSeekTimeUs);
        mSeeking = false;
    }

    mFirstBufferResult = mSource->read(&mFirstBuffer, &options);
    if (mFirstBufferResult == INFO_FORMAT_CHANGED) {
        ALOGV("INFO_FORMAT_CHANGED!!!");
        CHECK(mFirstBuffer == NULL);
        mFirstBufferResult = OK;
        mIsFirstBuffer = false;
    } else {
        mIsFirstBuffer = true;
    }

    sp<MetaData> format = mSource->getFormat();
    const char *mime;
    bool success = format->findCString(kKeyMIMEType, &mime);
    CHECK(success);
    // Non-PCM mime types are acceptable only when the track is offloaded.
    CHECK(useOffload() || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));

    success = format->findInt32(kKeySampleRate, &mSampleRate);
    CHECK(success);

    int32_t numChannels, channelMask;
    success = format->findInt32(kKeyChannelCount, &numChannels);
    CHECK(success);

    if(!format->findInt32(kKeyChannelMask, &channelMask)) {
        // log only when there's a risk of ambiguity of channel mask selection
        ALOGI_IF(numChannels > 2,
                "source format didn't specify channel mask, using (%d) channel order",
                numChannels);
        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
    }

    // For offload, map the mime type to the HAL's compressed format;
    // otherwise stick with 16-bit PCM.
    audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;

    if (useOffload()) {
        if (mapMimeToAudioFormat(audioFormat, mime) != OK) {
            ALOGE("Couldn't map mime type \"%s\" to a valid AudioSystem::audio_format", mime);
            audioFormat = AUDIO_FORMAT_INVALID;
        } else {
#ifdef QCOM_HARDWARE
            // Override audio format for PCM offload
            if (audioFormat == AUDIO_FORMAT_PCM_16_BIT) {
                audioFormat = AUDIO_FORMAT_PCM_16_BIT_OFFLOAD;
            }
#endif
            ALOGV("Mime type \"%s\" mapped to audio_format 0x%x", mime, audioFormat);
        }
    }

    // Bit rate is optional metadata; -1 means "unknown" to the HAL.
    int avgBitRate = -1;
    format->findInt32(kKeyBitRate, &avgBitRate);

    if (mAudioSink.get() != NULL) {
        uint32_t flags = AUDIO_OUTPUT_FLAG_NONE;
        audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;

        if (allowDeepBuffering()) {
            flags |= AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
        }
        if (useOffload()) {
            flags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;

            // Populate the offload descriptor handed to the HAL.
            int64_t durationUs;
            if (format->findInt64(kKeyDuration, &durationUs)) {
                offloadInfo.duration_us = durationUs;
            } else {
                offloadInfo.duration_us = -1;
            }

            offloadInfo.sample_rate = mSampleRate;
            offloadInfo.channel_mask = channelMask;
            offloadInfo.format = audioFormat;
            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
            offloadInfo.bit_rate = avgBitRate;
            offloadInfo.has_video = ((mCreateFlags & HAS_VIDEO) != 0);
            offloadInfo.is_streaming = ((mCreateFlags & IS_STREAMING) != 0);
        }

        status_t err = mAudioSink->open(
                mSampleRate, numChannels, channelMask, audioFormat,
                DEFAULT_AUDIOSINK_BUFFERCOUNT,
                &AudioPlayer::AudioSinkCallback,
                this,
                (audio_output_flags_t)flags,
                useOffload() ? &offloadInfo : NULL);

        if (err == OK) {
            mLatencyUs = (int64_t)mAudioSink->latency() * 1000;
            mFrameSize = mAudioSink->frameSize();

            if (useOffload()) {
                // If the playback is offloaded to h/w we pass the
                // HAL some metadata information
                // We don't want to do this for PCM because it will be going
                // through the AudioFlinger mixer before reaching the hardware
                sendMetaDataToHal(mAudioSink, format);
            }

            err = mAudioSink->start();
            // do not alter behavior for non offloaded tracks: ignore start status.
            if (!useOffload()) {
                err = OK;
            }
        }

        if (err != OK) {
            if (mFirstBuffer != NULL) {
                mFirstBuffer->release();
                mFirstBuffer = NULL;
            }

            if (!sourceAlreadyStarted) {
                mSource->stop();
            }

            return err;
        }
    } else {
        // playing to an AudioTrack, set up mask if necessary
        audio_channel_mask_t audioMask = channelMask == CHANNEL_MASK_USE_CHANNEL_ORDER ?
                audio_channel_out_mask_from_count(numChannels) : channelMask;
        if (0 == audioMask) {
            return BAD_VALUE;
        }

        mAudioTrack = new AudioTrack(
                AUDIO_STREAM_MUSIC, mSampleRate, AUDIO_FORMAT_PCM_16_BIT, audioMask,
                0, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this, 0);

        if ((err = mAudioTrack->initCheck()) != OK) {
            mAudioTrack.clear();

            if (mFirstBuffer != NULL) {
                mFirstBuffer->release();
                mFirstBuffer = NULL;
            }

            if (!sourceAlreadyStarted) {
                mSource->stop();
            }

            return err;
        }

        mLatencyUs = (int64_t)mAudioTrack->latency() * 1000;
        mFrameSize = mAudioTrack->frameSize();

        mAudioTrack->start();
    }

    mStarted = true;
    mPlaying = true;
    mPinnedTimeUs = -1ll;

    const char *componentName;
    if (!(format->findCString(kKeyDecoderComponent, &componentName))) {
        componentName = "none";
    }
    // Only QCOM OMX decoders need an explicit pause request downstream.
    if (!strncmp(componentName, "OMX.qcom.", 9)) {
        mPauseRequired = true;
    } else {
        mPauseRequired = false;
    }

    return OK;
}
// Walks the HWC layer list and programs the display plane assigned to each
// layer (overlay, RGB overlay, sprite or primary) with the layer's buffer
// parameters. Falls a layer back to HWC_FRAMEBUFFER (and returns false
// overall) whenever a plane cannot take the buffer.
bool IntelDisplayDevice::updateLayersData(hwc_display_contents_1_t *list)
{
    IntelDisplayPlane *plane = 0;
    bool ret = true;
    bool handled = true;

    mYUVOverlay = -1;

    if (!list)
        return false;

    for (size_t i = 0; i < (size_t)mLayerList->getLayersCount(); i++) {
        hwc_layer_1_t *layer = &list->hwLayers[i];

        // layer safety check
        // FIX: test the pointer before calling isHWCLayer(); the original
        // order (!isHWCLayer(layer) || !layer) dereferenced layer first.
        if (!layer || !isHWCLayer(layer))
            continue;

        IMG_native_handle_t *grallocHandle =
            (IMG_native_handle_t*)layer->handle;

        // check plane
        plane = mLayerList->getPlane(i);
        if (!plane)
            continue;

        // get layer parameter
        int bobDeinterlace;
        int srcX = layer->sourceCrop.left;
        int srcY = layer->sourceCrop.top;
        int srcWidth = layer->sourceCrop.right - layer->sourceCrop.left;
        int srcHeight = layer->sourceCrop.bottom - layer->sourceCrop.top;
        int planeType = plane->getPlaneType();

        // 1-pixel-wide/high sources cannot be scanned out by a plane.
        if (srcHeight == 1 || srcWidth == 1) {
            mLayerList->detachPlane(i, plane);
            layer->compositionType = HWC_FRAMEBUFFER;
            handled = false;
            continue;
        }
        if (planeType == IntelDisplayPlane::DISPLAY_PLANE_OVERLAY) {
            if (mDrm->isOverlayOff()) {
                plane->disable();
                handled = false;
                layer->compositionType = HWC_FRAMEBUFFER;
                continue;
            }
        }

        // get & setup data buffer and buffer format
        IntelDisplayBuffer *buffer = plane->getDataBuffer();
        IntelDisplayDataBuffer *dataBuffer =
            reinterpret_cast<IntelDisplayDataBuffer*>(buffer);
        if (!dataBuffer) {
            ALOGE("%s: invalid data buffer\n", __func__);
            continue;
        }

        int bufferWidth = grallocHandle->iWidth;
        int bufferHeight = grallocHandle->iHeight;
        uint32_t bufferHandle = grallocHandle->fd[0];
        int format = grallocHandle->iFormat;
        uint32_t transform = layer->transform;

        if (planeType == IntelDisplayPlane::DISPLAY_PLANE_OVERLAY) {
            int flags = mLayerList->getFlags(i);

            if (flags & IntelDisplayPlane::DELAY_DISABLE) {
                ALOGD_IF(ALLOW_HWC_PRINT,
                        "updateLayerData: disable plane (DELAY)!");
                flags &= ~IntelDisplayPlane::DELAY_DISABLE;
                mLayerList->setFlags(i, flags);
                plane->disable();
            }

            //FIXME: is a workaround
            // Bypass overlay layer, if
            // device is rotated
            // and not presentation mode
            // and video is not only attached to HDMI
            if (list && (list->flags & HWC_ROTATION_IN_PROGRESS) &&
                (mDrm->getDisplayMode() == OVERLAY_EXTEND &&
                 !mDrm->isPresentationMode() &&
                 !mDrm->onlyHdmiHasVideo())) {
                ALOGI_IF(ALLOW_HWC_PRINT, "Bypass overlay layer");
                mLayerList->detachPlane(i, plane);
                layer->compositionType = HWC_OVERLAY;
                handled = false;
                continue;
            }

            // check if can switch to overlay
            bool useOverlay = useOverlayRotation(layer, i,
                                                 bufferHandle,
                                                 bufferWidth,
                                                 bufferHeight,
                                                 srcX,
                                                 srcY,
                                                 srcWidth,
                                                 srcHeight,
                                                 transform);

            if (!useOverlay) {
                ALOGD_IF(ALLOW_HWC_PRINT,
                        "updateLayerData: useOverlayRotation failed!");
                if (!mLayerList->getForceOverlay(i)) {
                    ALOGD_IF(ALLOW_HWC_PRINT,
                            "updateLayerData: fallback to ST to do rendering!");
                    // fallback to ST to render this frame
                    layer->compositionType = HWC_FRAMEBUFFER;
                    mForceSwapBuffer = true;
                    handled = false;
                }
                // disable overlay when rotated buffer is not ready
                flags |= IntelDisplayPlane::DELAY_DISABLE;
                mLayerList->setFlags(i, flags);
                continue;
            }

            bobDeinterlace = isBobDeinterlace(layer);
            if (bobDeinterlace) {
                flags |= IntelDisplayPlane::BOB_DEINTERLACE;
            } else {
                flags &= ~IntelDisplayPlane::BOB_DEINTERLACE;
            }
            mLayerList->setFlags(i, flags);

            // switch to overlay
            layer->compositionType = HWC_OVERLAY;

            // transformed buffer not from gralloc, can't use it's stride directly
            uint32_t grallocStride = !transform ?
                grallocHandle->iStride :
                align_to(bufferWidth, 32);

            // FIX: the original redeclared "int format" here, shadowing the
            // outer variable with the identical value; the outer one is used.
            dataBuffer->setFormat(format);
            dataBuffer->setStride(grallocStride);
            dataBuffer->setWidth(bufferWidth);
            dataBuffer->setHeight(bufferHeight);
            dataBuffer->setCrop(srcX, srcY, srcWidth, srcHeight);
            dataBuffer->setDeinterlaceType(bobDeinterlace);

            // set the data buffer back to plane
            ret = ((IntelOverlayPlane*)plane)->setDataBuffer(bufferHandle,
                                                             transform,
                                                             grallocHandle);
            if (!ret) {
                ALOGE("%s: failed to update overlay data buffer\n", __func__);
                mLayerList->detachPlane(i, plane);
                layer->compositionType = HWC_FRAMEBUFFER;
                handled = false;
            }
            if (layer->compositionType == HWC_OVERLAY &&
                format == HAL_PIXEL_FORMAT_INTEL_HWC_NV12)
                mYUVOverlay = i;
        } else if (planeType == IntelDisplayPlane::DISPLAY_PLANE_RGB_OVERLAY) {
            IntelRGBOverlayPlane *rgbOverlayPlane =
                reinterpret_cast<IntelRGBOverlayPlane*>(plane);
            uint32_t yuvBufferHandle =
                rgbOverlayPlane->convert((uint32_t)grallocHandle,
                                         srcWidth, srcHeight,
                                         srcX, srcY);
            if (!yuvBufferHandle) {
                // FIX: use ALOGE for consistency (was legacy LOGE).
                ALOGE("updateLayersData: failed to convert\n");
                continue;
            }

            // Re-read all buffer parameters from the converted YUV handle.
            grallocHandle = (IMG_native_handle_t*)yuvBufferHandle;
            bufferWidth = grallocHandle->iWidth;
            bufferHeight = grallocHandle->iHeight;
            bufferHandle = grallocHandle->fd[0];
            format = grallocHandle->iFormat;

            uint32_t grallocStride = grallocHandle->iStride;

            dataBuffer->setFormat(format);
            dataBuffer->setStride(grallocStride);
            dataBuffer->setWidth(bufferWidth);
            dataBuffer->setHeight(bufferHeight);
            dataBuffer->setCrop(srcX, srcY, srcWidth, srcHeight);
            dataBuffer->setDeinterlaceType(0);

            // set the data buffer back to plane
            ret = rgbOverlayPlane->setDataBuffer(bufferHandle, 0, grallocHandle);
            if (!ret) {
                ALOGE("%s: failed to update overlay data buffer\n", __func__);
                mLayerList->detachPlane(i, plane);
                layer->compositionType = HWC_FRAMEBUFFER;
                handled = false;
            }
        } else if (planeType == IntelDisplayPlane::DISPLAY_PLANE_SPRITE ||
                   planeType == IntelDisplayPlane::DISPLAY_PLANE_PRIMARY) {
            // adjust the buffer format if no blending is needed
            // some test cases would fail due to a weird format!
            if (layer->blending == HWC_BLENDING_NONE) {
                switch (format) {
                case HAL_PIXEL_FORMAT_BGRA_8888:
                    format = HAL_PIXEL_FORMAT_BGRX_8888;
                    break;
                case HAL_PIXEL_FORMAT_RGBA_8888:
                    format = HAL_PIXEL_FORMAT_RGBX_8888;
                    break;
                }
            }

            // set data buffer format
            dataBuffer->setFormat(format);
            dataBuffer->setWidth(bufferWidth);
            dataBuffer->setHeight(bufferHeight);
            dataBuffer->setCrop(srcX, srcY, srcWidth, srcHeight);
            // set the data buffer back to plane
            ret = plane->setDataBuffer(bufferHandle, transform, grallocHandle);
            if (!ret) {
                ALOGE("%s: failed to update sprite data buffer\n", __func__);
                mLayerList->detachPlane(i, plane);
                layer->compositionType = HWC_FRAMEBUFFER;
                handled = false;
            }
        } else {
            ALOGW("%s: invalid plane type %d\n", __func__, planeType);
            continue;
        }

        // clear layer's visible region if need clear up flag was set
        // and sprite plane was used as primary plane (point to FB)
        if (mLayerList->getNeedClearup(i) &&
            mPlaneManager->primaryAvailable(0)) {
            ALOGD_IF(ALLOW_HWC_PRINT,
                    "updateLayersData: clear visible region of layer %d", i);
            list->hwLayers[i].hints |= HWC_HINT_CLEAR_FB;
        }
    }

    return handled;
}