// Builds the dialog-side representation of a processor: a rounded rectangle with
// two text labels (identifier on top, class identifier at the bottom) and one
// hidden LinkDialogPropertyGraphicsItem per property. After creation, all items
// (this processor plus every property item, recursively including sub-property
// items) are linked into a prev/next chain and laid out via updatePositions().
LinkDialogProcessorGraphicsItem::LinkDialogProcessorGraphicsItem(Side side, Processor* processor)
    : GraphicsItemData<Processor>(nullptr, side, processor) {
    setZValue(linkdialog::processorDepth);
    setFlags(ItemSendsGeometryChanges);
    // Rect is centered on the local origin.
    setRect(-linkdialog::processorWidth / 2, -linkdialog::processorHeight / 2,
            linkdialog::processorWidth, linkdialog::processorHeight);

    // Processor identifier label (bold, white), anchored at the top-left corner.
    auto identifier = new LabelGraphicsItem(this);
    identifier->setPos(rect().topLeft() + QPointF(linkdialog::offset, linkdialog::offset));
    identifier->setDefaultTextColor(Qt::white);
    auto idFont = QFont("Segoe", linkdialog::processorLabelHeight, QFont::Bold, false);
    // Pixel size (not point size) so the label height matches the layout constants.
    idFont.setPixelSize(linkdialog::processorLabelHeight);
    identifier->setFont(idFont);
    identifier->setCrop(20, 19);

    // Class identifier label (italic, light gray), anchored above the bottom-left corner.
    auto classIdentifier = new LabelGraphicsItem(this);
    classIdentifier->setDefaultTextColor(Qt::lightGray);
    auto classFont = QFont("Segoe", linkdialog::processorLabelHeight, QFont::Normal, true);
    classFont.setPixelSize(linkdialog::processorLabelHeight);
    classIdentifier->setFont(classFont);
    classIdentifier->setCrop(20, 19);
    auto offset = classIdentifier->boundingRect().height();
    classIdentifier->setPos(rect().bottomLeft() +
                            QPointF(linkdialog::offset, -linkdialog::offset - offset));

    // Text is set after fonts so crop/bounds are computed with the final metrics.
    identifier->setText(QString::fromStdString(processor->getIdentifier()));
    classIdentifier->setText(QString::fromStdString(processor->getClassIdentifier()));

    // One child item per property; created hidden — layout happens below.
    for (auto& property : processor->getProperties()) {
        auto item = new LinkDialogPropertyGraphicsItem(this, property);
        properties_.push_back(item);
        item->hide();
        item->setParentItem(this);
    }

    // Thread all items into a doubly-linked prev/next chain in depth-first order,
    // starting at this processor item. The lambda captures itself by reference
    // (via the std::function) to recurse into sub-property items.
    LinkDialogTreeItem* prev = this;
    std::function<void(LinkDialogPropertyGraphicsItem*)> connect = [this, &connect, &prev](
        LinkDialogPropertyGraphicsItem* item) {
        prev->setNext(item);
        item->setPrev(prev);
        prev = item;
        for (auto i : item->getSubPropertyItemList()) {
            connect(i);
        }
    };
    for (auto item : properties_) connect(item);

    // Walk the freshly built chain and let each item place itself.
    LinkDialogTreeItem* item = this;
    while (item) {
        item->updatePositions();
        item = item->next();
    }
}
// Finalizes the HWC layer list for a composition pass: fills in the
// framebuffer-target layer (always the last slot) with the given handle and
// acquire fence, then hands the display list to the HWC "prepare" hook.
void HwcComposer2D::Prepare(buffer_handle_t fbHandle, int fence) {
    // The framebuffer target occupies the final slot of the layer list.
    const int fbIdx = mList->numHwLayers - 1;
    const hwc_rect_t screenRect = {0, 0, mScreenRect.width, mScreenRect.height};

    hwc_display_contents_1_t* displays[HWC_NUM_DISPLAY_TYPES] = { nullptr };
    displays[HWC_DISPLAY_PRIMARY] = mList;

    // No output buffer / retire fence for the primary display path.
    mList->outbufAcquireFenceFd = -1;
    mList->outbuf = nullptr;
    mList->retireFenceFd = -1;

    auto& fbLayer = mList->hwLayers[fbIdx];
    fbLayer.hints = 0;
    fbLayer.flags = 0;
    fbLayer.transform = 0;
    fbLayer.handle = fbHandle;
    fbLayer.blending = HWC_BLENDING_PREMULT;
    fbLayer.compositionType = HWC_FRAMEBUFFER_TARGET;
    // Source crop and destination both cover the whole screen.
    setCrop(&fbLayer, screenRect);
    fbLayer.displayFrame = screenRect;
    fbLayer.visibleRegionScreen.numRects = 1;
    fbLayer.visibleRegionScreen.rects = &fbLayer.displayFrame;
    fbLayer.acquireFenceFd = fence;
    fbLayer.releaseFenceFd = -1;
#if ANDROID_VERSION >= 18
    fbLayer.planeAlpha = 0xFF;
#endif

    if (mPrepared) {
        LOGE("Multiple hwc prepare calls!");
    }
    mHwc->prepare(mHwc, HWC_NUM_DISPLAY_TYPES, displays);
    mPrepared = true;
}
// Alternate construction path (with expansion animation state): builds the same
// labeled processor rectangle, but stacks the property items immediately below
// the processor rect and shows them right away. Each property item is offset by
// the cumulative height of all previously placed items (a property counts as
// itself plus its visible children).
LinkDialogProcessorGraphicsItem::LinkDialogProcessorGraphicsItem(Side side, Processor* processor)
    : GraphicsItemData<Processor>(side, processor), animateExpansion_(1.0) {
    setZValue(linkdialog::processorDepth);
    setFlags(ItemSendsGeometryChanges);
    // Rect is centered on the local origin.
    setRect(-linkdialog::processorWidth / 2, -linkdialog::processorHeight / 2,
            linkdialog::processorWidth, linkdialog::processorHeight);

    // Processor identifier label (bold, white), anchored at the top-left corner.
    auto identifier = new LabelGraphicsItem(this);
    identifier->setPos(rect().topLeft() + QPointF(linkdialog::offset, linkdialog::offset));
    identifier->setDefaultTextColor(Qt::white);
    auto idFont = QFont("Segoe", linkdialog::processorLabelHeight, QFont::Bold, false);
    // Pixel size (not point size) so the label height matches the layout constants.
    idFont.setPixelSize(linkdialog::processorLabelHeight);
    identifier->setFont(idFont);
    identifier->setCrop(20, 19);

    // Class identifier label (italic, light gray), anchored above the bottom-left corner.
    auto classIdentifier = new LabelGraphicsItem(this);
    classIdentifier->setDefaultTextColor(Qt::lightGray);
    auto classFont = QFont("Segoe", linkdialog::processorLabelHeight, QFont::Normal, true);
    classFont.setPixelSize(linkdialog::processorLabelHeight);
    classIdentifier->setFont(classFont);
    classIdentifier->setCrop(20, 19);
    auto offset = classIdentifier->boundingRect().height();
    classIdentifier->setPos(rect().bottomLeft() +
                            QPointF(linkdialog::offset, -linkdialog::offset - offset));

    identifier->setText(QString::fromStdString(processor->getIdentifier()));
    classIdentifier->setText(QString::fromStdString(processor->getClassIdentifier()));

    // Lay the property items out vertically, starting just below this rect.
    QPointF newPos(0.0f, rect().height());
    for (auto& property : processor->getProperties()) {
        auto item = new LinkDialogPropertyGraphicsItem(this, property);
        properties_.push_back(item);
        item->setParentItem(this);
        item->setPos(newPos);
        // Advance by the item itself plus its currently visible children.
        size_t count = 1 + item->getTotalVisibleChildCount();
        newPos += QPointF(0, count * linkdialog::propertyHeight);
        item->show();
    }
}
// Varargs trampoline for NATIVE_WINDOW_SET_CROP: extracts the crop rectangle
// pointer from the va_list and forwards it to the typed setCrop() overload.
int SurfaceTextureClient::dispatchSetCrop(va_list args) {
    android_native_rect_t const* nativeRect = va_arg(args, android_native_rect_t*);
    // android_native_rect_t and Rect share the same layout, so the cast is safe
    // by convention in this API.
    Rect const* cropRect = reinterpret_cast<Rect const*>(nativeRect);
    return setCrop(cropRect);
}
void Flipbook::resetCrop() { setCrop(QRectF()); }
// Main decode-test driver: reads NAL units from the bitstream, reassembles
// fragmented packets, feeds them to the H.264/AVC (MVC) decoder, and writes the
// reconstructed YUV frames out. Also implements simple decoder robustness
// (repeating the last frame when POCs are missing) and end-of-stream handling.
// Returns Err::m_nOK on success; RNOK/ROT/ROF macros propagate errors.
ErrVal H264AVCDecoderTest::go()
{
  PicBuffer* pcPicBuffer = NULL;
  PicBufferList cPicBufferOutputList;
  PicBufferList cPicBufferUnusedList;
  PicBufferList cPicBufferReleaseList;
  UInt uiMbX = 0;
  UInt uiMbY = 0;
  UInt uiNalUnitType = 0;
  UInt uiSize = 0;
  UInt uiLumOffset = 0;
  UInt uiCbOffset = 0;
  UInt uiCrOffset = 0;
  UInt uiFrame;
  Bool bEOS = false;
  Bool bYuvDimSet = false;
  // HS: packet trace
  UInt uiMaxPocDiff = m_pcParameter->uiMaxPocDiff;
  UInt uiLastPoc = MSYS_UINT_MAX;
  UChar* pcLastFrame = 0;
  UInt uiPreNalUnitType = 0;
  cPicBufferOutputList.clear();
  cPicBufferUnusedList.clear();
  RNOK( m_pcH264AVCDecoder->init(true, m_pcParameter) );
  Bool bToDecode = false; //JVT-P031
  // NOTE(review): "uiFrame <= MSYS_UINT_MAX" is always true for a UInt, so this
  // loop effectively runs until bEOS is set.
  for( uiFrame = 0; ( uiFrame <= MSYS_UINT_MAX && ! bEOS); )
  {
    BinData* pcBinData;
    BinDataAccessor cBinDataAccessor;
    Int iPos;
    // Bool bFinishChecking;
    RNOK( m_pcReadBitstream->getPosition(iPos) );
    //JVT-P031
    // Per-access-unit fragment reassembly state.
    Bool bFragmented = false;
    Bool bDiscardable = false;
    Bool bStart = false;
    Bool bFirst = true;
    UInt uiTotalLength = 0;
#define MAX_FRAGMENTS 10 // hard-coded
    BinData* pcBinDataTmp[MAX_FRAGMENTS];
    BinDataAccessor cBinDataAccessorTmp[MAX_FRAGMENTS];
    UInt uiFragNb, auiStartPos[MAX_FRAGMENTS], auiEndPos[MAX_FRAGMENTS];
    Bool bConcatenated = false; //FRAG_FIX_3
    Bool bSkip = false; // Dong: To skip unknown NAL unit types
    uiFragNb = 0;
    bEOS = false;
    pcBinData = 0;
    // Collect fragments until a NAL-unit start (bStart) or end of stream.
    while(!bStart && !bEOS)
    {
      if(bFirst)
      {
        RNOK( m_pcReadBitstream->setPosition(iPos) );
        bFirst = false;
      }
      RNOK( m_pcReadBitstream->extractPacket( pcBinDataTmp[uiFragNb], bEOS ) );
      //TMM_EC {{
      // NAL unit type 0x0b marks end of stream: substitute an empty packet and
      // replay the previous NAL unit type.
      if( !bEOS && ((pcBinDataTmp[uiFragNb]->data())[0] & 0x1f )== 0x0b)
      {
        printf("end of stream\n");
        bEOS=true;
        uiNalUnitType= uiPreNalUnitType;
        RNOK( m_pcReadBitstream->releasePacket( pcBinDataTmp[uiFragNb] ) );
        pcBinDataTmp[uiFragNb] = new BinData;
        uiTotalLength = 0;
        pcBinDataTmp[uiFragNb]->set( new UChar[uiTotalLength], uiTotalLength );
      }
      //TMM_EC }}
      pcBinDataTmp[uiFragNb]->setMemAccessor( cBinDataAccessorTmp[uiFragNb] );
      bSkip = false;
      // open the NAL Unit, determine the type and if it's a slice get the frame size
      RNOK( m_pcH264AVCDecoder->initPacket( &cBinDataAccessorTmp[uiFragNb], uiNalUnitType, uiMbX, uiMbY, uiSize, true, false, //FRAG_FIX_3
        // bStart, auiStartPos[uiFragNb], auiEndPos[uiFragNb], bFragmented, bDiscardable ) );
        bStart, auiStartPos[uiFragNb], auiEndPos[uiFragNb], bFragmented, bDiscardable, this->m_pcParameter->getNumOfViews(), bSkip ) );
      uiTotalLength += auiEndPos[uiFragNb] - auiStartPos[uiFragNb];
      // Dong: Skip unknown NAL units
      if( bSkip )
      {
        printf("Unknown NAL unit type: %d\n", uiNalUnitType);
        uiTotalLength -= (auiEndPos[uiFragNb] - auiStartPos[uiFragNb]);
      }
      else if(!bStart)
      {
        // Mid-fragment: keep accumulating (EOS here would be an error).
        ROT( bEOS) ; //[email protected]
        uiFragNb++;
      }
      else
      {
        if(pcBinDataTmp[0]->size() != 0)
        {
          // Concatenate all collected fragments into one contiguous packet.
          pcBinData = new BinData;
          pcBinData->set( new UChar[uiTotalLength], uiTotalLength );
          // append fragments
          UInt uiOffset = 0;
          for(UInt uiFrag = 0; uiFrag<uiFragNb+1; uiFrag++)
          {
            memcpy(pcBinData->data()+uiOffset, pcBinDataTmp[uiFrag]->data() + auiStartPos[uiFrag], auiEndPos[uiFrag]-auiStartPos[uiFrag]);
            uiOffset += auiEndPos[uiFrag]-auiStartPos[uiFrag];
            RNOK( m_pcReadBitstream->releasePacket( pcBinDataTmp[uiFrag] ) );
            pcBinDataTmp[uiFrag] = NULL;
            if(uiNalUnitType != 6)
              //JVT-T054
              m_pcH264AVCDecoder->decreaseNumOfNALInAU();
            //FRAG_FIX_3
            if(uiFrag > 0) bConcatenated = true;
            //~FRAG_FIX_3
          }
          pcBinData->setMemAccessor( cBinDataAccessor );
          bToDecode = false;
          if((uiTotalLength != 0) && (!bDiscardable || bFragmented))
          {
            //FRAG_FIX
            // Slice NAL units (1, 5, 20, 21): re-open the reassembled packet.
            if( (uiNalUnitType == 20) || (uiNalUnitType == 21) || (uiNalUnitType == 1) || (uiNalUnitType == 5) )
            {
              uiPreNalUnitType=uiNalUnitType;
              RNOK( m_pcH264AVCDecoder->initPacket( &cBinDataAccessor, uiNalUnitType, uiMbX, uiMbY, uiSize,
                //uiNonRequiredPic, //NonRequired JVT-Q066
                false, bConcatenated, //FRAG_FIX_3
                bStart, auiStartPos[uiFragNb+1], auiEndPos[uiFragNb+1],
                // bFragmented, bDiscardable) );
                bFragmented, bDiscardable, this->m_pcParameter->getNumOfViews(), bSkip) );
            }
            else if( uiNalUnitType == 14 )
            {
              uiPreNalUnitType=uiNalUnitType;
              RNOK( m_pcH264AVCDecoder->initPacket( &cBinDataAccessor, uiNalUnitType, uiMbX, uiMbY, uiSize,
                //uiNonRequiredPic, //NonRequired JVT-Q066
                false, bConcatenated, //FRAG_FIX_3
                bStart, auiStartPos[uiFragNb+1], auiEndPos[uiFragNb+1],
                // bFragmented, bDiscardable) );
                bFragmented, bDiscardable,this->m_pcParameter->getNumOfViews(), bSkip) );
            }
            else
              m_pcH264AVCDecoder->initPacket( &cBinDataAccessor );
            bToDecode = true;
            // Prefix NAL units (14) are parsed but not decoded as pictures.
            if( uiNalUnitType == 14 )
              bToDecode = false;
          }
        }
      }
    }
    //~JVT-P031
    //NonRequired JVT-Q066{
    if(m_pcH264AVCDecoder->isNonRequiredPic())
      continue;
    //NonRequired JVT-Q066}
    // JVT-Q054 Red. Picture {
    RNOK( m_pcH264AVCDecoder->checkRedundantPic() );
    if ( m_pcH264AVCDecoder->isRedundantPic() )
      continue;
    // JVT-Q054 Red. Picture }
    if(bToDecode)//JVT-P031
    {
      // get new picture buffer if required if coded Slice || coded IDR slice
      pcPicBuffer = NULL;
      if( uiNalUnitType == 1 || uiNalUnitType == 5 || uiNalUnitType == 20 || uiNalUnitType == 21 )
      {
        RNOK( xGetNewPicBuffer( pcPicBuffer, uiSize ) );
        if( ! bYuvDimSet )
        {
          // Compute plane offsets once, from the first slice's frame size
          // (dimensions in macroblocks, with YUV margins around each plane).
          UInt uiLumSize = ((uiMbX<<3)+ YUV_X_MARGIN) * ((uiMbY<<3) + YUV_Y_MARGIN ) * 4;
          uiLumOffset = ((uiMbX<<4)+2*YUV_X_MARGIN) * YUV_Y_MARGIN + YUV_X_MARGIN;
          uiCbOffset = ((uiMbX<<3)+ YUV_X_MARGIN) * YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + uiLumSize;
          uiCrOffset = ((uiMbX<<3)+ YUV_X_MARGIN) * YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + 5*uiLumSize/4;
          bYuvDimSet = true;
          // HS: decoder robustness
          pcLastFrame = new UChar [uiSize];
          ROF( pcLastFrame );
        }
      }
      // decode the NAL unit
      RNOK( m_pcH264AVCDecoder->process( pcPicBuffer, cPicBufferOutputList, cPicBufferUnusedList, cPicBufferReleaseList ) );
      // ROI DECODE ICU/ETRI
      m_pcH264AVCDecoder->RoiDecodeInit();
      setCrop();//lufeng: support frame cropping
      // picture output
      while( ! cPicBufferOutputList.empty() )
      {
        //JVT-V054
        // Lazily initialize the YUV writer(s) once the view coding order is known.
        if(!m_pcWriteYuv->getFileInitDone() )
        {
          //UInt *vcOrder = m_pcH264AVCDecoder->getViewCodingOrder();
          UInt *vcOrder = m_pcH264AVCDecoder->getViewCodingOrder_SubStream();
          if(vcOrder == NULL)//lufeng: in order to output non-MVC seq
          {
            //UInt order=0;
            m_pcH264AVCDecoder->addViewCodingOrder();
            //vcOrder = m_pcH264AVCDecoder->getViewCodingOrder();
            vcOrder = m_pcH264AVCDecoder->getViewCodingOrder_SubStream();
          }
          m_pcWriteYuv->xInitMVC(m_pcParameter->cYuvFile, vcOrder, m_pcParameter->getNumOfViews()); // JVT-AB024 modified remove active view info SEI
        }
        PicBuffer* pcPicBufferTmp = cPicBufferOutputList.front();
        cPicBufferOutputList.pop_front();
        if( pcPicBufferTmp != NULL )
        {
          // HS: decoder robustness
          // Fill POC gaps by re-emitting the previously written frame.
          while( uiLastPoc + uiMaxPocDiff < (UInt)pcPicBufferTmp->getCts() )
          {
            RNOK( m_pcWriteYuv->writeFrame( pcLastFrame + uiLumOffset, pcLastFrame + uiCbOffset, pcLastFrame + uiCrOffset, uiMbY << 4, uiMbX << 4, (uiMbX << 4)+ YUV_X_MARGIN*2 ) );
            printf("REPEAT FRAME\n");
            uiFrame ++;
            uiLastPoc += uiMaxPocDiff;
          }
          if(m_pcParameter->getNumOfViews() > 0)
          {
            // Map the buffer's view id to its position in the coding order.
            UInt view_cnt;
            for (view_cnt=0; view_cnt < m_pcParameter->getNumOfViews(); view_cnt++){
              //UInt tmp_order=m_pcH264AVCDecoder->getViewCodingOrder()[view_cnt];
              UInt tmp_order=m_pcH264AVCDecoder->getViewCodingOrder_SubStream()[view_cnt];
              if ((UInt)pcPicBufferTmp->getViewId() == tmp_order) break;
            }
            RNOK( m_pcWriteYuv->writeFrame( *pcPicBufferTmp + uiLumOffset, *pcPicBufferTmp + uiCbOffset, *pcPicBufferTmp + uiCrOffset, uiMbY << 4, uiMbX << 4, (uiMbX << 4)+ YUV_X_MARGIN*2,
              //(UInt)pcPicBufferTmp->getViewId(),
              view_cnt) );
          }
          else
            RNOK( m_pcWriteYuv->writeFrame( *pcPicBufferTmp + uiLumOffset, *pcPicBufferTmp + uiCbOffset, *pcPicBufferTmp + uiCrOffset, uiMbY << 4, uiMbX << 4, (uiMbX << 4)+ YUV_X_MARGIN*2 ) );
          uiFrame++;
          // HS: decoder robustness
          // Remember this frame so it can be repeated if later POCs are missing.
          uiLastPoc = (UInt)pcPicBufferTmp->getCts();
          ::memcpy( pcLastFrame, *pcPicBufferTmp+0, uiSize*sizeof(UChar) );
        }
      }
    }
    // Release per-iteration buffers regardless of whether we decoded.
    RNOK( xRemovePicBuffer( cPicBufferReleaseList ) );
    RNOK( xRemovePicBuffer( cPicBufferUnusedList ) );
    if( pcBinData )
    {
      RNOK( m_pcReadBitstream->releasePacket( pcBinData ) );
      pcBinData = 0;
    }
  }
  printf("\n %d frames decoded\n", uiFrame );
  delete [] pcLastFrame; // HS: decoder robustness
  RNOK( m_pcH264AVCDecoder->uninit( true ) );
  m_pcParameter->nFrames = uiFrame;
  m_pcParameter->nResult = 0;
  return Err::m_nOK;
}
// Varargs trampoline for the set-crop perform op: reads the rectangle pointer
// from the argument list and delegates to the strongly-typed setCrop().
int GonkNativeWindowClient::dispatchSetCrop(va_list args) {
    // Layout-compatible reinterpretation of android_native_rect_t as Rect,
    // per the native-window API convention.
    return setCrop(
        reinterpret_cast<Rect const*>(va_arg(args, android_native_rect_t*)));
}
// Binder server-side dispatch for IGraphicBufferProducer: unmarshals each
// transaction's arguments from |data|, invokes the corresponding virtual, and
// marshals results into |reply|. Unknown codes fall through to
// BBinder::onTransact(). Returns NO_ERROR for every handled code; the
// per-call status is written into the reply parcel.
//
// FIX: SET_TIMESTEAP previously stored data.readInt64() in a uint32_t,
// truncating the 64-bit timestamp (nanoseconds) before passing it on. The
// local is now int64_t so the full value reaches setTimestamp().
status_t BnGraphicBufferProducer::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case REQUEST_BUFFER: {
            CHECK_INTERFACE(IGraphicBufferProducer, data, reply);
            int bufferIdx = data.readInt32();
            sp<GraphicBuffer> buffer;
            int result = requestBuffer(bufferIdx, &buffer);
            // Flatten a "has buffer" flag first so the client knows whether to read one.
            reply->writeInt32(buffer != 0);
            if (buffer != 0) {
                reply->write(*buffer);
            }
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case SET_BUFFER_COUNT: {
            CHECK_INTERFACE(IGraphicBufferProducer, data, reply);
            int bufferCount = data.readInt32();
            int result = setBufferCount(bufferCount);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case DEQUEUE_BUFFER: {
            CHECK_INTERFACE(IGraphicBufferProducer, data, reply);
            bool async = data.readInt32();
            uint32_t w = data.readInt32();
            uint32_t h = data.readInt32();
            uint32_t format = data.readInt32();
            uint32_t usage = data.readInt32();
            int buf;
            sp<Fence> fence;
            int result = dequeueBuffer(&buf, &fence, async, w, h, format, usage);
            reply->writeInt32(buf);
            // Fence is optional; flag its presence before flattening it.
            reply->writeInt32(fence != NULL);
            if (fence != NULL) {
                reply->write(*fence);
            }
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case QUEUE_BUFFER: {
            CHECK_INTERFACE(IGraphicBufferProducer, data, reply);
            int buf = data.readInt32();
            QueueBufferInput input(data);
            // Output struct is written in place into the reply parcel.
            QueueBufferOutput* const output =
                reinterpret_cast<QueueBufferOutput *>(
                    reply->writeInplace(sizeof(QueueBufferOutput)));
            status_t result = queueBuffer(buf, input, output);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case CANCEL_BUFFER: {
            CHECK_INTERFACE(IGraphicBufferProducer, data, reply);
            int buf = data.readInt32();
            sp<Fence> fence = new Fence();
            data.read(*fence.get());
            // No status is written back for cancelBuffer.
            cancelBuffer(buf, fence);
            return NO_ERROR;
        } break;
        case QUERY: {
            CHECK_INTERFACE(IGraphicBufferProducer, data, reply);
            int value;
            int what = data.readInt32();
            int res = query(what, &value);
            reply->writeInt32(value);
            reply->writeInt32(res);
            return NO_ERROR;
        } break;
        case CONNECT: {
            CHECK_INTERFACE(IGraphicBufferProducer, data, reply);
            sp<IBinder> token = data.readStrongBinder();
            int api = data.readInt32();
            bool producerControlledByApp = data.readInt32();
            QueueBufferOutput* const output =
                reinterpret_cast<QueueBufferOutput *>(
                    reply->writeInplace(sizeof(QueueBufferOutput)));
            status_t res = connect(token, api, producerControlledByApp, output);
            reply->writeInt32(res);
            return NO_ERROR;
        } break;
        case DISCONNECT: {
            CHECK_INTERFACE(IGraphicBufferProducer, data, reply);
            int api = data.readInt32();
            status_t res = disconnect(api);
            reply->writeInt32(res);
            return NO_ERROR;
        } break;
        // NOTE(review): the vendor-extension cases below check the
        // ISurfaceTexture descriptor rather than IGraphicBufferProducer —
        // presumably matching the proxy side; confirm against the Bp class.
        case SET_CROP: {
            Rect reg;
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            reg.left = data.readFloat();
            reg.top = data.readFloat();
            reg.right = data.readFloat();
            reg.bottom = data.readFloat();
            status_t result = setCrop(reg);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case SET_TRANSFORM: {
            uint32_t transform;
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            transform = data.readInt32();
            status_t result = setCurrentTransform(transform);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case SET_SCALINGMODE: {
            uint32_t scalingmode;
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            scalingmode = data.readInt32();
            status_t result = setCurrentScalingMode(scalingmode);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case SET_TIMESTEAP: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            // Was uint32_t, which silently truncated the 64-bit timestamp.
            int64_t timestamp = data.readInt64();
            status_t result = setTimestamp(timestamp);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case SET_PARAMETER: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            uint32_t cmd = (uint32_t)data.readInt32();
            uint32_t value;
            // NOTE(review): for the struct-carrying commands, a pointer to a
            // stack-local is smuggled through a uint32_t — only valid on
            // 32-bit builds and only because setParameter() consumes it
            // synchronously.
            if(cmd == HWC_LAYER_SETINITPARA)
            {
                layerinitpara_t layer_info;
                data.read((void *)&layer_info,sizeof(layerinitpara_t));
                value = (uint32_t)&layer_info;
            }
            else if(cmd == HWC_LAYER_SETFRAMEPARA)
            {
                libhwclayerpara_t frame_info;
                data.read((void *)&frame_info,sizeof(libhwclayerpara_t));
                value = (uint32_t)&frame_info;
            }
            else if(cmd == HWC_LAYER_SET3DMODE)
            {
                video3Dinfo_t _3d_info;
                data.read((void *)&_3d_info, sizeof(video3Dinfo_t));
                value = (uint32_t)&_3d_info;
            }
            else
            {
                value = (uint32_t)data.readInt32();
            }
            int res = setParameter(cmd,value);
            reply->writeInt32(res);
            return NO_ERROR;
        } break;
        case GET_PARAMETER: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            uint32_t cmd = (uint32_t)data.readInt32();
            uint32_t res = getParameter(cmd);
            reply->writeInt32((int32_t)res);
            return NO_ERROR;
        } break;
    }
    return BBinder::onTransact(code, data, reply, flags);
}
// Binder server-side dispatch for ISurfaceTexture: unmarshals each
// transaction's arguments from |data|, calls the matching virtual, and writes
// the results into |reply|. Unknown codes fall through to
// BBinder::onTransact(). Handled codes always return NO_ERROR; the per-call
// status travels inside the reply parcel.
status_t BnSurfaceTexture::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case REQUEST_BUFFER: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            int bufferIdx = data.readInt32();
            sp<GraphicBuffer> buffer;
            int result = requestBuffer(bufferIdx, &buffer);
            // "Has buffer" flag precedes the flattened buffer itself.
            reply->writeInt32(buffer != 0);
            if (buffer != 0) {
                reply->write(*buffer);
            }
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case SET_BUFFER_COUNT: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            int bufferCount = data.readInt32();
            int result = setBufferCount(bufferCount);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case DEQUEUE_BUFFER: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            uint32_t w = data.readInt32();
            uint32_t h = data.readInt32();
            uint32_t format = data.readInt32();
            uint32_t usage = data.readInt32();
            int buf;
            int result = dequeueBuffer(&buf, w, h, format, usage);
            reply->writeInt32(buf);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case QUEUE_BUFFER: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            int buf = data.readInt32();
            int64_t timestamp = data.readInt64();
            // queueBuffer reports the current default geometry/transform back.
            uint32_t outWidth, outHeight, outTransform;
            status_t result = queueBuffer(buf, timestamp, &outWidth, &outHeight, &outTransform);
            reply->writeInt32(outWidth);
            reply->writeInt32(outHeight);
            reply->writeInt32(outTransform);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case CANCEL_BUFFER: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            int buf = data.readInt32();
            // No status is written back for cancelBuffer.
            cancelBuffer(buf);
            return NO_ERROR;
        } break;
        case SET_CROP: {
            Rect reg;
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            // Edges are transported as floats and assigned into the Rect.
            reg.left = data.readFloat();
            reg.top = data.readFloat();
            reg.right = data.readFloat();
            reg.bottom = data.readFloat();
            status_t result = setCrop(reg);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case SET_TRANSFORM: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            uint32_t transform = data.readInt32();
            status_t result = setTransform(transform);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case SET_SCALING_MODE: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            int mode = data.readInt32();
            status_t result = setScalingMode(mode);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
        case QUERY: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            int value;
            int what = data.readInt32();
            int res = query(what, &value);
            reply->writeInt32(value);
            reply->writeInt32(res);
            return NO_ERROR;
        } break;
        case SET_SYNCHRONOUS_MODE: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            bool enabled = data.readInt32();
            status_t res = setSynchronousMode(enabled);
            reply->writeInt32(res);
            return NO_ERROR;
        } break;
        case CONNECT: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            int api = data.readInt32();
            // connect reports the current default geometry/transform back.
            uint32_t outWidth, outHeight, outTransform;
            status_t res = connect(api, &outWidth, &outHeight, &outTransform);
            reply->writeInt32(outWidth);
            reply->writeInt32(outHeight);
            reply->writeInt32(outTransform);
            reply->writeInt32(res);
            return NO_ERROR;
        } break;
        case DISCONNECT: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            int api = data.readInt32();
            status_t res = disconnect(api);
            reply->writeInt32(res);
            return NO_ERROR;
        } break;
        // [MTK] for S3D offset control
        //--------------------------------------------------------------
        case SET_S3DOFFSET: {
            CHECK_INTERFACE(ISurfaceTexture, data, reply);
            int32_t offset = data.readInt32();
            status_t result = setS3DOffset(offset);
            reply->writeInt32(result);
            return NO_ERROR;
        } break;
    }
    return BBinder::onTransact(code, data, reply, flags);
}
// Attempts to map one Gecko layer (and, for containers, its children) onto HWC
// hardware layers. Returns true when the layer was handled (added to mList or
// legitimately skipped, e.g. empty/clipped-out); returns false when the layer
// cannot be composed by HWC, in which case the caller falls back to GPU
// composition for the whole tree.
bool HwcComposer2D::PrepareLayerList(Layer* aLayer, const nsIntRect& aClip, const Matrix& aParentTransform)
{
    // NB: we fall off this path whenever there are container layers
    // that require intermediate surfaces. That means all the
    // GetEffective*() coordinates are relative to the framebuffer.
    bool fillColor = false;
    const nsIntRegion& visibleRegion = aLayer->GetEffectiveVisibleRegion();
    if (visibleRegion.IsEmpty()) {
        // Nothing to draw; treat as handled.
        return true;
    }
    // Opacity mapped to [0, 0xFF] (256 scale clamped at 0xFF).
    uint8_t opacity = std::min(0xFF, (int)(aLayer->GetEffectiveOpacity() * 256.0));
#if ANDROID_VERSION < 18
    // planeAlpha only exists from HWC 1.2 (Android 18) onward.
    if (opacity < 0xFF) {
        LOGD("%s Layer has planar semitransparency which is unsupported by hwcomposer", aLayer->Name());
        return false;
    }
#endif
    if (aLayer->GetMaskLayer()) {
        LOGD("%s Layer has MaskLayer which is unsupported by hwcomposer", aLayer->Name());
        return false;
    }
    nsIntRect clip;
    if (!HwcUtils::CalculateClipRect(aParentTransform, aLayer->GetEffectiveClipRect(), aClip, &clip)) {
        LOGD("%s Clip rect is empty. Skip layer", aLayer->Name());
        return true;
    }
    // HWC supports only the following 2D transformations:
    //
    // Scaling via the sourceCrop and displayFrame in HwcLayer
    // Translation via the sourceCrop and displayFrame in HwcLayer
    // Rotation (in square angles only) via the HWC_TRANSFORM_ROT_* flags
    // Reflection (horizontal and vertical) via the HWC_TRANSFORM_FLIP_* flags
    //
    // A 2D transform with PreservesAxisAlignedRectangles() has all the attributes
    // above
    Matrix layerTransform;
    if (!aLayer->GetEffectiveTransform().Is2D(&layerTransform) || !layerTransform.PreservesAxisAlignedRectangles()) {
        LOGD("Layer EffectiveTransform has a 3D transform or a non-square angle rotation");
        return false;
    }
    Matrix layerBufferTransform;
    if (!aLayer->GetEffectiveTransformForBuffer().Is2D(&layerBufferTransform) || !layerBufferTransform.PreservesAxisAlignedRectangles()) {
        LOGD("Layer EffectiveTransformForBuffer has a 3D transform or a non-square angle rotation");
        return false;
    }
    // Containers: recurse into children (in 3D z-order) unless an intermediate
    // surface is required, which HWC cannot provide.
    if (ContainerLayer* container = aLayer->AsContainerLayer()) {
        if (container->UseIntermediateSurface()) {
            LOGD("Container layer needs intermediate surface");
            return false;
        }
        nsAutoTArray<Layer*, 12> children;
        container->SortChildrenBy3DZOrder(children);
        for (uint32_t i = 0; i < children.Length(); i++) {
            if (!PrepareLayerList(children[i], clip, layerTransform)) {
                return false;
            }
        }
        return true;
    }
    LayerRenderState state = aLayer->GetRenderState();
    if (!state.mSurface.get()) {
        // No gralloc surface: only color layers can be composed (as a fill).
        if (aLayer->AsColorLayer() && mColorFill) {
            fillColor = true;
        } else {
            LOGD("%s Layer doesn't have a gralloc buffer", aLayer->Name());
            return false;
        }
    }
    nsIntRect visibleRect = visibleRegion.GetBounds();
    nsIntRect bufferRect;
    if (fillColor) {
        bufferRect = nsIntRect(visibleRect);
    } else {
        nsIntRect layerRect;
        if (state.mHasOwnOffset) {
            bufferRect = nsIntRect(state.mOffset.x, state.mOffset.y, state.mSize.width, state.mSize.height);
            layerRect = bufferRect;
        } else {
            //Since the buffer doesn't have its own offset, assign the whole
            //surface size as its buffer bounds
            bufferRect = nsIntRect(0, 0, state.mSize.width, state.mSize.height);
            layerRect = bufferRect;
            if (aLayer->GetType() == Layer::TYPE_IMAGE) {
                ImageLayer* imageLayer = static_cast<ImageLayer*>(aLayer);
                if(imageLayer->GetScaleMode() != ScaleMode::SCALE_NONE) {
                    layerRect = nsIntRect(0, 0, imageLayer->GetScaleToSize().width, imageLayer->GetScaleToSize().height);
                }
            }
        }
        // In some cases the visible rect assigned to the layer can be larger
        // than the layer's surface, e.g., an ImageLayer with a small Image
        // in it.
        visibleRect.IntersectRect(visibleRect, layerRect);
    }
    // Buffer rotation is not to be confused with the angled rotation done by a transform matrix
    // It's a fancy PaintedLayer feature used for scrolling
    if (state.BufferRotated()) {
        LOGD("%s Layer has a rotated buffer", aLayer->Name());
        return false;
    }
    const bool needsYFlip = state.OriginBottomLeft() ? true : false;
    hwc_rect_t sourceCrop, displayFrame;
    if(!HwcUtils::PrepareLayerRects(visibleRect, layerTransform, layerBufferTransform, clip, bufferRect, needsYFlip, &(sourceCrop), &(displayFrame))) {
        // Rect computation degenerated (e.g. empty crop): nothing to compose.
        return true;
    }
    // OK! We can compose this layer with hwc.
    int current = mList ? mList->numHwLayers : 0;
    // Do not compose any layer below full-screen Opaque layer
    // Note: It can be generalized to non-fullscreen Opaque layers.
    bool isOpaque = opacity == 0xFF && (state.mFlags & LayerRenderStateFlags::OPAQUE);
    // Currently we perform opacity calculation using the *bounds* of the layer.
    // We can only make this assumption if we're not dealing with a complex visible region.
    bool isSimpleVisibleRegion = visibleRegion.Contains(visibleRect);
    if (current && isOpaque && isSimpleVisibleRegion) {
        nsIntRect displayRect = nsIntRect(displayFrame.left, displayFrame.top, displayFrame.right - displayFrame.left, displayFrame.bottom - displayFrame.top);
        if (displayRect.Contains(mScreenRect)) {
            // In z-order, all previous layers are below
            // the current layer. We can ignore them now.
            mList->numHwLayers = current = 0;
            mHwcLayerMap.Clear();
        }
    }
    // Grow the layer list if it is full (or not yet allocated).
    if (!mList || current >= mMaxLayerCount) {
        if (!ReallocLayerList() || current >= mMaxLayerCount) {
            LOGE("PrepareLayerList failed! Could not increase the maximum layer count");
            return false;
        }
    }
    HwcLayer& hwcLayer = mList->hwLayers[current];
    hwcLayer.displayFrame = displayFrame;
    setCrop(&hwcLayer, sourceCrop);
    buffer_handle_t handle = fillColor ? nullptr : state.mSurface->getNativeBuffer()->handle;
    hwcLayer.handle = handle;
    hwcLayer.flags = 0;
    hwcLayer.hints = 0;
    hwcLayer.blending = isOpaque ? HWC_BLENDING_NONE : HWC_BLENDING_PREMULT;
#if ANDROID_VERSION >= 17
    hwcLayer.compositionType = HWC_FRAMEBUFFER;
    hwcLayer.acquireFenceFd = -1;
    hwcLayer.releaseFenceFd = -1;
#if ANDROID_VERSION >= 18
    hwcLayer.planeAlpha = opacity;
#endif
#else
    hwcLayer.compositionType = HwcUtils::HWC_USE_COPYBIT;
#endif
    if (!fillColor) {
        if (state.FormatRBSwapped()) {
            if (!mRBSwapSupport) {
                LOGD("No R/B swap support in H/W Composer");
                return false;
            }
            hwcLayer.flags |= HwcUtils::HWC_FORMAT_RB_SWAP;
        }
        // Translation and scaling have been addressed in PrepareLayerRects().
        // Given the above and that we checked for PreservesAxisAlignedRectangles()
        // the only possible transformations left to address are
        // square angle rotation and horizontal/vertical reflection.
        //
        // The rotation and reflection permutations total 16 but can be
        // reduced to 8 transformations after eliminating redundancies.
        //
        // All matrices represented here are in the form
        //
        // | xx xy |
        // | yx yy |
        //
        // And ignore scaling.
        //
        // Reflection is applied before rotation
        gfx::Matrix rotation = layerTransform;
        // Compute fuzzy zero like PreservesAxisAlignedRectangles()
        if (fabs(rotation._11) < 1e-6) {
            if (rotation._21 < 0) {
                if (rotation._12 > 0) {
                    // 90 degree rotation
                    //
                    // | 0 -1 |
                    // | 1 0 |
                    //
                    hwcLayer.transform = HWC_TRANSFORM_ROT_90;
                    LOGD("Layer rotated 90 degrees");
                } else {
                    // Horizontal reflection then 90 degree rotation
                    //
                    // | 0 -1 | | -1 0 | = | 0 -1 |
                    // | 1 0 | | 0 1 | | -1 0 |
                    //
                    // same as vertical reflection then 270 degree rotation
                    //
                    // | 0 1 | | 1 0 | = | 0 -1 |
                    // | -1 0 | | 0 -1 | | -1 0 |
                    //
                    hwcLayer.transform = HWC_TRANSFORM_ROT_90 | HWC_TRANSFORM_FLIP_H;
                    LOGD("Layer vertically reflected then rotated 270 degrees");
                }
            } else {
                if (rotation._12 < 0) {
                    // 270 degree rotation
                    //
                    // | 0 1 |
                    // | -1 0 |
                    //
                    hwcLayer.transform = HWC_TRANSFORM_ROT_270;
                    LOGD("Layer rotated 270 degrees");
                } else {
                    // Vertical reflection then 90 degree rotation
                    //
                    // | 0 1 | | -1 0 | = | 0 1 |
                    // | -1 0 | | 0 1 | | 1 0 |
                    //
                    // Same as horizontal reflection then 270 degree rotation
                    //
                    // | 0 -1 | | 1 0 | = | 0 1 |
                    // | 1 0 | | 0 -1 | | 1 0 |
                    //
                    hwcLayer.transform = HWC_TRANSFORM_ROT_90 | HWC_TRANSFORM_FLIP_V;
                    LOGD("Layer horizontally reflected then rotated 270 degrees");
                }
            }
        } else if (rotation._11 < 0) {
            if (rotation._22 > 0) {
                // Horizontal reflection
                //
                // | -1 0 |
                // | 0 1 |
                //
                // NOTE(review): the log text below looks copy-pasted from the
                // 180-degree case; left byte-identical intentionally.
                hwcLayer.transform = HWC_TRANSFORM_FLIP_H;
                LOGD("Layer rotated 180 degrees");
            } else {
                // 180 degree rotation
                //
                // | -1 0 |
                // | 0 -1 |
                //
                // Same as horizontal and vertical reflection
                //
                // | -1 0 | | 1 0 | = | -1 0 |
                // | 0 1 | | 0 -1 | | 0 -1 |
                //
                hwcLayer.transform = HWC_TRANSFORM_ROT_180;
                LOGD("Layer rotated 180 degrees");
            }
        } else {
            if (rotation._22 < 0) {
                // Vertical reflection
                //
                // | 1 0 |
                // | 0 -1 |
                //
                // NOTE(review): log text also copy-pasted here; kept as-is.
                hwcLayer.transform = HWC_TRANSFORM_FLIP_V;
                LOGD("Layer rotated 180 degrees");
            } else {
                // No rotation or reflection
                //
                // | 1 0 |
                // | 0 1 |
                //
                hwcLayer.transform = 0;
            }
        }
        // Shadows the function-scope needsYFlip with the same value.
        const bool needsYFlip = state.OriginBottomLeft() ? true : false;
        if (needsYFlip) {
            // Invert vertical reflection flag if it was already set
            hwcLayer.transform ^= HWC_TRANSFORM_FLIP_V;
        }
        hwc_region_t region;
        if (visibleRegion.GetNumRects() > 1) {
            // Complex visible region: transform each rect and keep them alive
            // in mVisibleRegions so the HWC can reference them.
            mVisibleRegions.push_back(HwcUtils::RectVector());
            HwcUtils::RectVector* visibleRects = &(mVisibleRegions.back());
            if(!HwcUtils::PrepareVisibleRegion(visibleRegion, layerTransform, layerBufferTransform, clip, bufferRect, visibleRects)) {
                return true;
            }
            region.numRects = visibleRects->size();
            region.rects = &((*visibleRects)[0]);
        } else {
            region.numRects = 1;
            region.rects = &(hwcLayer.displayFrame);
        }
        hwcLayer.visibleRegionScreen = region;
    } else {
        hwcLayer.flags |= HwcUtils::HWC_COLOR_FILL;
        ColorLayer* colorLayer = aLayer->AsColorLayer();
        if (colorLayer->GetColor().a < 1.0) {
            LOGD("Color layer has semitransparency which is unsupported");
            return false;
        }
        // The packed fill color rides in the transform field for color-fill layers.
        hwcLayer.transform = colorLayer->GetColor().Packed();
    }
    mHwcLayerMap.AppendElement(static_cast<LayerComposite*>(aLayer->ImplData()));
    mList->numHwLayers++;
    return true;
}