/** Apply in-loop filters (deblocking, SAO) to a reconstructed picture,
 *  compress its motion field, and print per-picture decode statistics.
 *  \param rpcPic  reconstructed picture to filter; marked for output/recon on exit.
 *
 *  Changes vs. original: the three consecutive, identical
 *  `getUseSAO()` guards are merged into one (no code executed between
 *  them, so behavior is unchanged), the `#if H_3D` / `#if !H_3D` pair
 *  is rewritten as `#if/#else`, and a redundant doubled brace scope is
 *  removed.
 */
Void TDecGop::filterPicture(TComPic*& rpcPic)
{
  TComSlice* pcSlice = rpcPic->getSlice(rpcPic->getCurrSliceIdx());

  //-- For time output for each slice
  long iBeforeTime = clock();

  // deblocking filter
  Bool bLFCrossTileBoundary = pcSlice->getPPS()->getLoopFilterAcrossTilesEnabledFlag();
  m_pcLoopFilter->setCfg(bLFCrossTileBoundary);
  m_pcLoopFilter->loopFilterPic( rpcPic );

  // SAO: the original checked getUseSAO() three times in a row; one guard suffices.
  if (pcSlice->getSPS()->getUseSAO())
  {
    // Build slice/tile boundary info needed by the non-deblocking filter stage.
    m_sliceStartCUAddress.push_back(rpcPic->getNumCUsInFrame() * rpcPic->getNumPartInCU());
    rpcPic->createNonDBFilterInfo(m_sliceStartCUAddress, 0, &m_LFCrossSliceBoundaryFlag,
                                  rpcPic->getPicSym()->getNumTiles(), bLFCrossTileBoundary);

    // Run the SAO process with the parameters signalled for this slice.
    SAOParam *saoParam = rpcPic->getPicSym()->getSaoParam();
    saoParam->bSaoFlag[0] = pcSlice->getSaoEnabledFlag();       // luma
    saoParam->bSaoFlag[1] = pcSlice->getSaoEnabledFlagChroma(); // chroma
    m_pcSAO->setSaoLcuBasedOptimization(1);
    m_pcSAO->createPicSaoInfo(rpcPic);
    m_pcSAO->SAOProcess(saoParam);
    m_pcSAO->PCMLFDisableProcess(rpcPic);
    m_pcSAO->destroyPicSaoInfo();

    rpcPic->destroyNonDBFilterInfo();
  }

#if H_3D
  rpcPic->compressMotion(2);
#else
  rpcPic->compressMotion();
#endif

  // Slice-type letter for the log; +32 lowercases the ASCII letter when
  // the picture is not used as a reference.
  Char c = (pcSlice->isIntra() ? 'I' : pcSlice->isInterP() ? 'P' : 'B');
  if (!pcSlice->isReferenced())
  {
    c += 32;
  }

  //-- For time output for each slice
#if H_MV
  printf("\nLayer %2d POC %4d TId: %1d ( %c-SLICE, QP%3d ) ", pcSlice->getLayerId(), pcSlice->getPOC(), pcSlice->getTLayer(), c, pcSlice->getSliceQp() );
#else
  printf("\nPOC %4d TId: %1d ( %c-SLICE, QP%3d ) ", pcSlice->getPOC(), pcSlice->getTLayer(), c, pcSlice->getSliceQp() );
#endif

  m_dDecTime += (Double)(clock() - iBeforeTime) / CLOCKS_PER_SEC;
  printf ("[DT %6.3f] ", m_dDecTime );
  m_dDecTime = 0;

  // Print the reference POCs (or view indices under H_MV) of both lists.
  for (Int iRefList = 0; iRefList < 2; iRefList++)
  {
    printf ("[L%d ", iRefList);
    for (Int iRefIndex = 0; iRefIndex < pcSlice->getNumRefIdx(RefPicList(iRefList)); iRefIndex++)
    {
#if H_MV
      if( pcSlice->getLayerId() != pcSlice->getRefLayerId( RefPicList(iRefList), iRefIndex ) )
      {
        printf( "V%d ", pcSlice->getRefLayerId( RefPicList(iRefList), iRefIndex ) );
      }
      else
      {
#endif
      printf ("%d ", pcSlice->getRefPOC(RefPicList(iRefList), iRefIndex));
#if H_MV
      }
#endif
    }
    printf ("] ");
  }

  // Verify the decoded picture against the hash SEI, if one was received.
  if (m_decodedPictureHashSEIEnabled)
  {
    SEIMessages pictureHashes = getSeisByType(rpcPic->getSEIs(), SEI::DECODED_PICTURE_HASH );
    const SEIDecodedPictureHash *hash = ( pictureHashes.size() > 0 ) ? (SEIDecodedPictureHash*) *(pictureHashes.begin()) : NULL;
    if (pictureHashes.size() > 1)
    {
      printf ("Warning: Got multiple decoded picture hash SEI messages. Using first.");
    }
    calcAndPrintHashStatus(*rpcPic->getPicYuvRec(), hash);
  }

  rpcPic->setOutputMark(true);
  rpcPic->setReconMark(true);
  m_sliceStartCUAddress.clear();
  m_LFCrossSliceBoundaryFlag.clear();
}
/** Prepare a picture's slice for encoding: insert it into the DPB,
 *  assign its NAL unit type, build and apply its reference picture set,
 *  configure the reference lists, and set slice-level flags (LDC, TMVP,
 *  MvdL1Zero).  Ordering matters throughout: the picture is pushed into
 *  m_picList before RPS computation, and ref lists are built before the
 *  low-delay / GPB scans.
 *  \param pic  picture whose slice is being prepared; its refcounted
 *              motion references are incremented on exit.
 */
void DPB::prepareEncode(TComPic *pic)
{
    PPAScopeEvent(DPB_prepareEncode);
    int pocCurr = pic->getSlice()->getPOC();

    // Insert the picture at the front of the DPB list before any RPS work.
    m_picList.pushFront(*pic);
    TComSlice* slice = pic->getSlice();

    // Track the POC of the most recent IDR; both IDR flavours reset it.
    if (getNalUnitType(pocCurr, m_lastIDR, pic) == NAL_UNIT_CODED_SLICE_IDR_W_RADL || getNalUnitType(pocCurr, m_lastIDR, pic) == NAL_UNIT_CODED_SLICE_IDR_N_LP)
    {
        m_lastIDR = pocCurr;
    }
    slice->setLastIDR(m_lastIDR);
    slice->setTemporalLayerNonReferenceFlag(!slice->isReferenced());

    // Set the nal unit type
    slice->setNalUnitType(getNalUnitType(pocCurr, m_lastIDR, pic));

    // If the slice is un-referenced, change from _R "referenced" to _N "non-referenced" NAL unit type
    if (slice->getTemporalLayerNonReferenceFlag())
    {
        switch (slice->getNalUnitType())
        {
        case NAL_UNIT_CODED_SLICE_TRAIL_R:
            slice->setNalUnitType(NAL_UNIT_CODED_SLICE_TRAIL_N);
            break;
        case NAL_UNIT_CODED_SLICE_RADL_R:
            slice->setNalUnitType(NAL_UNIT_CODED_SLICE_RADL_N);
            break;
        case NAL_UNIT_CODED_SLICE_RASL_R:
            slice->setNalUnitType(NAL_UNIT_CODED_SLICE_RASL_N);
            break;
        default:
            break;
        }
    }

    // Do decoding refresh marking if any
    decodingRefreshMarking(pocCurr, slice->getNalUnitType());

    // Build the RPS into the slice-local storage and force its use
    // (RPSidx == -1 means "signalled in slice header, not from SPS").
    computeRPS(pocCurr, slice->isIRAP(), slice->getLocalRPS(), slice->getSPS()->getMaxDecPicBuffering(0));
    slice->setRPS(slice->getLocalRPS());
    slice->setRPSidx(-1); // Force use of RPS from slice, rather than from SPS
    applyReferencePictureSet(slice->getRPS(), pocCurr); // Mark pictures in m_piclist as unreferenced if they are not included in RPS

    arrangeLongtermPicturesInRPS(slice);

    // L0 holds past (negative delta-POC) refs, L1 future (positive) refs,
    // each capped by the configured maximum list size.
    slice->setNumRefIdx(REF_PIC_LIST_0, X265_MIN(m_maxRefL0, slice->getRPS()->getNumberOfNegativePictures())); // Ensuring L0 contains just the -ve POC
    slice->setNumRefIdx(REF_PIC_LIST_1, X265_MIN(m_maxRefL1, slice->getRPS()->getNumberOfPositivePictures()));
    slice->setRefPicList(m_picList);

    // Slice type refinement
    // A B-slice with an empty L1 degenerates to a P-slice.
    if ((slice->getSliceType() == B_SLICE) && (slice->getNumRefIdx(REF_PIC_LIST_1) == 0))
    {
        slice->setSliceType(P_SLICE);
    }

    if (slice->getSliceType() == B_SLICE)
    {
        // TODO: Can we estimate this from lookahead?
        slice->setColFromL0Flag(0);

        // Low-delay check: true iff every reference in both lists
        // precedes the current picture in output (POC) order.
        bool bLowDelay = true;
        int curPOC = slice->getPOC();
        int refIdx = 0;

        for (refIdx = 0; refIdx < slice->getNumRefIdx(REF_PIC_LIST_0) && bLowDelay; refIdx++)
        {
            if (slice->getRefPic(REF_PIC_LIST_0, refIdx)->getPOC() > curPOC)
            {
                bLowDelay = false;
            }
        }

        for (refIdx = 0; refIdx < slice->getNumRefIdx(REF_PIC_LIST_1) && bLowDelay; refIdx++)
        {
            if (slice->getRefPic(REF_PIC_LIST_1, refIdx)->getPOC() > curPOC)
            {
                bLowDelay = false;
            }
        }

        slice->setCheckLDC(bLowDelay);
    }
    else
    {
        // Non-B slices are trivially low-delay.
        slice->setCheckLDC(true);
    }

    slice->setRefPOCList();
    slice->setEnableTMVPFlag(1);

    // GPB check: MvdL1Zero may be signalled when L0 and L1 are identical
    // (same length, same POC at every index).
    bool bGPBcheck = false;
    if (slice->getSliceType() == B_SLICE)
    {
        if (slice->getNumRefIdx(REF_PIC_LIST_0) == slice->getNumRefIdx(REF_PIC_LIST_1))
        {
            bGPBcheck = true;
            for (int i = 0; i < slice->getNumRefIdx(REF_PIC_LIST_1); i++)
            {
                if (slice->getRefPOC(REF_PIC_LIST_1, i) != slice->getRefPOC(REF_PIC_LIST_0, i))
                {
                    bGPBcheck = false;
                    break;
                }
            }
        }
    }
    slice->setMvdL1ZeroFlag(bGPBcheck);
    slice->setNextSlice(false);

    /* Increment reference count of all motion-referenced frames. This serves two purposes. First
     * it prevents the frame from being recycled, and second the referenced frames know how many
     * other FrameEncoders are using them for motion reference */
    int numPredDir = slice->isInterP() ? 1 : slice->isInterB() ? 2 : 0;
    for (int l = 0; l < numPredDir; l++)
    {
        for (int ref = 0; ref < slice->getNumRefIdx(l); ref++)
        {
            TComPic *refpic = slice->getRefPic(l, ref);
            ATOMIC_INC(&refpic->m_countRefEncoders);
        }
    }
}
/** Compress one frame: derive QP/lambda values, distribute them to the
 *  per-row search/RD-cost contexts, configure SAO depth, allocate the
 *  output substreams, and (optionally) set adaptive search ranges.
 *  NOTE: this span shows only the beginning of the function; it
 *  continues beyond the visible chunk.
 */
void FrameEncoder::compressFrame()
{
    PPAScopeEvent(FrameEncoder_compressFrame);
    int64_t startCompressTime = x265_mdate();
    TEncEntropy* entropyCoder = getEntropyCoder(0);
    TComSlice* slice = m_pic->getSlice();
    m_nalCount = 0;

    // Lambda is looked up from per-QP tables, separate tables for I vs non-I.
    int qp = slice->getSliceQp();
    double lambda = 0;
    if (slice->getSliceType() == I_SLICE)
    {
        lambda = X265_MAX(1, x265_lambda2_tab_I[qp]);
    }
    else
    {
        lambda = X265_MAX(1, x265_lambda2_non_I[qp]);
    }

    // for RDO
    // in RdCost there is only one lambda because the luma and chroma bits are not separated,
    // instead we weight the distortion of chroma.
    int qpc;
    int chromaQPOffset = slice->getPPS()->getChromaCbQpOffset() + slice->getSliceQpDeltaCb();
    qpc = Clip3(0, 57, qp + chromaQPOffset);
    double cbWeight = pow(2.0, (qp - g_chromaScale[qpc])); // takes into account of the chroma qp mapping and chroma qp Offset
    chromaQPOffset = slice->getPPS()->getChromaCrQpOffset() + slice->getSliceQpDeltaCr();
    qpc = Clip3(0, 57, qp + chromaQPOffset);
    double crWeight = pow(2.0, (qp - g_chromaScale[qpc])); // takes into account of the chroma qp mapping and chroma qp Offset
    double chromaLambda = lambda / crWeight;

    // Propagate QP/lambda and distortion weights to every row context.
    TComPicYuv *fenc = slice->getPic()->getPicYuvOrg();
    for (int i = 0; i < m_numRows; i++)
    {
        m_rows[i].m_search.setQPLambda(qp, lambda, chromaLambda);
        m_rows[i].m_search.m_me.setSourcePlane(fenc->getLumaAddr(), fenc->getStride());
        m_rows[i].m_rdCost.setLambda(lambda);
        m_rows[i].m_rdCost.setCbDistortionWeight(cbWeight);
        m_rows[i].m_rdCost.setCrDistortionWeight(crWeight);
    }

    // SAO depth grows with slice type; +1 more for non-referenced B frames.
    m_frameFilter.m_sao.lumaLambda = lambda;
    m_frameFilter.m_sao.chromaLambda = chromaLambda;
    switch (slice->getSliceType())
    {
    case I_SLICE:
        m_frameFilter.m_sao.depth = 0;
        break;
    case P_SLICE:
        m_frameFilter.m_sao.depth = 1;
        break;
    case B_SLICE:
        m_frameFilter.m_sao.depth = 2 + !slice->isReferenced();
        break;
    }

    slice->setSliceQpDelta(0);
    slice->setSliceQpDeltaCb(0);
    slice->setSliceQpDeltaCr(0);

    // One substream per CU row under WPP, otherwise a single stream.
    int numSubstreams = m_cfg->param.bEnableWavefront ? m_pic->getPicSym()->getFrameHeightInCU() : 1;
    // TODO: these two items can likely be FrameEncoder member variables to avoid re-allocs
    TComOutputBitstream* bitstreamRedirect = new TComOutputBitstream;
    TComOutputBitstream* outStreams = new TComOutputBitstream[numSubstreams];

    // Adaptive search range: scale the motion search range with the POC
    // distance to each reference picture.
    if (m_cfg->getUseASR() && !slice->isIntra())
    {
        int pocCurr = slice->getPOC();
        int maxSR = m_cfg->param.searchRange;
        int numPredDir = slice->isInterP() ? 1 : 2;

        // NOTE(review): `dir <= numPredDir` iterates dir = 0..2 for B slices,
        // where only lists 0 and 1 exist; presumably getNumRefIdx(2) returns 0
        // so the extra iteration is harmless — confirm, else this should be
        // `dir < numPredDir`.
        for (int dir = 0; dir <= numPredDir; dir++)
        {
            for (int refIdx = 0; refIdx < slice->getNumRefIdx(dir); refIdx++)
            {
                int refPOC = slice->getRefPic(dir, refIdx)->getPOC();
                int newSR = Clip3(8, maxSR, (maxSR * ADAPT_SR_SCALE * abs(pocCurr - refPOC) + 4) >> 3);
                for (int i = 0; i < m_numRows; i++)
                {
                    m_rows[i].m_search.setAdaptiveSearchRange(dir, refIdx, newSR);
                }
            }
        }
    }