Ejemplo n.º 1
0
/** Feed one source picture to the encoder and, once a complete GOP (or a
 *  flushed partial GOP) has been buffered, compress it.
 *
 *  The application maintains a picture buffer list of GOP size + 1 that
 *  behaves like a ring buffer; the most recent picture sits at the end.
 *
 \param   flush               force encoding of a partial GOP (end of sequence)
 \param   pcPicYuvOrg         original YUV picture (may be NULL when flushing)
 \param   pcPicYuvTrueOrg     unmodified source picture (stored alongside the working original)
 \param   snrCSC              colour-space conversion used for SNR computation
 \retval  rcListPicYuvRecOut  list of reconstruction YUV pictures
 \retval  accessUnitsOut      list of output access units
 \retval  iNumEncoded         number of pictures encoded by this call
 */
Void TEncTop::encode( Bool flush, TComPicYuv* pcPicYuvOrg, TComPicYuv* pcPicYuvTrueOrg, const InputColourSpaceConversion snrCSC, TComList<TComPicYuv*>& rcListPicYuvRecOut, std::list<AccessUnit>& accessUnitsOut, Int& iNumEncoded )
{
  if (pcPicYuvOrg != NULL)
  {
    // Claim a picture buffer and copy both source variants into it.
    TComPic* pcDestPic = NULL;
    xGetNewPicBuffer( pcDestPic );
    pcPicYuvOrg->copyToPic( pcDestPic->getPicYuvOrg() );
    pcPicYuvTrueOrg->copyToPic( pcDestPic->getPicYuvTrueOrg() );

    // Pre-analysis feeds the adaptive-QP decision, when enabled.
    if ( getUseAdaptiveQP() )
    {
      m_cPreanalyzer.xPreanalyze( dynamic_cast<TEncPic*>( pcDestPic ) );
    }
  }

  // Encode only when something is buffered AND the GOP is ready: either we
  // are flushing, this is the very first picture (POC 0), a full GOP has
  // accumulated, or the GOP size is zero.
  const Bool bGopReady = flush || (m_iPOCLast == 0) || (m_iNumPicRcvd == m_iGOPSize) || (m_iGOPSize == 0);
  if ( (m_iNumPicRcvd == 0) || !bGopReady )
  {
    iNumEncoded = 0;
    return;
  }

  if ( m_RCEnableRateControl )
  {
    m_cRateCtrl.initRCGOP( m_iNumPicRcvd );
  }

  // Compress the buffered GOP.
  m_cGOPEncoder.compressGOP(m_iPOCLast, m_iNumPicRcvd, m_cListPic, rcListPicYuvRecOut, accessUnitsOut, false, false, snrCSC, m_printFrameMSE,&m_cSearch);

  if ( m_RCEnableRateControl )
  {
    m_cRateCtrl.destroyRCGOP();
  }

  // Report how many pictures went out and reset the receive counter.
  iNumEncoded         = m_iNumPicRcvd;
  m_iNumPicRcvd       = 0;
  m_uiNumAllPicCoded += iNumEncoded;
}
Ejemplo n.º 2
0
// Conceal a missing picture: allocate a buffer for the lost POC, copy the
// reconstruction of the nearest decoded picture into it, and mark it as a
// decoded/outputtable reference so decoding can continue.
Void TDecTop::xCreateLostPicture(Int iLostPoc)
{
  printf("\ninserting lost poc : %d\n",iLostPoc);
  TComPic* pcFillPic;
  xGetNewPicBuffer(*(m_parameterSetManager.getFirstSPS()), *(m_parameterSetManager.getFirstPPS()), pcFillPic, 0);
  pcFillPic->getSlice(0)->initSlice();

  // Pass 1: find the smallest non-zero POC distance to the lost picture,
  // ignoring the picture currently being parsed (the slice pilot's POC).
  Int iClosestDist = 1000000;
  for (TComList<TComPic*>::iterator it = m_cListPic.begin(); it != m_cListPic.end(); ++it)
  {
    TComPic*  pcCand = *it;
    const Int iPoc   = pcCand->getPicSym()->getSlice(0)->getPOC();
    const Int iDist  = abs(iPoc - iLostPoc);
    if (iDist < iClosestDist && iDist != 0 && iPoc != m_apcSlicePilot->getPOC())
    {
      iClosestDist = iDist;
    }
  }

  // Pass 2: copy the reconstruction of the first picture at that distance.
  for (TComList<TComPic*>::iterator it = m_cListPic.begin(); it != m_cListPic.end(); ++it)
  {
    TComPic*  pcCand = *it;
    const Int iPoc   = pcCand->getPicSym()->getSlice(0)->getPOC();
    if (abs(iPoc - iLostPoc) == iClosestDist && iPoc != m_apcSlicePilot->getPOC())
    {
      printf("copying picture %d to %d (%d)\n",iPoc,iLostPoc,m_apcSlicePilot->getPOC());
      pcCand->getPicYuvRec()->copyToPic(pcFillPic->getPicYuvRec());
      break;
    }
  }

  // Initialise CTU structures and flag the picture as a usable reference.
  pcFillPic->setCurrSliceIdx(0);
  for (Int ctuRsAddr = 0; ctuRsAddr < pcFillPic->getNumberOfCtusInFrame(); ctuRsAddr++)
  {
    pcFillPic->getCtu(ctuRsAddr)->initCtu(pcFillPic, ctuRsAddr);
  }
  pcFillPic->getSlice(0)->setReferenced(true);
  pcFillPic->getSlice(0)->setPOC(iLostPoc);
  pcFillPic->setReconMark(true);
  pcFillPic->setOutputMark(true);
  if (m_pocRandomAccess == MAX_INT)
  {
    m_pocRandomAccess = iLostPoc;
  }
}
Ejemplo n.º 3
0
// Decode a single NAL unit (SPS/PPS/SEI/coded slice).
// \param nalu             input NAL unit (type, bitstream, ref idc, temporal id)
// \param iSkipFrame       in/out: frames remaining to skip for random access
// \param iPOCLastDisplay  in/out: used by the random-access skip decision
// \return true when the incoming slice starts a NEW picture (POC changed),
//         telling the caller to finish the current picture first; false otherwise.
Bool TDecTop::decode(InputNALUnit& nalu, Int& iSkipFrame, Int& iPOCLastDisplay)
{
  TComPic*&   pcPic         = m_pcPic;
#if E045_SLICE_COMMON_INFO_SHARING
  // NOTE(review): function-local static — this pending-PPS pointer persists
  // across calls AND across decoder instances; not reentrant. Confirm single
  // decoder instance is assumed.
  static TComPPS*    pcNewPPS = NULL;
#endif

  // Initialize entropy decoder
  m_cEntropyDecoder.setEntropyDecoder (&m_cCavlcDecoder);
  m_cEntropyDecoder.setBitstream      (nalu.m_Bitstream);

  switch (nalu.m_UnitType)
  {
    case NAL_UNIT_SPS:
      // Parse the SPS, then size all per-sequence buffers (ALF/SAO/deblocking)
      // from its picture dimensions.
      m_cEntropyDecoder.decodeSPS( &m_cSPS );

#if ENABLE_ANAYSIS_OUTPUT
      TSysuAnalyzerOutput::getInstance()->writeOutSps( &m_cSPS );
#endif

#if AMP    
      // Enable AMP accuracy for all depths except the deepest one.
      for (Int i = 0; i < m_cSPS.getMaxCUDepth() - 1; i++)
      {
        // m_cSPS.setAMPAcc( i, m_cSPS.getUseAMP() );
        m_cSPS.setAMPAcc( i, 1 );
      }

      for (Int i = m_cSPS.getMaxCUDepth() - 1; i < m_cSPS.getMaxCUDepth(); i++)
      {
        m_cSPS.setAMPAcc( i, 0 );
      }
#endif

      // create ALF temporary buffer
      m_cAdaptiveLoopFilter.create( m_cSPS.getWidth(), m_cSPS.getHeight(), g_uiMaxCUWidth, g_uiMaxCUHeight, g_uiMaxCUDepth );
#if MTK_SAO
      m_cSAO.create( m_cSPS.getWidth(), m_cSPS.getHeight(), g_uiMaxCUWidth, g_uiMaxCUHeight, g_uiMaxCUDepth );
#endif
#if PARALLEL_MERGED_DEBLK
      m_cLoopFilter.create( m_cSPS.getWidth(), m_cSPS.getHeight(), g_uiMaxCUWidth, g_uiMaxCUHeight, g_uiMaxCUDepth );
#else
      m_cLoopFilter.        create( g_uiMaxCUDepth );
#endif
#if E045_SLICE_COMMON_INFO_SHARING
      createPPSBuffer();
#endif
      // Bit 0 of m_uiValidPS: SPS received.
      m_uiValidPS |= 1;
      
      return false;

    case NAL_UNIT_PPS:

#if E045_SLICE_COMMON_INFO_SHARING
      // Parse into a spare PPS buffer; it is activated at the next first
      // slice of a picture (see hasNewPPS() below).
      pcNewPPS = getNewPPSBuffer();
#if SUB_LCU_DQP
      pcNewPPS->setSPS(&m_cSPS);
#endif
      m_cEntropyDecoder.decodePPS( pcNewPPS );
      if(pcNewPPS->getSharedPPSInfoEnabled())
      {
        if(m_cSPS.getUseALF())
        {
          m_cEntropyDecoder.decodeAlfParam( pcNewPPS->getSharedAlfParam());
        }
      }
      signalNewPPSAvailable();
#else
#if SUB_LCU_DQP
      m_cPPS.setSPS(&m_cSPS);
#endif
      m_cEntropyDecoder.decodePPS( &m_cPPS );
#endif
      // Bit 1 of m_uiValidPS: PPS received.
      m_uiValidPS |= 2;
      return false;

    case NAL_UNIT_SEI:
      // NOTE(review): any previously stored, unconsumed m_SEIs appears to be
      // overwritten (leaked) here — confirm ownership hand-off in setSEIs below.
      m_SEIs = new SEImessages;
      m_cEntropyDecoder.decodeSEI(*m_SEIs);
      return false;

    case NAL_UNIT_CODED_SLICE:
    case NAL_UNIT_CODED_SLICE_IDR:
    case NAL_UNIT_CODED_SLICE_CDR:
    {
      // make sure we already received both parameter sets
      assert( 3 == m_uiValidPS );
      if (m_bFirstSliceInPicture)
      {
        m_apcSlicePilot->initSlice();
        m_uiSliceIdx     = 0;
        m_uiLastSliceIdx = 0;
#if E045_SLICE_COMMON_INFO_SHARING
        // Activate a pending PPS only at a picture boundary.
        if(hasNewPPS())
        {
          m_pcPPS = pcNewPPS;
          updatePPSBuffer();
        }
#endif
      }
      m_apcSlicePilot->setSliceIdx(m_uiSliceIdx);

      //  Read slice header
      m_apcSlicePilot->setSPS( &m_cSPS );
#if E045_SLICE_COMMON_INFO_SHARING
      m_apcSlicePilot->setPPS( m_pcPPS );
#else
      m_apcSlicePilot->setPPS( &m_cPPS );
#endif
      m_apcSlicePilot->setSliceIdx(m_uiSliceIdx);
      if (!m_bFirstSliceInPicture)
      {
        // NOTE(review): shallow memcpy of a TComSlice — copies raw pointer
        // members verbatim; relies on the pilot never owning those pointers.
        memcpy(m_apcSlicePilot, pcPic->getPicSym()->getSlice(m_uiSliceIdx-1), sizeof(TComSlice));
      }

      m_apcSlicePilot->setNalUnitType(nalu.m_UnitType);
      m_apcSlicePilot->setReferenced(nalu.m_RefIDC != NAL_REF_IDC_PRIORITY_LOWEST);
      m_cEntropyDecoder.decodeSliceHeader (m_apcSlicePilot);

      if ( m_apcSlicePilot->getSymbolMode() )
      {
        // Consume the byte-alignment bits after the slice header; they must
        // all be ones (checked by the assert).
        Int numBitsForByteAlignment = nalu.m_Bitstream->getNumBitsUntilByteAligned();
        if ( numBitsForByteAlignment > 0 )
        {
          UInt bitsForByteAlignment;
          nalu.m_Bitstream->read( numBitsForByteAlignment, bitsForByteAlignment );
          assert( bitsForByteAlignment == ( ( 1 << numBitsForByteAlignment ) - 1 ) );
        }
      }
      m_apcSlicePilot->setTLayerInfo(nalu.m_TemporalID);

      // A POC change on a next-slice header means this slice belongs to a new
      // picture: bail out so the caller can finish the current one.
      if (m_apcSlicePilot->isNextSlice() && m_apcSlicePilot->getPOC()!=m_uiPrevPOC && !m_bFirstSliceInSequence)
      {
        m_uiPrevPOC = m_apcSlicePilot->getPOC();
        return true;
      }
      if (m_apcSlicePilot->isNextSlice()) 
        m_uiPrevPOC = m_apcSlicePilot->getPOC();
      m_bFirstSliceInSequence = false;
      if (m_apcSlicePilot->isNextSlice())
      {
        // Skip pictures due to random access
        if (isRandomAccessSkipPicture(iSkipFrame, iPOCLastDisplay))
        {
          return false;
        }
      }
      
      if (m_bFirstSliceInPicture)
      {
        // Buffer initialize for prediction.
        m_cPrediction.initTempBuff();
        //  Get a new picture buffer
        xGetNewPicBuffer (m_apcSlicePilot, pcPic);
        
        /* transfer any SEI messages that have been received to the picture */
        pcPic->setSEIs(m_SEIs);
        m_SEIs = NULL;

        // Recursive structure
        m_cCuDecoder.create ( g_uiMaxCUDepth, g_uiMaxCUWidth, g_uiMaxCUHeight );
        m_cCuDecoder.init   ( &m_cEntropyDecoder, &m_cTrQuant, &m_cPrediction );
        m_cTrQuant.init     ( g_uiMaxCUWidth, g_uiMaxCUHeight, m_apcSlicePilot->getSPS()->getMaxTrSize());
        
        m_cSliceDecoder.create( m_apcSlicePilot, m_apcSlicePilot->getSPS()->getWidth(), m_apcSlicePilot->getSPS()->getHeight(), g_uiMaxCUWidth, g_uiMaxCUHeight, g_uiMaxCUDepth );
      }

      //  Set picture slice pointer
      TComSlice*  pcSlice = m_apcSlicePilot;
      Bool bNextSlice     = pcSlice->isNextSlice();
      if (m_bFirstSliceInPicture) 
      {
        if(pcPic->getNumAllocatedSlice() != 1)
        {
          pcPic->clearSliceBuffer();
        }
      }
      else
      {
        pcPic->allocateNewSlice();
      }
      assert(pcPic->getNumAllocatedSlice() == (m_uiSliceIdx + 1));
      // Swap the pilot with the picture's slice slot: the parsed slice is
      // installed in the picture and the old slot becomes the new pilot.
      m_apcSlicePilot = pcPic->getPicSym()->getSlice(m_uiSliceIdx); 
      pcPic->getPicSym()->setSlice(pcSlice, m_uiSliceIdx);

      pcPic->setTLayer(nalu.m_TemporalID);

      if (bNextSlice)
      {
        // Do decoding refresh marking if any
        pcSlice->decodingRefreshMarking(m_uiPOCCDR, m_bRefreshPending, m_cListPic);
        
        // Set reference list
        pcSlice->setRefPicList( m_cListPic );
        
        // HierP + GPB case
        if ( m_cSPS.getUseLDC() && pcSlice->isInterB() )
        {
          if(pcSlice->getRefPicListCombinationFlag() && (pcSlice->getNumRefIdx(REF_PIC_LIST_0) > pcSlice->getNumRefIdx(REF_PIC_LIST_1)))
          {
            for (Int iRefIdx = 0; iRefIdx < pcSlice->getNumRefIdx(REF_PIC_LIST_1); iRefIdx++)
            {
              pcSlice->setRefPic(pcSlice->getRefPic(REF_PIC_LIST_0, iRefIdx), REF_PIC_LIST_1, iRefIdx);
            }
          }
          else
          {
            // Mirror list 0 into list 1 entirely.
            Int iNumRefIdx = pcSlice->getNumRefIdx(REF_PIC_LIST_0);
            pcSlice->setNumRefIdx( REF_PIC_LIST_1, iNumRefIdx );
            
            for (Int iRefIdx = 0; iRefIdx < iNumRefIdx; iRefIdx++)
            {
              pcSlice->setRefPic(pcSlice->getRefPic(REF_PIC_LIST_0, iRefIdx), REF_PIC_LIST_1, iRefIdx);
            }
          }
        }
        
        // For generalized B
        // note: maybe not existed case (always L0 is copied to L1 if L1 is empty)
        if (pcSlice->isInterB() && pcSlice->getNumRefIdx(REF_PIC_LIST_1) == 0)
        {
          Int iNumRefIdx = pcSlice->getNumRefIdx(REF_PIC_LIST_0);
          pcSlice->setNumRefIdx        ( REF_PIC_LIST_1, iNumRefIdx );
          
          for (Int iRefIdx = 0; iRefIdx < iNumRefIdx; iRefIdx++)
          {
            pcSlice->setRefPic(pcSlice->getRefPic(REF_PIC_LIST_0, iRefIdx), REF_PIC_LIST_1, iRefIdx);
          }
        }
#if TMVP_ONE_LIST_CHECK
        // Low-delay check: true iff no reference in either list has a POC
        // greater than the current picture's POC.
        if (pcSlice->isInterB())
        {
          Bool bLowDelay = true;
          Int  iCurrPOC  = pcSlice->getPOC();
          Int iRefIdx = 0;

          for (iRefIdx = 0; iRefIdx < pcSlice->getNumRefIdx(REF_PIC_LIST_0) && bLowDelay; iRefIdx++)
          {
            if ( pcSlice->getRefPic(REF_PIC_LIST_0, iRefIdx)->getPOC() > iCurrPOC )
            {
              bLowDelay = false;
            }
          }
          for (iRefIdx = 0; iRefIdx < pcSlice->getNumRefIdx(REF_PIC_LIST_1) && bLowDelay; iRefIdx++)
          {
            if ( pcSlice->getRefPic(REF_PIC_LIST_1, iRefIdx)->getPOC() > iCurrPOC )
            {
              bLowDelay = false;
            }
          }

          pcSlice->setCheckLDC(bLowDelay);            
        }
#endif
        
        //---------------
        pcSlice->setRefPOCList();
        
        if(!pcSlice->getRefPicListModificationFlagLC())
        {
          pcSlice->generateCombinedList();
        }
        
        // NoBackPred: set when both lists have equal length and identical
        // reference POCs entry-for-entry.
        pcSlice->setNoBackPredFlag( false );
        if ( pcSlice->getSliceType() == B_SLICE && !pcSlice->getRefPicListCombinationFlag())
        {
          if ( pcSlice->getNumRefIdx(RefPicList( 0 ) ) == pcSlice->getNumRefIdx(RefPicList( 1 ) ) )
          {
            pcSlice->setNoBackPredFlag( true );
            int i;
            for ( i=0; i < pcSlice->getNumRefIdx(RefPicList( 1 ) ); i++ )
            {
              if ( pcSlice->getRefPOC(RefPicList(1), i) != pcSlice->getRefPOC(RefPicList(0), i) ) 
              {
                pcSlice->setNoBackPredFlag( false );
                break;
              }
            }
          }
        }
      }
      
      pcPic->setCurrSliceIdx(m_uiSliceIdx);

      //  Decode a picture
#if REF_SETTING_FOR_LD
      m_cGopDecoder.decompressGop(nalu.m_Bitstream, pcPic, false, m_cListPic );
#else
      m_cGopDecoder.decompressGop(nalu.m_Bitstream, pcPic, false);
#endif

      m_bFirstSliceInPicture = false;
      m_uiSliceIdx++;
    }
      break;
    default:
      // NOTE(review): assert(1) is a no-op — likely intended assert(0) to trap
      // unexpected NAL unit types. Confirm against upstream reference code.
      assert (1);
  }

  return false;
}
Ejemplo n.º 4
0
// Main decode loop of the decoder test application: repeatedly extracts NAL
// units from the bitstream reader, reassembles fragmented packets into a
// single BinData, feeds them to the H.264/MVC decoder, and writes the
// reconstructed pictures to the YUV output file. Also implements a simple
// loss-robustness scheme: when an output POC gap exceeds uiMaxPocDiff, the
// last written frame is repeated to fill the gap.
// \return Err::m_nOK on success; RNOK propagates any sub-call failure.
ErrVal H264AVCDecoderTest::go()
{
  PicBuffer*    pcPicBuffer = NULL;
  PicBufferList cPicBufferOutputList; 
  PicBufferList cPicBufferUnusedList;
  PicBufferList cPicBufferReleaseList;

  UInt      uiMbX           = 0;
  UInt      uiMbY           = 0;
  UInt      uiNalUnitType   = 0;
  UInt      uiSize          = 0;
  UInt      uiLumOffset     = 0;
  UInt      uiCbOffset      = 0;
  UInt      uiCrOffset      = 0;
  UInt      uiFrame;
  
  Bool      bEOS            = false;
  Bool      bYuvDimSet      = false;


  // HS: packet trace
  UInt   uiMaxPocDiff = m_pcParameter->uiMaxPocDiff;
  UInt   uiLastPoc    = MSYS_UINT_MAX;
  UChar* pcLastFrame  = 0;   // copy of the last written frame, for loss concealment
  UInt   uiPreNalUnitType = 0;

  cPicBufferOutputList.clear();
  cPicBufferUnusedList.clear();


  RNOK( m_pcH264AVCDecoder->init(true, m_pcParameter) ); 


  Bool bToDecode = false; //JVT-P031
  // NOTE(review): "uiFrame <= MSYS_UINT_MAX" is always true for a UInt, so the
  // loop terminates only via bEOS.
  for( uiFrame = 0; ( uiFrame <= MSYS_UINT_MAX && ! bEOS); )
  {
    BinData* pcBinData;
    BinDataAccessor cBinDataAccessor;

    Int  iPos;
//    Bool bFinishChecking;

    RNOK( m_pcReadBitstream->getPosition(iPos) );

    //JVT-P031
    // Fragment-reassembly state for one (possibly fragmented) NAL unit.
    Bool bFragmented = false;
    Bool bDiscardable = false;
    Bool bStart = false;
    Bool bFirst = true;
    UInt uiTotalLength = 0;
#define MAX_FRAGMENTS 10 // hard-coded
    // NOTE(review): uiFragNb is incremented below without a bound check
    // against MAX_FRAGMENTS — more than 10 fragments would overflow these
    // arrays. Confirm input constraints.
    BinData* pcBinDataTmp[MAX_FRAGMENTS];
    BinDataAccessor cBinDataAccessorTmp[MAX_FRAGMENTS];
    UInt uiFragNb, auiStartPos[MAX_FRAGMENTS], auiEndPos[MAX_FRAGMENTS];
	Bool bConcatenated = false; //FRAG_FIX_3
    Bool bSkip  = false;  // Dong: To skip unknown NAL unit types
    uiFragNb = 0;
    bEOS = false;
    pcBinData = 0;

    // Collect fragments until a complete NAL unit (bStart) or end of stream.
    while(!bStart && !bEOS)
    {
      if(bFirst)
      {
          RNOK( m_pcReadBitstream->setPosition(iPos) );
          bFirst = false;
      }

      RNOK( m_pcReadBitstream->extractPacket( pcBinDataTmp[uiFragNb], bEOS ) );

//TMM_EC {{
			// NAL type 0x0b signals end of stream here; substitute an empty
			// packet and reuse the previous NAL unit type.
			if( !bEOS && ((pcBinDataTmp[uiFragNb]->data())[0] & 0x1f )== 0x0b)
			{
				printf("end of stream\n");
				bEOS=true;
				uiNalUnitType= uiPreNalUnitType;
        RNOK( m_pcReadBitstream->releasePacket( pcBinDataTmp[uiFragNb] ) );
        pcBinDataTmp[uiFragNb] = new BinData;
				uiTotalLength	=	0;
        pcBinDataTmp[uiFragNb]->set( new UChar[uiTotalLength], uiTotalLength );
			}
//TMM_EC }}

      pcBinDataTmp[uiFragNb]->setMemAccessor( cBinDataAccessorTmp[uiFragNb] );

      bSkip = false;
      // open the NAL Unit, determine the type and if it's a slice get the frame size

      RNOK( m_pcH264AVCDecoder->initPacket( &cBinDataAccessorTmp[uiFragNb], 
                                            uiNalUnitType, uiMbX, uiMbY, uiSize,  true, 
		  false, //FRAG_FIX_3
//		  bStart, auiStartPos[uiFragNb], auiEndPos[uiFragNb], bFragmented, bDiscardable ) );
		  bStart, auiStartPos[uiFragNb], auiEndPos[uiFragNb], bFragmented, bDiscardable, this->m_pcParameter->getNumOfViews(), bSkip ) );

      uiTotalLength += auiEndPos[uiFragNb] - auiStartPos[uiFragNb];

      // Dong: Skip unknown NAL units
      if( bSkip )
      {
        printf("Unknown NAL unit type: %d\n", uiNalUnitType);
        uiTotalLength -= (auiEndPos[uiFragNb] - auiStartPos[uiFragNb]);
      }
      else if(!bStart)
      {
        ROT( bEOS) ; //[email protected]
        uiFragNb++;
      }
      else
      {
        if(pcBinDataTmp[0]->size() != 0)
        {
          // Concatenate all collected fragments into one contiguous packet.
          pcBinData = new BinData;
          pcBinData->set( new UChar[uiTotalLength], uiTotalLength );
          // append fragments
          UInt uiOffset = 0;
          for(UInt uiFrag = 0; uiFrag<uiFragNb+1; uiFrag++)
          {
              memcpy(pcBinData->data()+uiOffset, pcBinDataTmp[uiFrag]->data() + auiStartPos[uiFrag], auiEndPos[uiFrag]-auiStartPos[uiFrag]);
              uiOffset += auiEndPos[uiFrag]-auiStartPos[uiFrag];
              RNOK( m_pcReadBitstream->releasePacket( pcBinDataTmp[uiFrag] ) );
              pcBinDataTmp[uiFrag] = NULL;
              if(uiNalUnitType != 6) //JVT-T054
              m_pcH264AVCDecoder->decreaseNumOfNALInAU();
			  //FRAG_FIX_3
			  if(uiFrag > 0) 
				  bConcatenated = true; //~FRAG_FIX_3
          }
          
          pcBinData->setMemAccessor( cBinDataAccessor );
          bToDecode = false;
          if((uiTotalLength != 0) && (!bDiscardable || bFragmented))
          {
              //FRAG_FIX
			// NAL types 1/5/20/21 are coded-slice units; re-open the
			// reassembled packet with the decoder before decoding.
			if( (uiNalUnitType == 20) || (uiNalUnitType == 21) || (uiNalUnitType == 1) || (uiNalUnitType == 5) )
            {
                uiPreNalUnitType=uiNalUnitType;
                RNOK( m_pcH264AVCDecoder->initPacket( &cBinDataAccessor, uiNalUnitType, uiMbX, uiMbY, uiSize, 
					//uiNonRequiredPic, //NonRequired JVT-Q066
                    false, bConcatenated, //FRAG_FIX_3
					bStart, auiStartPos[uiFragNb+1], auiEndPos[uiFragNb+1], 
//                    bFragmented, bDiscardable) );
                    bFragmented, bDiscardable, this->m_pcParameter->getNumOfViews(), bSkip) );
            }

        else if( uiNalUnitType == 14 )
          {
                uiPreNalUnitType=uiNalUnitType;
                RNOK( m_pcH264AVCDecoder->initPacket( &cBinDataAccessor, uiNalUnitType, uiMbX, uiMbY, uiSize, 
					//uiNonRequiredPic, //NonRequired JVT-Q066
                    false, bConcatenated, //FRAG_FIX_3
					bStart, auiStartPos[uiFragNb+1], auiEndPos[uiFragNb+1], 
//                    bFragmented, bDiscardable) );
                    bFragmented, bDiscardable,this->m_pcParameter->getNumOfViews(), bSkip) );
                    
              }
              else
                  m_pcH264AVCDecoder->initPacket( &cBinDataAccessor );
              bToDecode = true;

              // NAL type 14 (prefix unit) is parsed above but not decoded.
              if( uiNalUnitType == 14 )
                bToDecode = false;
          }
        }
      }
    }

    //~JVT-P031

//NonRequired JVT-Q066{
	if(m_pcH264AVCDecoder->isNonRequiredPic())
		continue;
//NonRequired JVT-Q066}


// JVT-Q054 Red. Picture {
  RNOK( m_pcH264AVCDecoder->checkRedundantPic() );
  if ( m_pcH264AVCDecoder->isRedundantPic() )
    continue;
// JVT-Q054 Red. Picture }



  if(bToDecode)//JVT-P031
  {
    // get new picture buffer if required if coded Slice || coded IDR slice
    pcPicBuffer = NULL;
    
    if( uiNalUnitType == 1 || uiNalUnitType == 5 || uiNalUnitType == 20 || uiNalUnitType == 21 )
    {
      RNOK( xGetNewPicBuffer( pcPicBuffer, uiSize ) );

      if( ! bYuvDimSet )
      {
        // Derive the padded-plane offsets once, from the first slice's
        // macroblock dimensions.
        UInt uiLumSize  = ((uiMbX<<3)+  YUV_X_MARGIN) * ((uiMbY<<3)    + YUV_Y_MARGIN ) * 4;
        uiLumOffset     = ((uiMbX<<4)+2*YUV_X_MARGIN) * YUV_Y_MARGIN   + YUV_X_MARGIN;  
        uiCbOffset      = ((uiMbX<<3)+  YUV_X_MARGIN) * YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + uiLumSize; 
        uiCrOffset      = ((uiMbX<<3)+  YUV_X_MARGIN) * YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + 5*uiLumSize/4;
        bYuvDimSet = true;

        // HS: decoder robustness
        pcLastFrame = new UChar [uiSize];
        ROF( pcLastFrame );
      }
    }
    

    // decode the NAL unit
    RNOK( m_pcH264AVCDecoder->process( pcPicBuffer, cPicBufferOutputList, cPicBufferUnusedList, cPicBufferReleaseList ) );

	// ROI DECODE ICU/ETRI
	m_pcH264AVCDecoder->RoiDecodeInit();

	setCrop();//lufeng: support frame cropping

    // picture output
    while( ! cPicBufferOutputList.empty() )
    {

//JVT-V054    
      // Lazily initialise the YUV writer with the view coding order.
      if(!m_pcWriteYuv->getFileInitDone() )
      {
		  //UInt *vcOrder = m_pcH264AVCDecoder->getViewCodingOrder();
		  UInt *vcOrder = m_pcH264AVCDecoder->getViewCodingOrder_SubStream();
		  if(vcOrder == NULL)//lufeng: in order to output non-MVC seq
          {
			  //UInt order=0;
			  m_pcH264AVCDecoder->addViewCodingOrder();
			  //vcOrder = m_pcH264AVCDecoder->getViewCodingOrder();
			  vcOrder = m_pcH264AVCDecoder->getViewCodingOrder_SubStream();
		  }
       		m_pcWriteYuv->xInitMVC(m_pcParameter->cYuvFile, vcOrder, m_pcParameter->getNumOfViews()); // JVT-AB024 modified remove active view info SEI  			
      }

        PicBuffer* pcPicBufferTmp = cPicBufferOutputList.front();
      cPicBufferOutputList.pop_front();
        if( pcPicBufferTmp != NULL )
      {
        // HS: decoder robustness
        // Repeat the last frame to fill POC gaps wider than uiMaxPocDiff.
          while( uiLastPoc + uiMaxPocDiff < (UInt)pcPicBufferTmp->getCts() )
        {
          RNOK( m_pcWriteYuv->writeFrame( pcLastFrame + uiLumOffset, 
                                          pcLastFrame + uiCbOffset, 
                                          pcLastFrame + uiCrOffset,
                                           uiMbY << 4,
                                           uiMbX << 4,
                                          (uiMbX << 4)+ YUV_X_MARGIN*2 ) );
          printf("REPEAT FRAME\n");
          uiFrame   ++;
          uiLastPoc += uiMaxPocDiff;
        }

		  
          if(m_pcParameter->getNumOfViews() > 0)
          {
			  // Map the buffer's view id to its position in the view coding order.
			  UInt view_cnt;
			  
			  for (view_cnt=0; view_cnt < m_pcParameter->getNumOfViews(); view_cnt++){
				//UInt tmp_order=m_pcH264AVCDecoder->getViewCodingOrder()[view_cnt];
				  UInt tmp_order=m_pcH264AVCDecoder->getViewCodingOrder_SubStream()[view_cnt];
				if ((UInt)pcPicBufferTmp->getViewId() == tmp_order)
					break;
			  }

			  RNOK( m_pcWriteYuv->writeFrame( *pcPicBufferTmp + uiLumOffset, 
                                              *pcPicBufferTmp + uiCbOffset, 
                                              *pcPicBufferTmp + uiCrOffset,
                                              uiMbY << 4,
                                              uiMbX << 4,
                                              (uiMbX << 4)+ YUV_X_MARGIN*2,
                                              //(UInt)pcPicBufferTmp->getViewId(),
											   view_cnt) ); 
          }
          else
        RNOK( m_pcWriteYuv->writeFrame( *pcPicBufferTmp + uiLumOffset, 
                                        *pcPicBufferTmp + uiCbOffset, 
                                        *pcPicBufferTmp + uiCrOffset,
                                         uiMbY << 4,
                                         uiMbX << 4,
                                        (uiMbX << 4)+ YUV_X_MARGIN*2 ) );
        uiFrame++;
      
    
        // HS: decoder robustness
        // Remember this frame (and its POC) for possible gap-filling above.
        uiLastPoc = (UInt)pcPicBufferTmp->getCts();
        ::memcpy( pcLastFrame, *pcPicBufferTmp+0, uiSize*sizeof(UChar) );
      }
    }
   } 
    RNOK( xRemovePicBuffer( cPicBufferReleaseList ) );
    RNOK( xRemovePicBuffer( cPicBufferUnusedList ) );
    if( pcBinData )
    {
      RNOK( m_pcReadBitstream->releasePacket( pcBinData ) );
      pcBinData = 0;
    }
  }
  printf("\n %d frames decoded\n", uiFrame );

  // delete[] on a null pointer is well-defined if no slice was ever decoded.
  delete [] pcLastFrame; // HS: decoder robustness
  
  RNOK( m_pcH264AVCDecoder->uninit( true ) );
  
  m_pcParameter->nFrames  = uiFrame;
  m_pcParameter->nResult  = 0;

  return Err::m_nOK;
}
Ejemplo n.º 5
0
ErrVal
H264AVCEncoderTest::go()
{
  UInt                    uiWrittenBytes          = 0;
  const UInt              uiMaxFrame              = m_pcEncoderCodingParameter->getTotalFrames();
  UInt                    uiNumLayers             = ( m_pcEncoderCodingParameter->getMVCmode() ? 1 : m_pcEncoderCodingParameter->getNumberOfLayers() );
  UInt                    uiFrame;
  UInt                    uiLayer;
  UInt                    auiMbX                  [MAX_LAYERS];
  UInt                    auiMbY                  [MAX_LAYERS];
  UInt                    auiPicSize              [MAX_LAYERS];
  PicBuffer*              apcOriginalPicBuffer    [MAX_LAYERS];//original pic
  PicBuffer*              apcReconstructPicBuffer [MAX_LAYERS];//rec pic
  PicBufferList           acPicBufferOutputList   [MAX_LAYERS];
  PicBufferList           acPicBufferUnusedList   [MAX_LAYERS];
  ExtBinDataAccessorList  cOutExtBinDataAccessorList;
  Bool                    bMoreSets;

  
  //===== initialization =====
  RNOK( m_pcH264AVCEncoder->init( m_pcEncoderCodingParameter ) ); 


  //===== write parameter sets =====
  for( bMoreSets = true; bMoreSets;  )
  {
    UChar   aucParameterSetBuffer[1000];
    BinData cBinData;
    cBinData.reset();
    cBinData.set( aucParameterSetBuffer, 1000 );

    ExtBinDataAccessor cExtBinDataAccessor;
    cBinData.setMemAccessor( cExtBinDataAccessor );

    RNOK( m_pcH264AVCEncoder      ->writeParameterSets( &cExtBinDataAccessor, bMoreSets) );
		if( m_pcH264AVCEncoder->getScalableSeiMessage() )
		{		
    RNOK( m_pcWriteBitstreamToFile->writePacket       ( &m_cBinDataStartCode ) );
    RNOK( m_pcWriteBitstreamToFile->writePacket       ( &cExtBinDataAccessor ) );
    
    uiWrittenBytes += 4 + cExtBinDataAccessor.size();
		}
    cBinData.reset();
  }

//JVT-W080, PDS SEI message
	if( m_pcEncoderCodingParameter->getMVCmode() && m_pcEncoderCodingParameter->getPdsEnable() )
	{
		//write SEI message
	  UChar   aucParameterSetBuffer[1000];
    BinData cBinData;
    cBinData.reset();
    cBinData.set( aucParameterSetBuffer, 1000 );

    ExtBinDataAccessor cExtBinDataAccessor;
    cBinData.setMemAccessor( cExtBinDataAccessor );

		const UInt uiSPSId = 0; //currently only one SPS with SPSId = 0
		UInt uiNumView       = m_pcEncoderCodingParameter->SpsMVC.getNumViewMinus1()+1;
		UInt* num_refs_list0_anc = new UInt [uiNumView];
		UInt* num_refs_list1_anc = new UInt [uiNumView];
		UInt* num_refs_list0_nonanc = new UInt [uiNumView];
		UInt* num_refs_list1_nonanc = new UInt [uiNumView];

		for( UInt i = 0; i < uiNumView; i++ )
		{
			num_refs_list0_anc[i]    = m_pcEncoderCodingParameter->SpsMVC.getNumAnchorRefsForListX( m_pcEncoderCodingParameter->SpsMVC.getViewCodingOrder()[i], 0 );
			num_refs_list1_anc[i]    = m_pcEncoderCodingParameter->SpsMVC.getNumAnchorRefsForListX( m_pcEncoderCodingParameter->SpsMVC.getViewCodingOrder()[i], 1 );
			num_refs_list0_nonanc[i] = m_pcEncoderCodingParameter->SpsMVC.getNumNonAnchorRefsForListX( m_pcEncoderCodingParameter->SpsMVC.getViewCodingOrder()[i], 0 );
			num_refs_list1_nonanc[i] = m_pcEncoderCodingParameter->SpsMVC.getNumNonAnchorRefsForListX( m_pcEncoderCodingParameter->SpsMVC.getViewCodingOrder()[i], 1 );		  
		}
//#define HELP_INFOR
#ifdef  HELP_INFOR
		printf("\n");
		for( UInt i = 0; i < uiNumView; i++ )
		{
			printf(" num_refs_list0_anchor: %d\tnum_refs_list0_nonanchor: %d\n num_refs_list1_anchor: %d\tnum_refs_list1_nonanchor: %d\n", num_refs_list0_anc[i], num_refs_list1_anc[i], num_refs_list0_nonanc[i], num_refs_list1_nonanc[i] );
		}
#endif

    UInt uiInitialPDIDelayAnc = m_pcEncoderCodingParameter->getPdsInitialDelayAnc();
    UInt uiInitialPDIDelayNonAnc = m_pcEncoderCodingParameter->getPdsInitialDelayNonAnc();
		if( uiInitialPDIDelayAnc < 2 )
			uiInitialPDIDelayAnc  = 2;
		if( uiInitialPDIDelayNonAnc < 2 )
			uiInitialPDIDelayNonAnc  = 2;
		RNOK( m_pcH264AVCEncoder->writePDSSEIMessage( &cExtBinDataAccessor
			                                           , uiSPSId
			                                           , uiNumView
			                                           , num_refs_list0_anc
																								 , num_refs_list1_anc
			                                           , num_refs_list0_nonanc
																								 , num_refs_list1_nonanc
																								 , uiInitialPDIDelayAnc
																								 , uiInitialPDIDelayNonAnc
																								) 
				);

		delete[] num_refs_list0_anc;
		delete[] num_refs_list1_anc;
		delete[] num_refs_list0_nonanc;
		delete[] num_refs_list1_nonanc;
		num_refs_list0_anc = NULL;
		num_refs_list1_anc = NULL;
		num_refs_list0_nonanc = NULL;
		num_refs_list1_nonanc = NULL;
	  if( m_pcEncoderCodingParameter->getCurentViewId() == m_pcEncoderCodingParameter->SpsMVC.m_uiViewCodingOrder[0] )
		{
			RNOK( m_pcWriteBitstreamToFile->writePacket       ( &m_cBinDataStartCode ) );
			RNOK( m_pcWriteBitstreamToFile->writePacket       ( &cExtBinDataAccessor ) );
			uiWrittenBytes += 4 + cExtBinDataAccessor.size();
		}

		cBinData.reset();
	}
//~JVT-W080
  //SEI {
  if( m_pcEncoderCodingParameter->getMultiviewSceneInfoSEIEnable() ) // SEI JVT-W060
  {
	  // Multiview scene information sei message
	  UChar aucParameterSetBuffer[1000];
      BinData cBinData;
      cBinData.reset();
      cBinData.set( aucParameterSetBuffer, 1000 );
      ExtBinDataAccessor cExtBinDataAccessor;
      cBinData.setMemAccessor( cExtBinDataAccessor );
	  RNOK( m_pcH264AVCEncoder ->writeMultiviewSceneInfoSEIMessage( &cExtBinDataAccessor ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &m_cBinDataStartCode ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &cExtBinDataAccessor ) );
	  uiWrittenBytes += 4 + cExtBinDataAccessor.size();
	  cBinData.reset();
  }
  if( m_pcEncoderCodingParameter->getMultiviewAcquisitionInfoSEIEnable() ) // SEI JVT-W060
  {
	  // Multiview acquisition information sei message
	  UChar aucParameterSetBuffer[1000];
      BinData cBinData;
      cBinData.reset();
      cBinData.set( aucParameterSetBuffer, 1000 );
      ExtBinDataAccessor cExtBinDataAccessor;
      cBinData.setMemAccessor( cExtBinDataAccessor );
	  RNOK( m_pcH264AVCEncoder ->writeMultiviewAcquisitionInfoSEIMessage( &cExtBinDataAccessor ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &m_cBinDataStartCode ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &cExtBinDataAccessor ) );
	  uiWrittenBytes += 4 + cExtBinDataAccessor.size();
	  cBinData.reset();
  }
  if( m_pcEncoderCodingParameter->getNestingSEIEnable() && m_pcEncoderCodingParameter->getSnapshotEnable() 
	  && m_pcEncoderCodingParameter->getCurentViewId() == 0 )
  {
   // add nesting sei message for view0
      UChar aucParameterSetBuffer[1000];
      BinData cBinData;
      cBinData.reset();
      cBinData.set( aucParameterSetBuffer, 1000 );
      ExtBinDataAccessor cExtBinDataAccessor;
      cBinData.setMemAccessor( cExtBinDataAccessor );
	  RNOK( m_pcH264AVCEncoder ->writeNestingSEIMessage( &cExtBinDataAccessor ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &m_cBinDataStartCode ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &cExtBinDataAccessor ) );
	  uiWrittenBytes += 4 + cExtBinDataAccessor.size();
	  cBinData.reset();
  }
//SEI }

  //===== determine parameters for required frame buffers =====
  for( uiLayer = 0; uiLayer < uiNumLayers; uiLayer++ )
  {
    //auiMbX        [uiLayer] = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getFrameWidth () >> 4;
    //auiMbY        [uiLayer] = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getFrameHeight() >> 4;
    auiMbX        [uiLayer] = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getFrameWidthInMbs();
    auiMbY        [uiLayer] = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getFrameHeightInMbs();
    m_aauiCropping[uiLayer][0]     = 0;
    m_aauiCropping[uiLayer][1]     = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getHorPadding      ();
    m_aauiCropping[uiLayer][2]     = 0;
    m_aauiCropping[uiLayer][3]     = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getVerPadding      ();
    m_apcWriteYuv[uiLayer]->setCrop(m_aauiCropping[uiLayer]);

    UInt  uiSize            = ((auiMbY[uiLayer]<<4)+2*YUV_Y_MARGIN)*((auiMbX[uiLayer]<<4)+2*YUV_X_MARGIN);
    auiPicSize    [uiLayer] = ((auiMbX[uiLayer]<<4)+2*YUV_X_MARGIN)*((auiMbY[uiLayer]<<4)+2*YUV_Y_MARGIN)*3/2;
    m_auiLumOffset[uiLayer] = ((auiMbX[uiLayer]<<4)+2*YUV_X_MARGIN)* YUV_Y_MARGIN   + YUV_X_MARGIN;  
    m_auiCbOffset [uiLayer] = ((auiMbX[uiLayer]<<3)+  YUV_X_MARGIN)* YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + uiSize; 
    m_auiCrOffset [uiLayer] = ((auiMbX[uiLayer]<<3)+  YUV_X_MARGIN)* YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + 5*uiSize/4;
    m_auiHeight   [uiLayer] =   auiMbY[uiLayer]<<4;
    m_auiWidth    [uiLayer] =   auiMbX[uiLayer]<<4;
    m_auiStride   [uiLayer] =  (auiMbX[uiLayer]<<4)+ 2*YUV_X_MARGIN;
  }

  //===== loop over frames =====
  for( uiFrame = 0; uiFrame < uiMaxFrame; uiFrame++ )
  {
    //===== get picture buffers and read original pictures =====
    for( uiLayer = 0; uiLayer < uiNumLayers; uiLayer++ )
    {
      UInt  uiSkip = ( 1 << m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getTemporalResolution() );

      if( uiFrame % uiSkip == 0 )
      {
        RNOK( xGetNewPicBuffer( apcReconstructPicBuffer [uiLayer], uiLayer, auiPicSize[uiLayer] ) );
        RNOK( xGetNewPicBuffer( apcOriginalPicBuffer    [uiLayer], uiLayer, auiPicSize[uiLayer] ) );

        RNOK( m_apcReadYuv[uiLayer]->readFrame( *apcOriginalPicBuffer[uiLayer] + m_auiLumOffset[uiLayer],
                                                *apcOriginalPicBuffer[uiLayer] + m_auiCbOffset [uiLayer],
                                                *apcOriginalPicBuffer[uiLayer] + m_auiCrOffset [uiLayer],
                                                m_auiHeight [uiLayer],
                                                m_auiWidth  [uiLayer],
                                                m_auiStride [uiLayer] ) );
      }
      else
      {
        apcReconstructPicBuffer [uiLayer] = 0;
        apcOriginalPicBuffer    [uiLayer] = 0;
      }
    }

    //===== call encoder =====
    RNOK( m_pcH264AVCEncoder->process( cOutExtBinDataAccessorList,
                                       apcOriginalPicBuffer,
                                       apcReconstructPicBuffer,
                                       acPicBufferOutputList,
                                       acPicBufferUnusedList ) );

    //===== write and release NAL unit buffers =====
    UInt  uiBytesUsed = 0;
    RNOK( xWrite  ( cOutExtBinDataAccessorList, uiBytesUsed ) );
    uiWrittenBytes   += uiBytesUsed;
    
    //===== write and release reconstructed pictures =====
    for( uiLayer = 0; uiLayer < uiNumLayers; uiLayer++ )
    {
      RNOK( xWrite  ( acPicBufferOutputList[uiLayer], uiLayer ) );
      RNOK( xRelease( acPicBufferUnusedList[uiLayer], uiLayer ) );
    }
  }

  //===== finish encoding =====
  UInt  uiNumCodedFrames = 0;
  Double  dHighestLayerOutputRate = 0.0;
  RNOK( m_pcH264AVCEncoder->finish( cOutExtBinDataAccessorList,
                                    acPicBufferOutputList,
                                    acPicBufferUnusedList,
                                    uiNumCodedFrames,
                                    dHighestLayerOutputRate ) );


  //===== write and release NAL unit buffers =====
  RNOK( xWrite  ( cOutExtBinDataAccessorList, uiWrittenBytes ) );

  //===== write and release reconstructed pictures =====
  for( uiLayer = 0; uiLayer < uiNumLayers; uiLayer++ )
  {
    RNOK( xWrite  ( acPicBufferOutputList[uiLayer], uiLayer ) );
    RNOK( xRelease( acPicBufferUnusedList[uiLayer], uiLayer ) );
  }


  //===== set parameters and output summary =====
  m_cEncoderIoParameter.nFrames = uiFrame;
  m_cEncoderIoParameter.nResult = 0;

  if( ! m_pcEncoderCodingParameter->getMVCmode() )
	{
		UChar   aucParameterSetBuffer[1000];
		BinData cBinData;
		cBinData.reset();
		cBinData.set( aucParameterSetBuffer, 1000 );

		ExtBinDataAccessor cExtBinDataAccessor;
		cBinData.setMemAccessor( cExtBinDataAccessor );
		m_pcH264AVCEncoder->SetVeryFirstCall();
		RNOK( m_pcH264AVCEncoder      ->writeParameterSets( &cExtBinDataAccessor, bMoreSets) );
		RNOK( m_pcWriteBitstreamToFile->writePacket       ( &m_cBinDataStartCode ) );
		RNOK( m_pcWriteBitstreamToFile->writePacket       ( &cExtBinDataAccessor ) );
		uiWrittenBytes += 4 + cExtBinDataAccessor.size();
		cBinData.reset();
	}
//SEI {
  if( m_pcEncoderCodingParameter->getViewScalInfoSEIEnable() )
  {
    //view scalability information sei message
     UChar   aucParameterSetBuffer[1000];
     BinData cBinData;
     cBinData.reset();
     cBinData.set( aucParameterSetBuffer, 1000 );

     ExtBinDataAccessor cExtBinDataAccessor;
     cBinData.setMemAccessor( cExtBinDataAccessor );
     RNOK( m_pcH264AVCEncoder->writeViewScalInfoSEIMessage( &cExtBinDataAccessor ) );
     RNOK( m_pcWriteBitstreamToFile->writePacket       ( &m_cBinDataStartCode ) );
     RNOK( m_pcWriteBitstreamToFile->writePacket       ( &cExtBinDataAccessor ) );
     uiWrittenBytes += 4 + cExtBinDataAccessor.size();
     cBinData.reset();

  }
//SEI }
  if( m_pcWriteBitstreamToFile )
  {
    RNOK( m_pcWriteBitstreamToFile->uninit() );  
    RNOK( m_pcWriteBitstreamToFile->destroy() );  
  }

//SEI {
  if( m_pcEncoderCodingParameter->getViewScalInfoSEIEnable() )
  {
    RNOK    ( ViewScalableDealing() );
  }
//SEI }
  if( ! m_pcEncoderCodingParameter->getMVCmode() )
  {
	RNOK	( ScalableDealing() );
  }

  return Err::m_nOK;
}
Ejemplo n.º 6
0
/** Field-coding (interlaced) variant of the encode entry point.
 - Each incoming frame is split into two fields; every field becomes its
   own picture in the coding buffer list.
 - A GOP is compressed once enough field pictures have been collected,
   on a flush (after the second field), or for the very first frame.
 \param   flush               cause encoder to encode a partial GOP
 \param   pcPicYuvOrg         original YUV frame (NULL on pure flush)
 \param   pcPicYuvTrueOrg     unmodified original YUV frame
 \param   snrCSC              colour-space conversion for SNR computation
 \param   isTff               true if the top field is the first field
 \retval  rcListPicYuvRecOut  list of reconstruction YUV pictures
 \retval  accessUnitsOut      list of output access units
 \retval  iNumEncoded         number of encoded field pictures
 */
Void TEncTop::encode(Bool flush, TComPicYuv* pcPicYuvOrg, TComPicYuv* pcPicYuvTrueOrg, const InputColourSpaceConversion snrCSC, TComList<TComPicYuv*>& rcListPicYuvRecOut, std::list<AccessUnit>& accessUnitsOut, Int& iNumEncoded, Bool isTff)
{
  iNumEncoded = 0;

  for (Int field = 0; field < 2; field++)
  {
    if (pcPicYuvOrg != NULL)
    {
      /* -- field initialization -- */
      // With top-field-first, field 0 is the top field; otherwise field 1 is.
      const Bool topField = (isTff == (field == 0));

      TComPic* fieldPic = NULL;
      xGetNewPicBuffer( fieldPic );
      fieldPic->setReconMark( false );                   // where is this normally?

      if (field == 1)                                    // where is this normally?
      {
        // org. buffer
        TComPicYuv* recBuffer = NULL;
        if ( rcListPicYuvRecOut.size() >= (UInt)m_iGOPSize+1 ) // need to maintain field 0 in list of RecOuts while processing field 1. Hence +1 on m_iGOPSize.
        {
          recBuffer = rcListPicYuvRecOut.popFront();
        }
        else
        {
          recBuffer = new TComPicYuv;
          recBuffer->create( m_iSourceWidth, m_iSourceHeight, m_chromaFormatIDC, m_maxCUWidth, m_maxCUHeight, m_maxTotalCUDepth, true);
        }
        rcListPicYuvRecOut.pushBack( recBuffer );
      }

      fieldPic->getSlice(0)->setPOC( m_iPOCLast );        // superfluous?
      fieldPic->getPicYuvRec()->setBorderExtension(false);// where is this normally?

      fieldPic->setTopField(topField);                    // interlaced requirement

      // De-interleave every valid colour component of the frame into this field.
      for (UInt compIdx = 0; compIdx < pcPicYuvOrg->getNumberValidComponents(); compIdx++)
      {
        const ComponentID compID = ComponentID(compIdx);
        const UInt stride = pcPicYuvOrg->getStride(compID);

        separateFields((pcPicYuvOrg->getBuf(compID) + pcPicYuvOrg->getMarginX(compID) + (pcPicYuvOrg->getMarginY(compID) * stride)),
                       fieldPic->getPicYuvOrg()->getAddr(compID),
                       pcPicYuvOrg->getStride(compID),
                       pcPicYuvOrg->getWidth(compID),
                       pcPicYuvOrg->getHeight(compID),
                       topField);

        separateFields((pcPicYuvTrueOrg->getBuf(compID) + pcPicYuvTrueOrg->getMarginX(compID) + (pcPicYuvTrueOrg->getMarginY(compID) * stride)),
                       fieldPic->getPicYuvTrueOrg()->getAddr(compID),
                       pcPicYuvTrueOrg->getStride(compID),
                       pcPicYuvTrueOrg->getWidth(compID),
                       pcPicYuvTrueOrg->getHeight(compID),
                       topField);
      }

      // compute image characteristics
      if ( getUseAdaptiveQP() )
      {
        m_cPreanalyzer.xPreanalyze( dynamic_cast<TEncPic*>( fieldPic ) );
      }
    }

    if ( m_iNumPicRcvd && ((flush && field==1) || (m_iPOCLast/2)==0 || m_iNumPicRcvd==m_iGOPSize ) )
    {
      // compress GOP
      m_cGOPEncoder.compressGOP(m_iPOCLast, m_iNumPicRcvd, m_cListPic, rcListPicYuvRecOut, accessUnitsOut, true, isTff, snrCSC, m_printFrameMSE,&m_cSearch);

      iNumEncoded        += m_iNumPicRcvd;
      m_uiNumAllPicCoded += m_iNumPicRcvd;
      m_iNumPicRcvd       = 0;
    }
  }
}
Ejemplo n.º 7
0
/**
 Activate the SPS/PPS referenced by the pending slice header
 (m_apcSlicePilot) and prepare decoder state.

 First slice of a picture:
  - resolve and activate the referenced PPS (and its SPS),
  - refresh the global bit-depth / CU-geometry variables,
  - allocate a new picture buffer (sets m_pcPic) and apply the RPS,
  - (re)create SAO, loop filter, prediction and CU-decoder objects,
  - derive field/frame coding mode from any Picture Timing SEI and
    transfer all received SEI messages to the picture.

 Subsequent slices of the same picture:
  - attach a fresh slice object to the current picture,
  - abort if a new SPS/PPS was decoded mid-picture,
  - copy Decoding Unit Info SEI messages to the picture.
 */
Void TDecTop::xActivateParameterSets()
{
    if (m_bFirstSliceInPicture)
    {
        const TComPPS *pps = m_parameterSetManager.getPPS(m_apcSlicePilot->getPPSId()); // this is a temporary PPS object. Do not store this value
        assert (pps != 0);

        const TComSPS *sps = m_parameterSetManager.getSPS(pps->getSPSId());             // this is a temporary SPS object. Do not store this value
        assert (sps != 0);

        // A new picture consumes any pending "changed" state for these sets;
        // the flags are re-checked for later slices of this picture below.
        m_parameterSetManager.clearSPSChangedFlag(sps->getSPSId());
        m_parameterSetManager.clearPPSChangedFlag(pps->getPPSId());

        if (false == m_parameterSetManager.activatePPS(m_apcSlicePilot->getPPSId(),m_apcSlicePilot->isIRAP()))
        {
            printf ("Parameter set activation failed!");
            assert (0);
        }

        // TODO: remove the use of the following globals:
        for (UInt channel = 0; channel < MAX_NUM_CHANNEL_TYPE; channel++)
        {
            g_bitDepth[channel] = sps->getBitDepth(ChannelType(channel));
            // 15-bit transform dynamic range, or bitDepth+6 (min 15) when
            // extended precision processing is enabled in the SPS.
            g_maxTrDynamicRange[channel] = (sps->getUseExtendedPrecision()) ? std::max<Int>(15, (g_bitDepth[channel] + 6)) : 15;
        }
        g_uiMaxCUWidth  = sps->getMaxCUWidth();
        g_uiMaxCUHeight = sps->getMaxCUHeight();
        g_uiMaxCUDepth  = sps->getMaxCUDepth();
        g_uiAddCUDepth  = max (0, sps->getLog2MinCodingBlockSize() - (Int)sps->getQuadtreeTULog2MinSize() + (Int)getMaxCUDepthOffset(sps->getChromaFormatIdc(), sps->getQuadtreeTULog2MinSize()));

        //  Get a new picture buffer. This will also set up m_pcPic, and therefore give us a SPS and PPS pointer that we can use.
        xGetNewPicBuffer (*(sps), *(pps), m_pcPic, m_apcSlicePilot->getTLayer());
        m_apcSlicePilot->applyReferencePictureSet(m_cListPic, m_apcSlicePilot->getRPS());

        // make the slice-pilot a real slice, and set up the slice-pilot for the next slice
        assert(m_pcPic->getNumAllocatedSlice() == (m_uiSliceIdx + 1));
        m_apcSlicePilot = m_pcPic->getPicSym()->swapSliceObject(m_apcSlicePilot, m_uiSliceIdx);

        // we now have a real slice:
        TComSlice *pSlice = m_pcPic->getSlice(m_uiSliceIdx);

        // Update the PPS and SPS pointers with the ones of the picture.
        pps=pSlice->getPPS();
        sps=pSlice->getSPS();

        // Initialise the various objects for the new set of settings
        m_cSAO.create( sps->getPicWidthInLumaSamples(), sps->getPicHeightInLumaSamples(), sps->getChromaFormatIdc(), sps->getMaxCUWidth(), sps->getMaxCUHeight(), sps->getMaxCUDepth(), pps->getSaoOffsetBitShift(CHANNEL_TYPE_LUMA), pps->getSaoOffsetBitShift(CHANNEL_TYPE_CHROMA) );
        m_cLoopFilter.create( sps->getMaxCUDepth() );
        m_cPrediction.initTempBuff(sps->getChromaFormatIdc());


        Bool isField = false;
        Bool isTopField = false;

        if(!m_SEIs.empty())
        {
            // Check if any new Picture Timing SEI has arrived
            SEIMessages pictureTimingSEIs = extractSeisByType (m_SEIs, SEI::PICTURE_TIMING);
            if (pictureTimingSEIs.size()>0)
            {
                SEIPictureTiming* pictureTiming = (SEIPictureTiming*) *(pictureTimingSEIs.begin());
                // pic_struct values 1,2,9..12 are treated as field pictures;
                // of those, 1, 9 and 11 are treated as the top field.
                isField    = (pictureTiming->m_picStruct == 1) || (pictureTiming->m_picStruct == 2) || (pictureTiming->m_picStruct == 9) || (pictureTiming->m_picStruct == 10) || (pictureTiming->m_picStruct == 11) || (pictureTiming->m_picStruct == 12);
                isTopField = (pictureTiming->m_picStruct == 1) || (pictureTiming->m_picStruct == 9) || (pictureTiming->m_picStruct == 11);
            }
        }

        //Set Field/Frame coding mode
        m_pcPic->setField(isField);
        m_pcPic->setTopField(isTopField);

        // transfer any SEI messages that have been received to the picture
        m_pcPic->setSEIs(m_SEIs);
        m_SEIs.clear();

        // Recursive structure
        m_cCuDecoder.create ( sps->getMaxCUDepth(), sps->getMaxCUWidth(), sps->getMaxCUHeight(), sps->getChromaFormatIdc() );
        m_cCuDecoder.init   ( &m_cEntropyDecoder, &m_cTrQuant, &m_cPrediction );
        m_cTrQuant.init     ( sps->getMaxTrSize() );

        m_cSliceDecoder.create();
    }
    else
    {
        // make the slice-pilot a real slice, and set up the slice-pilot for the next slice
        m_pcPic->allocateNewSlice();
        assert(m_pcPic->getNumAllocatedSlice() == (m_uiSliceIdx + 1));
        m_apcSlicePilot = m_pcPic->getPicSym()->swapSliceObject(m_apcSlicePilot, m_uiSliceIdx);

        TComSlice *pSlice = m_pcPic->getSlice(m_uiSliceIdx); // we now have a real slice.

        const TComSPS *sps = pSlice->getSPS();
        const TComPPS *pps = pSlice->getPPS();

        // check that the current active PPS has not changed...
        // (a mid-picture SPS/PPS change cannot be handled; bail out)
        if (m_parameterSetManager.getSPSChangedFlag(sps->getSPSId()) )
        {
            printf("Error - a new SPS has been decoded while processing a picture\n");
            exit(1);
        }
        if (m_parameterSetManager.getPPSChangedFlag(pps->getPPSId()) )
        {
            printf("Error - a new PPS has been decoded while processing a picture\n");
            exit(1);
        }

        // Check if any new SEI has arrived
        if(!m_SEIs.empty())
        {
            // Currently only decoding Unit SEI message occurring between VCL NALUs copied
            SEIMessages &picSEI = m_pcPic->getSEIs();
            SEIMessages decodingUnitInfos = extractSeisByType (m_SEIs, SEI::DECODING_UNIT_INFO);
            picSEI.insert(picSEI.end(), decodingUnitInfos.begin(), decodingUnitInfos.end());
            deleteSEIs(m_SEIs);
        }
    }

}
Ejemplo n.º 8
0
/**
 Per-view encoding loop of the multi-view test harness.

 For view auiProcessingInfo.nView it iterates over all frames up to
 auiProcessingInfo.nMaxFrames: reads the source frame (honouring the
 view's temporal-resolution skip factor), encodes it with the per-view
 encoder instance, then either writes the NAL units (debug mode) or
 hands them to the RTP sender, and finally writes/releases the picture
 buffers.

 NOTE(review): cOutExtBinDataAccessorList, acPicBufferOutputList,
 acPicBufferUnusedList and uiWrittenBytes are passed BY VALUE, so the
 xWrite/xRelease calls and the byte-count update operate on local
 copies and are not observable by the caller — confirm this is the
 intended behaviour of the threaded harness.
 */
void
H264AVCEncoderTest::xProcessView(processingInfo	auiProcessingInfo,UInt auiPicSize, UInt uiWrittenBytes, ExtBinDataAccessorList cOutExtBinDataAccessorList, PicBuffer* apcOriginalPicBuffer, PicBuffer* apcReconstructPicBuffer, PicBufferList acPicBufferOutputList, PicBufferList acPicBufferUnusedList){

	//system("pause");
	if(isVerbose)
		printf("Frame: %d\nMaxFrames: %d\n,View: %d\n",auiProcessingInfo.nFrame,auiProcessingInfo.nMaxFrames,auiProcessingInfo.nView);
	
	for( auiProcessingInfo.nFrame = 0; auiProcessingInfo.nFrame < auiProcessingInfo.nMaxFrames; auiProcessingInfo.nFrame++ )
  {
	  //m_apcRtpPacker->increaseTimeStamp();

	  if(isVerbose)
		printf("\nFrame: %d\n",auiProcessingInfo.nFrame);
	   //system("pause");
	  
		
	  // Frames are skipped according to this view's temporal resolution:
	  // only every uiSkip-th frame is read and encoded.
	  UInt  uiSkip = ( 1 << m_pcEncoderCodingParameter[auiProcessingInfo.nView]->getLayerParameters( 0 ).getTemporalResolution() );
			  //UInt  uiSkip = ( 1 << m_pcEncoderCodingParameter[uiLayer]->getLayerParameters( uiLayer ).getTemporalResolution() );
			  
			  //
			  // READ FRAME nFrame FOR VIEW nView
			  //

	  if( auiProcessingInfo.nFrame % uiSkip == 0 )
			  {
				  xGetNewPicBuffer( apcReconstructPicBuffer , auiProcessingInfo.nView, auiPicSize );
				  xGetNewPicBuffer( apcOriginalPicBuffer   , auiProcessingInfo.nView, auiPicSize ) ;
				
				//printf("Reading Layer %d of frame %d\n",uiLayer,uiFrame);
				//m_apcReadYuv[uiLayer]->m_cFile.tell();
				// Luma/Cb/Cr planes live in one buffer at precomputed offsets.
				m_apcReadYuv[auiProcessingInfo.nView]->readFrame( *apcOriginalPicBuffer + m_auiLumOffset[auiProcessingInfo.nView],
														*apcOriginalPicBuffer + m_auiCbOffset[auiProcessingInfo.nView],
														*apcOriginalPicBuffer + m_auiCrOffset[auiProcessingInfo.nView],
														m_auiHeight[auiProcessingInfo.nView] ,
														m_auiWidth[auiProcessingInfo.nView]  ,
														m_auiStride[auiProcessingInfo.nView] ) ;

				//printf("Frame %d, Layer %d, tamany original:%s\n",uiFrame,uiLayer,apcOriginalPicBuffer[uiLayer]);
				
			  }
			  else
			  {
				if(isVerbose)
					printf("Hi ha Hagut un SKIP a la part de readFrame()\n");

				// Skipped frame: the encoder is called with null buffers.
				apcReconstructPicBuffer  = 0;
				apcOriginalPicBuffer   = 0;		
			  }
			  

			  //
			  // PROCESS FRAME nFrame FOR VIEW nView
			  //

			  if(isVerbose)
				  printf("View %d\t",auiProcessingInfo.nView);

			   m_pcH264AVCEncoder[auiProcessingInfo.nView]->process( cOutExtBinDataAccessorList,
											   apcOriginalPicBuffer,
											   apcReconstructPicBuffer,
											   &acPicBufferOutputList,
											   &acPicBufferUnusedList ) ;


			   //
			   // WRITE FRAME nFrame FOR VIEW nView TO THE VARIOUS FILES AND BUFFERS (OUTPUT, REC, ETC...)
			   //

				//printf("Writing layer %d frame %d\n",uiLayer,uiFrame);
				UInt  uiBytesUsed = 0;
				if(m_pcEncoderCodingParameter[0]->isDebug()){
					if(isVerbose)
						printf("Write per debug\n");				
					// Debug mode: dump NAL units to file instead of sending.
					xWrite  ( cOutExtBinDataAccessorList,uiBytesUsed) ;
				}
				else{
					{
						// Scoped lock: serializes access to the shared RTP packer.
						boost::mutex::scoped_lock io_lock(io_mutex);
						if(isVerbose)
							printf("View %d bloqueja el RtpPacker\n",auiProcessingInfo.nView);
					}
					//xSend(cOutExtBinDataAccessorList);

					if(!auiProcessingInfo.nView) // If this is view 0, advance the RTP timestamp
						m_apcRtpPacker->increaseTimeStamp();

					/*printf("Enviem tot NAL+data\n");
					system("pause");*/

					xAskForSend(cOutExtBinDataAccessorList,auiProcessingInfo.nView,auiProcessingInfo.nFrame);
					
				}
				
				//m_apcUDPController->send("Test");
				
						
				uiWrittenBytes  += uiBytesUsed;

			  
				//printf("Releasing layer %d frame %d\n",uiLayer,uiFrame);

				
				// Non-parallel mode writes the reconstructed pictures
				// (fills the rec_X.yuv files); parallel mode only releases.
				if(!m_pcEncoderCodingParameter[0]->isParallel()){
					printf("Write per No Parallel\n");
					xWrite  ( acPicBufferOutputList, auiProcessingInfo.nView ) ;
				}
				else
				{
					xRelease( acPicBufferOutputList, auiProcessingInfo.nView ) ;
				}
				// Release buffers the encoder no longer needs.
				xRelease( acPicBufferUnusedList, auiProcessingInfo.nView ) ;
				//printf("Tamany del Buffer de REC[%d]=%d\n",uiLayer,acPicBufferOutputList[uiLayer].size());
				
		//}//endif
		

		
  }//endfor frame

}
Ejemplo n.º 9
0
/**
 Activate the SPS/PPS referenced by the pending slice header
 (m_apcSlicePilot) and prepare decoder state (RExt-era variant).

 First slice of a picture:
  - resolve and activate the referenced PPS (and its SPS),
  - parse prefix SEI messages, reject high-bit-depth streams when
    compiled without RExt high-bit-depth support,
  - allocate a new picture buffer (sets m_pcPic) and apply the RPS,
  - (re)create SAO, loop filter, prediction and CU-decoder objects,
  - derive field/frame coding mode from any Picture Timing SEI and
    transfer all received SEI messages to the picture.

 Subsequent slices of the same picture:
  - attach a fresh slice object to the current picture,
  - abort if a new SPS/PPS was decoded mid-picture,
  - copy Decoding Unit Info SEI messages to the picture.
 */
Void TDecTop::xActivateParameterSets()
{
  if (m_bFirstSliceInPicture)
  {
    const TComPPS *pps = m_parameterSetManager.getPPS(m_apcSlicePilot->getPPSId()); // this is a temporary PPS object. Do not store this value
    assert (pps != 0);

    const TComSPS *sps = m_parameterSetManager.getSPS(pps->getSPSId());             // this is a temporary SPS object. Do not store this value
    assert (sps != 0);

    // A new picture consumes any pending "changed" state for these sets;
    // the flags are re-checked for later slices of this picture below.
    m_parameterSetManager.clearSPSChangedFlag(sps->getSPSId());
    m_parameterSetManager.clearPPSChangedFlag(pps->getPPSId());

    if (false == m_parameterSetManager.activatePPS(m_apcSlicePilot->getPPSId(),m_apcSlicePilot->isIRAP()))
    {
      printf ("Parameter set activation failed!");
      assert (0);
    }

    xParsePrefixSEImessages();

#if RExt__HIGH_BIT_DEPTH_SUPPORT==0
    // Without compile-time high-bit-depth support, streams using extended
    // precision or more than 12 bits per channel cannot be decoded.
    if (sps->getSpsRangeExtension().getExtendedPrecisionProcessingFlag() || sps->getBitDepth(CHANNEL_TYPE_LUMA)>12 || sps->getBitDepth(CHANNEL_TYPE_CHROMA)>12 )
    {
      printf("High bit depth support must be enabled at compile-time in order to decode this bitstream\n");
      assert (0);
      exit(1);
    }
#endif

    // NOTE: globals were set up here originally. You can now use:
    // g_uiMaxCUDepth = sps->getMaxTotalCUDepth();
    // g_uiAddCUDepth = sps->getMaxTotalCUDepth() - sps->getLog2DiffMaxMinCodingBlockSize()

    //  Get a new picture buffer. This will also set up m_pcPic, and therefore give us a SPS and PPS pointer that we can use.
    xGetNewPicBuffer (*(sps), *(pps), m_pcPic, m_apcSlicePilot->getTLayer());
    m_apcSlicePilot->applyReferencePictureSet(m_cListPic, m_apcSlicePilot->getRPS());

    // make the slice-pilot a real slice, and set up the slice-pilot for the next slice
    assert(m_pcPic->getNumAllocatedSlice() == (m_uiSliceIdx + 1));
    m_apcSlicePilot = m_pcPic->getPicSym()->swapSliceObject(m_apcSlicePilot, m_uiSliceIdx);

    // we now have a real slice:
    TComSlice *pSlice = m_pcPic->getSlice(m_uiSliceIdx);

    // Update the PPS and SPS pointers with the ones of the picture.
    pps=pSlice->getPPS();
    sps=pSlice->getSPS();

    // Initialise the various objects for the new set of settings
    m_cSAO.create( sps->getPicWidthInLumaSamples(), sps->getPicHeightInLumaSamples(), sps->getChromaFormatIdc(), sps->getMaxCUWidth(), sps->getMaxCUHeight(), sps->getMaxTotalCUDepth(), pps->getPpsRangeExtension().getLog2SaoOffsetScale(CHANNEL_TYPE_LUMA), pps->getPpsRangeExtension().getLog2SaoOffsetScale(CHANNEL_TYPE_CHROMA) );
    m_cLoopFilter.create( sps->getMaxTotalCUDepth() );
    m_cPrediction.initTempBuff(sps->getChromaFormatIdc());


    Bool isField = false;
    Bool isTopField = false;

    if(!m_SEIs.empty())
    {
      // Check if any new Picture Timing SEI has arrived
      SEIMessages pictureTimingSEIs = getSeisByType(m_SEIs, SEI::PICTURE_TIMING);
      if (pictureTimingSEIs.size()>0)
      {
        SEIPictureTiming* pictureTiming = (SEIPictureTiming*) *(pictureTimingSEIs.begin());
        // pic_struct values 1,2,9..12 are treated as field pictures;
        // of those, 1, 9 and 11 are treated as the top field.
        isField    = (pictureTiming->m_picStruct == 1) || (pictureTiming->m_picStruct == 2) || (pictureTiming->m_picStruct == 9) || (pictureTiming->m_picStruct == 10) || (pictureTiming->m_picStruct == 11) || (pictureTiming->m_picStruct == 12);
        isTopField = (pictureTiming->m_picStruct == 1) || (pictureTiming->m_picStruct == 9) || (pictureTiming->m_picStruct == 11);
      }
    }

    //Set Field/Frame coding mode
    m_pcPic->setField(isField);
    m_pcPic->setTopField(isTopField);

    // transfer any SEI messages that have been received to the picture
    m_pcPic->setSEIs(m_SEIs);
    m_SEIs.clear();

    // Recursive structure
    m_cCuDecoder.create ( sps->getMaxTotalCUDepth(), sps->getMaxCUWidth(), sps->getMaxCUHeight(), sps->getChromaFormatIdc() );
    m_cCuDecoder.init   ( &m_cEntropyDecoder, &m_cTrQuant, &m_cPrediction );
    m_cTrQuant.init     ( sps->getMaxTrSize() );

    m_cSliceDecoder.create();
  }
  else
  {
    // make the slice-pilot a real slice, and set up the slice-pilot for the next slice
    m_pcPic->allocateNewSlice();
    assert(m_pcPic->getNumAllocatedSlice() == (m_uiSliceIdx + 1));
    m_apcSlicePilot = m_pcPic->getPicSym()->swapSliceObject(m_apcSlicePilot, m_uiSliceIdx);

    TComSlice *pSlice = m_pcPic->getSlice(m_uiSliceIdx); // we now have a real slice.

    const TComSPS *sps = pSlice->getSPS();
    const TComPPS *pps = pSlice->getPPS();

    // check that the current active PPS has not changed...
    // (a mid-picture SPS/PPS change cannot be handled; bail out)
    if (m_parameterSetManager.getSPSChangedFlag(sps->getSPSId()) )
    {
      printf("Error - a new SPS has been decoded while processing a picture\n");
      exit(1);
    }
    if (m_parameterSetManager.getPPSChangedFlag(pps->getPPSId()) )
    {
      printf("Error - a new PPS has been decoded while processing a picture\n");
      exit(1);
    }

    xParsePrefixSEImessages();

    // Check if any new SEI has arrived
     if(!m_SEIs.empty())
     {
       // Currently only decoding Unit SEI message occurring between VCL NALUs copied
       SEIMessages &picSEI = m_pcPic->getSEIs();
       SEIMessages decodingUnitInfos = extractSeisByType (m_SEIs, SEI::DECODING_UNIT_INFO);
       picSEI.insert(picSEI.end(), decodingUnitInfos.begin(), decodingUnitInfos.end());
       deleteSEIs(m_SEIs);
     }
  }
}