Example #1
0
Bool ReconstructionBypass::xRequiresOutsidePadding( UInt uiMbX, UInt uiMbY, UInt uiFrameWidth, UInt uiFrameHeight, Bool bMbAff, UInt uiOrgMask, UInt* pauiMask )
{
  AOT( uiMbX >= uiFrameWidth  );
  AOT( uiMbY >= uiFrameHeight );

  //===== reset mask =====
  for( UInt uiIndex = 0; uiIndex < 9; uiIndex++ )
  {
    pauiMask[uiIndex] = MSYS_UINT_MAX;
  }

  //===== quick check =====
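  // with MBAFF, macroblocks are organised in vertical pairs, so the vertical
  // MB position and the frame height (in MB units) are halved before the border test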
  uiMbY             >>= ( bMbAff ? 1 : 0 );
  uiFrameHeight     >>= ( bMbAff ? 1 : 0 );
  Bool  bLeftBorder   = ( uiMbX == 0 );
  Bool  bRightBorder  = ( uiMbX == uiFrameWidth  - 1 );
  Bool  bTopBorder    = ( uiMbY == 0 );
  Bool  bBottomBorder = ( uiMbY == uiFrameHeight - 1 );
  ROFRS ( bLeftBorder || bRightBorder || bTopBorder || bBottomBorder, false );

  //===== update masks and return =====
#define UPDATE_MASK(b,d) if((b)) { if( xOutshiftMask( bMbAff, (d), uiOrgMask, pauiMask[(d)] ) != Err::m_nOK ) assert( 0 ); }
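  // direction indices form a 3x3 neighbourhood around the current MB
  // (index 4, the centre, needs no padding and is skipped):
  //   0 1 2
  //   3 4 5
  //   6 7 8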
  UPDATE_MASK( bLeftBorder  && bTopBorder,    0 );
  UPDATE_MASK(                 bTopBorder,    1 );
  UPDATE_MASK( bRightBorder && bTopBorder,    2 );
  UPDATE_MASK( bLeftBorder,                   3 );
  UPDATE_MASK( bRightBorder,                  5 );
  UPDATE_MASK( bLeftBorder  && bBottomBorder, 6 );
  UPDATE_MASK(                 bBottomBorder, 7 );
  UPDATE_MASK( bRightBorder && bBottomBorder, 8 );
#undef UPDATE_MASK
  return true;
}
Example #2
0
Void TAppRendererTop::go()
{
  switch ( m_iRenderMode )
  {
  case 0:
    render();
    break;
#if H_3D_VSO
  case 1:
    renderModel();
    break;
#endif
  case 10:
    renderUsedPelsMap();
    break;
  default:
    AOT(true);
  }

#if H_3D_REN_MAX_DEV_OUT
  Double dMaxDispDiff = m_cCameraData.getMaxShiftDeviation(); 

  if ( !(dMaxDispDiff < 0) )
  {  
    printf("\n Max. possible shift error: %12.3f samples.\n", dMaxDispDiff );
  }
#endif
}
Example #3
0
ErrVal H264AVCDecoderTest::xRemovePicBuffer( PicBufferList& rcPicBufferUnusedList )
{
  while( ! rcPicBufferUnusedList.empty() )
  {
    PicBuffer* pcBuffer = rcPicBufferUnusedList.popFront();

    if( NULL != pcBuffer )
    {
      PicBufferList::iterator  begin = m_cActivePicBufferList.begin();
      PicBufferList::iterator  end   = m_cActivePicBufferList.end  ();
      PicBufferList::iterator  iter  = std::find( begin, end, pcBuffer );
    
      AOT( pcBuffer->isUsed() );
      m_cUnusedPicBufferList.push_back( pcBuffer );
      if( iter != end )
      {
        m_cActivePicBufferList.erase( iter );
      }
    }
  }

  // hwsun, fix memory for field coding
  // use signed arithmetic so the difference cannot wrap when the active list
  // is smaller than four times the DPB limit
  for( Int i = (Int)m_cActivePicBufferList.size() - (Int)m_pcH264AVCDecoder->getMaxEtrDPB() * 4; i > 0; i-- )
  {
    PicBuffer* pcBuffer = m_cActivePicBufferList.popFront();    
    if( NULL != pcBuffer )
      m_cUnusedPicBufferList.push_back( pcBuffer );
  }

  return Err::m_nOK;
}
Example #4
0
Void
MbData::activateMotionRefinement()
{
  AOT( m_bHasMotionRefinement );
  m_bHasMotionRefinement = true;
  m_eMbModeBase          = m_eMbMode;
  ::memcpy( m_aBlkModeBase, m_aBlkMode, sizeof( m_aBlkMode ) );
  m_apcMbMotionDataBase[0]->copyFrom( *m_apcMbMotionData[0] );
  m_apcMbMotionDataBase[1]->copyFrom( *m_apcMbMotionData[1] );
}
Example #5
0
Void TAppDecTop::setBWVSPLUT(TComSlice* pcSlice,  Int iCodedViewIdx,  Int iCurPoc)
{
  //first view does not have VSP 
  if( iCodedViewIdx == 0 ) return;

  AOT( iCodedViewIdx <= 0);
  //AOT( iCodedViewIdx >= m_iNumberOfViews );
  Int iNeighborViewId = 0;
  //  Int* piShiftLUT = bRenderFromLeft ? m_cCamParsCollector.getBaseViewShiftLUTI()[iCodedViewIdx][iNeighborViewId][0] : m_cCamParsCollector.getBaseViewShiftLUTI()[iNeighborViewId][iCodedViewIdx][0];
  Int* piShiftLUT = m_cCamParsCollector.getBaseViewShiftLUTI()[iNeighborViewId][iCodedViewIdx][0];
  pcSlice->setBWVSPLUTParam(piShiftLUT, 2-LOG2_DISP_PREC_LUT );
}
Example #6
0
ControlData::~ControlData()
{
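  // leak check: all side-information arrays must have been freed before
  // destruction; AOT fires if any pointer is still set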
  AOT( m_pacBQMbQP );
  AOT( m_pauiBQMbCbp );
  AOT( m_pabBQ8x8Trafo );
  AOT( m_pacFGSMbQP );
  AOT( m_pauiFGSMbCbp );
  AOT( m_pabFGS8x8Trafo );
}
Example #7
0
Void TAppRendererTop::renderUsedPelsMap( )
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  // TemporalImprovement Filter
  std::vector<TComPicYuv*> apcPicYuvLastBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvLastBaseDepth;

  Int aiPad[2] = { 0, 0 };

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);

    //Temporal improvement Filter
    if ( m_bTempDepthFilter )
    {
      pcNewVideoPic = new TComPicYuv;
      pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseDepth.push_back(pcNewDepthPic);
    }
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    if ( iFrame >= m_iFrameSkip )
    {      
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad  ) ;
        apcPicYuvBaseVideo[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad  ) ;
        apcPicYuvBaseDepth[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( m_bTempDepthFilter && (iFrame >= m_iFrameSkip) )
        {
          m_pcRenTop->temporalFilterVSRS( apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], apcPicYuvLastBaseVideo[iBaseViewIdx], apcPicYuvLastBaseDepth[iBaseViewIdx], ( iFrame == m_iFrameSkip) );
        }
      }
    }
    else
    {
      std::cout << "Skipping Frame " << iFrame << std::endl;

      iFrame++;
      continue;
    }
    m_cCameraData.update( (UInt) ( iFrame - m_iFrameSkip ) );

    for(Int iViewIdx=1; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
    {
      std::cout << "Rendering UsedPelsMap for Frame " << iFrame << " of View " << (Double) m_cCameraData.getBaseViewNumbers()[iViewIdx] << std::endl;

      Int iViewSIdx      = m_cCameraData.getBaseId2SortedId()[iViewIdx];
      Int iFirstViewSIdx = m_cCameraData.getBaseId2SortedId()[0];

      AOT( iViewSIdx == iFirstViewSIdx );

      Bool bFirstIsLeft = (iFirstViewSIdx < iViewSIdx);

      m_pcRenTop->setShiftLUTs(
        m_cCameraData.getBaseViewShiftLUTD()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTD()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        -1
        );

      m_pcRenTop->getUsedSamplesMap( apcPicYuvBaseDepth[0], pcPicYuvSynthOut, bFirstIsLeft );

      // Write Output
      m_apcTVideoIOYuvSynthOutput[iViewIdx-1]->write( pcPicYuvSynthOut, 0, 0, 0 );

    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];

    // Temporal Filter
    if ( m_bTempDepthFilter )
    {
      apcPicYuvLastBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvLastBaseVideo[uiBaseView];

      apcPicYuvLastBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvLastBaseDepth[uiBaseView];
    }
  }
  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();

}
Example #8
0
Void TAppRendererTop::xRenderModelFromNums()
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;


  Int aiPad[2] = { 0, 0 };

  // Init Model
  TRenModel cCurModel;

  AOT( m_iLog2SamplingFactor != 0 );
  cCurModel.setupPart( 0, m_iSourceHeight  ); 
#if H_3D_VSO_EARLY_SKIP
  cCurModel.create( m_iNumberOfInputViews, m_iNumberOfOutputViews, m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin, false );
#else
  cCurModel.create( m_iNumberOfInputViews, m_iNumberOfOutputViews, m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin );
#endif

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);
  }

  for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
  {
    Int  iLeftBaseViewIdx  = -1;
    Int  iRightBaseViewIdx = -1;
    Bool bIsBaseView = false;

    Int iRelDistToLeft;
    m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft,  bIsBaseView );

    if (m_iRenderDirection == 1 )
    {
      iRightBaseViewIdx = -1;
      AOT( iLeftBaseViewIdx == -1);
    }

    if (m_iRenderDirection == 2 )
    {
      iLeftBaseViewIdx = -1;
      AOT( iRightBaseViewIdx == -1);
    }

    Int iLeftBaseViewSIdx  = -1;
    Int iRightBaseViewSIdx = -1;

    if (iLeftBaseViewIdx != -1 )
    {
      iLeftBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iLeftBaseViewIdx];
    }

    if (iRightBaseViewIdx != -1 )
    {
      iRightBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iRightBaseViewIdx];
    }
    cCurModel.createSingleModel(-1, -1, iSynthViewIdx, iLeftBaseViewSIdx, iRightBaseViewSIdx, false, m_iBlendMode );
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {

    if ( iFrame >= m_iFrameSkip )
    {      
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad  ) ;
        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad  ) ;
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( iFrame >= m_iFrameSkip )
        {
          Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];
          cCurModel.setBaseView( iBaseViewSIdx, apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], NULL, NULL );
        }
      }
    }
    else
    {
      iFrame++;
      continue;
    }
    m_cCameraData.update( (UInt) (iFrame - m_iFrameSkip ));
    for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
    {

      Int  iLeftBaseViewIdx  = -1;
      Int  iRightBaseViewIdx = -1;

      Bool bIsBaseView = false;

      Int iRelDistToLeft;
      Bool bHasLRView = m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );
      Bool bHasLView = ( iLeftBaseViewIdx != -1 );
      Bool bHasRView = ( iRightBaseViewIdx != -1 );

      switch( m_iRenderDirection )
      {
        /// INTERPOLATION
      case 0:
        assert( bHasLRView || bIsBaseView );

        if ( !bHasLRView && bIsBaseView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx,
                                    m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx]    ,
                                    m_cCameraData.getBaseViewShiftLUTI ()[iLeftBaseViewIdx ][iRightBaseViewIdx],
                                    m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx]    ,
                                    m_cCameraData.getBaseViewShiftLUTI ()[iRightBaseViewIdx][iLeftBaseViewIdx] ,
                                    iRelDistToLeft,
                                    NULL );
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_MERGED, pcPicYuvSynthOut );
        }
        break;
        /// EXTRAPOLATION FROM LEFT
      case 1:

        if ( !bHasLView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx, m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx], NULL, NULL, NULL, -1,  NULL);
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_LEFT, pcPicYuvSynthOut );
        }
        break;
        /// EXTRAPOLATION FROM RIGHT
      case 2:            // extrapolation from right
        if ( !bHasRView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iRightBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx, NULL , NULL, m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx ][iSynthViewIdx], NULL, -1, NULL);
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_RIGHT, pcPicYuvSynthOut );
        }
        break;
      }

      // Write Output
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];
  }
  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();

}
Example #9
0
Void TAppRendererTop::xRenderModelFromString()
{
    xCreateLib();
    xInitLib();

    // Create buffers for input views
    std::vector<TComPicYuv*> apcPicYuvBaseVideo;
    std::vector<TComPicYuv*> apcPicYuvBaseDepth;


    for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
    {
      TComPicYuv* pcNewVideoPic = new TComPicYuv;
      TComPicYuv* pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvBaseDepth.push_back(pcNewDepthPic);
    }

    Int aiPad[2] = { 0, 0 };

    // Init Model
    TRenModel cCurModel;

    AOT( m_iLog2SamplingFactor != 0 );
#if H_3D_VSO_EARLY_SKIP
    cCurModel.create( m_cRenModStrParser.getNumOfBaseViews(), m_cRenModStrParser.getNumOfModels(), m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin, false );
#else
    cCurModel.create( m_cRenModStrParser.getNumOfBaseViews(), m_cRenModStrParser.getNumOfModels(), m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin );
#endif

    cCurModel.setupPart( 0, m_iSourceHeight  ); 

    for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
    {
      Int iNumOfModels   = m_cRenModStrParser.getNumOfModelsForView(iViewIdx, 1);

      for (Int iCurModel = 0; iCurModel < iNumOfModels; iCurModel++ )
      {
        Int iModelNum; Int iLeftViewNum; Int iRightViewNum; Int iDump; Int iOrgRefNum; Int iBlendMode;
        m_cRenModStrParser.getSingleModelData  ( iViewIdx, 1, iCurModel, iModelNum, iBlendMode, iLeftViewNum, iRightViewNum, iOrgRefNum, iDump ) ;
        cCurModel         .createSingleModel   ( iViewIdx, 1, iModelNum, iLeftViewNum, iRightViewNum, false, iBlendMode );

      }
    }

    // Create Buffer for synthesized View
    TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
    pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

    Bool bAnyEOS = false;

    Int iNumOfRenderedFrames = 0;
    Int iFrame = 0;

    while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
    {

      if ( iFrame >= m_iFrameSkip )
      {      
        // read in depth and video
        for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
        {
          m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad  ) ;
          bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

          m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad  ) ;
          bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();
        }
      }
      else
      {
        iFrame++;
        continue;
      }


      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        TComPicYuv* pcPicYuvVideo = apcPicYuvBaseVideo[iBaseViewIdx];
        TComPicYuv* pcPicYuvDepth = apcPicYuvBaseDepth[iBaseViewIdx];
        Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx ];
        cCurModel.setBaseView( iBaseViewSIdx, pcPicYuvVideo, pcPicYuvDepth, NULL, NULL );
      }

      m_cCameraData.update( (UInt) ( iFrame - m_iFrameSkip ));

      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        // setup virtual views
        Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];

        cCurModel.setErrorMode( iBaseViewSIdx, 1, 0 );
        Int iNumOfSV  = m_cRenModStrParser.getNumOfModelsForView( iBaseViewSIdx, 1);
        for (Int iCurView = 0; iCurView < iNumOfSV; iCurView++ )
        {
          Int iOrgRefBaseViewSIdx;
          Int iLeftBaseViewSIdx;
          Int iRightBaseViewSIdx;
          Int iSynthViewRelNum;
          Int iModelNum;
          Int iBlendMode;

          m_cRenModStrParser.getSingleModelData(iBaseViewSIdx, 1, iCurView, iModelNum, iBlendMode, iLeftBaseViewSIdx, iRightBaseViewSIdx, iOrgRefBaseViewSIdx, iSynthViewRelNum );

          Int iLeftBaseViewIdx    = -1;
          Int iRightBaseViewIdx   = -1;

          TComPicYuv* pcPicYuvOrgRef  = NULL;
          Int**      ppiShiftLUTLeft  = NULL;
          Int**      ppiShiftLUTRight = NULL;
          Int**      ppiBaseShiftLUTLeft  = NULL;
          Int**      ppiBaseShiftLUTRight = NULL;


          Int        iDistToLeft      = -1;

          Int iSynthViewIdx = m_cCameraData.synthRelNum2Idx( iSynthViewRelNum );

          if ( iLeftBaseViewSIdx != -1 )
          {
            iLeftBaseViewIdx   = m_cCameraData.getBaseSortedId2Id()   [ iLeftBaseViewSIdx ];
            ppiShiftLUTLeft    = m_cCameraData.getSynthViewShiftLUTI()[ iLeftBaseViewIdx  ][ iSynthViewIdx  ];
          }

          if ( iRightBaseViewSIdx != -1 )
          {
            iRightBaseViewIdx  = m_cCameraData.getBaseSortedId2Id()   [iRightBaseViewSIdx ];
            ppiShiftLUTRight   = m_cCameraData.getSynthViewShiftLUTI()[ iRightBaseViewIdx ][ iSynthViewIdx ];
          }

          if ( iRightBaseViewSIdx != -1 && iLeftBaseViewSIdx != -1 )
          {

            ppiBaseShiftLUTLeft  = m_cCameraData.getBaseViewShiftLUTI() [ iLeftBaseViewIdx  ][ iRightBaseViewIdx ];
            ppiBaseShiftLUTRight = m_cCameraData.getBaseViewShiftLUTI() [ iRightBaseViewIdx ][ iLeftBaseViewIdx  ];
            iDistToLeft    = m_cCameraData.getRelDistLeft(  iSynthViewIdx , iLeftBaseViewIdx, iRightBaseViewIdx);
          }

          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;

          cCurModel.setSingleModel( iModelNum, ppiShiftLUTLeft, ppiBaseShiftLUTLeft, ppiShiftLUTRight, ppiBaseShiftLUTRight, iDistToLeft, pcPicYuvOrgRef );

          Int iViewPos;
          if (iLeftBaseViewSIdx != -1 && iRightBaseViewSIdx != -1)
          {
            iViewPos = VIEWPOS_MERGED;
          }
          else if ( iLeftBaseViewSIdx != -1 )
          {
            iViewPos = VIEWPOS_LEFT;
          }
          else if ( iRightBaseViewSIdx != -1 )
          {
            iViewPos = VIEWPOS_RIGHT;
          }
          else
          {
            AOT(true);
          }

          cCurModel.getSynthVideo ( iModelNum, iViewPos, pcPicYuvSynthOut );

          // Write Output
          m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iModelNum]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
        }
      }
      iFrame++;
      iNumOfRenderedFrames++;
    }

    // Delete Buffers
    for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
    {
      apcPicYuvBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvBaseVideo[uiBaseView];

      apcPicYuvBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvBaseDepth[uiBaseView];
    }
    pcPicYuvSynthOut->destroy();
    delete pcPicYuvSynthOut;

    xDestroyLib();
}
Example #10
0
Void TAppRendererTop::render()
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  // TemporalImprovement Filter
  std::vector<TComPicYuv*> apcPicYuvLastBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvLastBaseDepth;

  Int aiPad[2] = { 0, 0 };

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);

    //Temporal improvement Filter
    if ( m_bTempDepthFilter )
    {
      pcNewVideoPic = new TComPicYuv;
      pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseDepth.push_back(pcNewDepthPic);
    }
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    if ( iFrame >= m_iFrameSkip ) 
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad  ) ;

        apcPicYuvBaseVideo[iBaseViewIdx]->extendPicBorder();

        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad  ) ;
        apcPicYuvBaseDepth[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( m_bTempDepthFilter && (iFrame >= m_iFrameSkip) )
        {
          m_pcRenTop->temporalFilterVSRS( apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], apcPicYuvLastBaseVideo[iBaseViewIdx], apcPicYuvLastBaseDepth[iBaseViewIdx], ( iFrame == m_iFrameSkip) );
        }
      }
    }
    else    
    {
      std::cout << "Skipping Frame " << iFrame << std::endl;

      iFrame++;
      continue;
    }

    m_cCameraData.update( (UInt)( iFrame - m_iFrameSkip ) );

    for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
    {
      Int  iLeftBaseViewIdx  = -1;
      Int  iRightBaseViewIdx = -1;

      Bool bIsBaseView = false;

      Int iRelDistToLeft;
      Bool bHasLRView = m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );
      Bool bHasLView = ( iLeftBaseViewIdx != -1 );
      Bool bHasRView = ( iRightBaseViewIdx != -1 );
      Bool bRender   = true;

      Int  iBlendMode = m_iBlendMode;
      Int  iSimEnhBaseView = 0;

      switch( m_iRenderDirection )
      {
      /// INTERPOLATION
      case 0:
        AOF( bHasLRView || bIsBaseView );

        if ( !bHasLRView && bIsBaseView && m_iBlendMode == 0 )
        {
          bRender = false;
        }
        else
        {
          if ( bIsBaseView )
          {
            AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
            Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];

            if ( m_iBlendMode == 1 )
            {
              if ( iSortedBaseViewIdx - 1 >= 0 )
              {
                iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
            else if ( m_iBlendMode == 2 )
            {
              if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
              {
                iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
          }

          if ( m_iBlendMode == 3 )
          {
            if ( bIsBaseView && (iLeftBaseViewIdx == 0) )
            {
              bRender = false;
            }
            else
            {
              Int iDistLeft  = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx ]  );
              Int iDistRight = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId() [iRightBaseViewIdx]  );

              Int iFillViewIdx = iDistLeft > iDistRight ? iLeftBaseViewIdx : iRightBaseViewIdx;

              if( m_cCameraData.getBaseId2SortedId()[0] < m_cCameraData.getBaseId2SortedId() [iFillViewIdx] )
              {
                iBlendMode        = 1;
                iLeftBaseViewIdx  = 0;
                iRightBaseViewIdx = iFillViewIdx;
              }
              else
              {
                iBlendMode        = 2;
                iLeftBaseViewIdx  = iFillViewIdx;
                iRightBaseViewIdx = 0;
              }

            }
          }
          else
          {
            iBlendMode = m_iBlendMode;
          }
        }

        if ( m_bSimEnhance )
        {
          if ( m_iNumberOfInputViews == 3 && m_cCameraData.getRelSynthViewNumbers()[ iSynthViewIdx ] < VIEW_NUM_PREC  )
          {
            iSimEnhBaseView = 2; // Take middle view
          }
          else
          {
            iSimEnhBaseView = 1; // Take left view
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame "    << iFrame
                    << " of View "           << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx    ] / VIEW_NUM_PREC
                    << "   Left BaseView: "  << (Double) m_cCameraData.getBaseViewNumbers() [iLeftBaseViewIdx ] / VIEW_NUM_PREC
                    << "   Right BaseView: " << (Double) m_cCameraData.getBaseViewNumbers() [iRightBaseViewIdx] / VIEW_NUM_PREC
                    << "   BlendMode: "      << iBlendMode
                    << std::endl;

          m_pcRenTop->setShiftLUTs(
            m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iLeftBaseViewIdx ][iRightBaseViewIdx],
            m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iRightBaseViewIdx][iLeftBaseViewIdx ],
            iRelDistToLeft
          );

          m_pcRenTop->interpolateView(
            apcPicYuvBaseVideo[iLeftBaseViewIdx ],
            apcPicYuvBaseDepth[iLeftBaseViewIdx ],
            apcPicYuvBaseVideo[iRightBaseViewIdx],
            apcPicYuvBaseDepth[iRightBaseViewIdx],
            pcPicYuvSynthOut,
            iBlendMode,
            iSimEnhBaseView
            );
        }
        else
        {
          AOT(iLeftBaseViewIdx != iRightBaseViewIdx );
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut );
          std::cout << "Copied    Frame " << iFrame
                    << " of View "        << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC
                    << "   (BaseView)  "    << std::endl;
        }

        break;
      /// EXTRAPOLATION FROM LEFT
      case 1:
        if ( !bHasLView ) // View to render is BaseView
        {
          bRender = false;
        }

        if ( bIsBaseView )
        {
          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];
          if ( iSortedBaseViewIdx - 1 >= 0 )
          {
            iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }


        if (bRender)
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          m_pcRenTop->setShiftLUTs( m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx], NULL, NULL, NULL, NULL, -1 );
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iLeftBaseViewIdx ], apcPicYuvBaseDepth[iLeftBaseViewIdx ], pcPicYuvSynthOut, true );
        }
        break;
      /// EXTRAPOLATION FROM RIGHT
      case 2:            // extrapolation from right
        if ( !bHasRView ) // View to render is BaseView
        {
          bRender = false;
        }

        if ( bIsBaseView )
        {
          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];
          if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
          {
            iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          m_pcRenTop->setShiftLUTs( NULL, NULL, NULL, m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx], NULL, iRelDistToLeft );
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iRightBaseViewIdx ], apcPicYuvBaseDepth[iRightBaseViewIdx ], pcPicYuvSynthOut, false);
        }
        break;
      }

      // Write Output

      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];

    // Temporal Filter
    if ( m_bTempDepthFilter )
    {
      apcPicYuvLastBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvLastBaseVideo[uiBaseView];

      apcPicYuvLastBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvLastBaseDepth[uiBaseView];
    }
  }

  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();

}
Example #11
0
MbDataCtrl::~MbDataCtrl()
{
  AOT( xDeleteData() );
  AOT( m_bInitDone );
}
Example #12
0
ErrVal MbDataCtrl::getBoundaryMask( Int iMbY, Int iMbX, UInt& ruiMask ) const 
{
  UInt     uiCurrIdx    = iMbY * m_uiMbStride + iMbX + m_uiMbOffset;
  AOT( uiCurrIdx >= m_uiSize );

  ruiMask               = 0;

  ROTRS( m_pcMbData[uiCurrIdx].isIntra(), Err::m_nOK );

  Bool bLeftAvailable   = ( iMbX > 0 );
  Bool bTopAvailable    = ( iMbY > 0 );
  Bool bRightAvailable  = ( iMbX < m_iMbPerLine-1 );
  Bool bBottomAvailable = ( iMbY < m_iMbPerColumn-1 );
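  // ruiMask collects one bit per intra neighbour, clockwise from the top:
  // 0x01 top, 0x02 top-right, 0x04 right, 0x08 bottom-right,
  // 0x10 bottom, 0x20 bottom-left, 0x40 left, 0x80 top-left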

  if( bTopAvailable )
  {
    {
      Int iIndex = uiCurrIdx - m_uiMbStride;
      ruiMask |= m_pcMbData[iIndex].isIntra() ? 0x01 : 0;
    }

    if( bLeftAvailable )
    {
      Int iIndex = uiCurrIdx - m_uiMbStride - 1;
      ruiMask |= m_pcMbData[iIndex].isIntra() ? 0x80 : 0;
    }

    if( bRightAvailable )
    {
      Int iIndex = uiCurrIdx - m_uiMbStride + 1;
      ruiMask |= m_pcMbData[iIndex].isIntra() ? 0x02 : 0;
    }
  }

  if( bBottomAvailable )
  {
    {
      Int iIndex = uiCurrIdx + m_uiMbStride;
      ruiMask |= m_pcMbData[iIndex].isIntra() ? 0x10 : 0;
    }

    if( bLeftAvailable )
    {
      Int iIndex = uiCurrIdx + m_uiMbStride - 1;
      ruiMask |= m_pcMbData[iIndex].isIntra() ? 0x20 : 0;
    }

    if( bRightAvailable )
    {
      Int iIndex = uiCurrIdx + m_uiMbStride + 1;
      ruiMask |= m_pcMbData[iIndex].isIntra() ? 0x08 : 0;
    }
  }

  if( bLeftAvailable )
  {
    Int iIndex = uiCurrIdx - 1;
    ruiMask |= m_pcMbData[iIndex].isIntra() ? 0x40 : 0;
  }

  if( bRightAvailable )
  {
    Int iIndex = uiCurrIdx + 1;
    ruiMask |= m_pcMbData[iIndex].isIntra() ? 0x04 : 0;
  }
  return Err::m_nOK;
}
Example #13
0
ErrVal
SliceEncoder::xAddTCoeffs2( MbDataAccess& rcMbDataAccess, MbDataAccess& rcMbDataAccessBase )
{
  if( rcMbDataAccess.getMbData().isPCM() )
  {
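    // IPCM: copy the raw coefficient levels from the base layer; the
    // destination must not carry any residual of its own (checked via ROT)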
    TCoeff* pSrc = rcMbDataAccessBase.getMbTCoeffs().getTCoeffBuffer();
    TCoeff* pDes = rcMbDataAccess    .getMbTCoeffs().getTCoeffBuffer();
    for( UInt ui = 0; ui < 384; ui++, pSrc++, pDes++ )
    {
      ROT( pDes->getLevel() );
      pDes->setLevel( pSrc->getLevel() );
    }
    ROT( rcMbDataAccess.getMbData().getMbExtCbp() );
    return Err::m_nOK;
  }

  UInt uiBCBP    = 0;
  UInt uiCoded   = 0;
  Bool bCoded    = false;
  Bool bChromaAC = false;
  Bool bChromaDC = false;

  // Add the luma coefficients and track the new BCBP
  if( rcMbDataAccess.getMbData().isTransformSize8x8() )
  {
    for( B8x8Idx c8x8Idx; c8x8Idx.isLegal(); c8x8Idx++ )
    {
      bCoded = false;

      m_pcTransform->addPrediction8x8Blk( rcMbDataAccess.getMbTCoeffs().get8x8( c8x8Idx ),
        rcMbDataAccessBase.getMbTCoeffs().get8x8( c8x8Idx ),
        rcMbDataAccess.getMbData().getQp(),
        rcMbDataAccessBase.getMbData().getQp(), bCoded );

      if( rcMbDataAccess.getMbData().isIntra16x16() )
        AOT(1);

      if( bCoded )
        uiBCBP |= ( 0x33 << c8x8Idx.b4x4() );
    }
  }
  else
  {
    for( B4x4Idx cIdx; cIdx.isLegal(); cIdx++ )
    {
      uiCoded = 0;

      m_pcTransform->addPrediction4x4Blk( rcMbDataAccess.getMbTCoeffs().get( cIdx ),
        rcMbDataAccessBase.getMbTCoeffs().get( cIdx ),
        rcMbDataAccess.getMbData().getQp(),
        rcMbDataAccessBase.getMbData().getQp(), uiCoded );

      if( rcMbDataAccess.getMbData().isIntra16x16() )
      {
        if( *( rcMbDataAccess.getMbTCoeffs().get( cIdx ) ) )
          uiCoded--;
      }

      if( uiCoded )
        uiBCBP |= 1 << cIdx;
    }

    if( rcMbDataAccess.getMbData().isIntra16x16() )
    {
      uiBCBP = uiBCBP ? ( ( 1 << 16 ) - 1 ) : 0;
    }
  }

  // Add the chroma coefficients and update the BCBP
  m_pcTransform->addPredictionChromaBlocks( rcMbDataAccess.getMbTCoeffs().get( CIdx(0) ),
                                            rcMbDataAccessBase.getMbTCoeffs().get( CIdx(0) ),
                                            rcMbDataAccess.getSH().getCbQp( rcMbDataAccess.getMbData().getQp() ),
                                            rcMbDataAccess.getSH().getBaseSliceHeader()->getCbQp( rcMbDataAccessBase.getMbData().getQp() ),
                                            bChromaDC, bChromaAC );
  m_pcTransform->addPredictionChromaBlocks( rcMbDataAccess.getMbTCoeffs().get( CIdx(4) ),
                                            rcMbDataAccessBase.getMbTCoeffs().get( CIdx(4) ),
                                            rcMbDataAccess.getSH().getCrQp( rcMbDataAccess.getMbData().getQp() ),
                                            rcMbDataAccess.getSH().getBaseSliceHeader()->getCrQp( rcMbDataAccessBase.getMbData().getQp() ),
                                            bChromaDC, bChromaAC );

  // chroma part of the CBP in the upper bits: 2 = AC coefficients coded, 1 = DC only, 0 = none
  uiBCBP |= ( bChromaAC ? 2 : ( bChromaDC ? 1 : 0 ) ) << 16;

  // Update the CBP
  rcMbDataAccess.getMbData().setAndConvertMbExtCbp( uiBCBP );

  // Update the Intra16x16 mode
  if( rcMbDataAccess.getMbData().isIntra16x16() )
  {
    UInt uiMbType    = INTRA_4X4 + 1;
    UInt uiPredMode  = rcMbDataAccess.getMbData().intraPredMode();
    UInt uiChromaCbp = uiBCBP >> 16;
    Bool bACcoded    = ( ( uiBCBP & ( ( 1 << 16 ) - 1 ) ) != 0 ); // bitwise AND: test the 16 luma BCBP bits

    uiMbType += uiPredMode;
    uiMbType += ( bACcoded ) ? 12 : 0;
    uiMbType += uiChromaCbp << 2;

    rcMbDataAccess.getMbData().setMbMode( MbMode( uiMbType ) );

    // Sanity checks
    if( rcMbDataAccess.getMbData().intraPredMode() != uiPredMode )
      AOT(1);
    if( rcMbDataAccess.getMbData().getCbpChroma16x16() != uiChromaCbp )
      AOT(1);
    if( rcMbDataAccess.getMbData().isAcCoded() != bACcoded )
      AOT(1);
  }

  return Err::m_nOK;
}
Example #14
0
// JVT-V035
ErrVal
SliceEncoder::updatePictureAVCRewrite( ControlData& rcControlData, UInt uiMbInRow )
{
  ROF( m_bInitDone );

  SliceHeader&  rcSliceHeader   = *rcControlData.getSliceHeader();
  FMO&          rcFMO           = *rcSliceHeader.getFMO();
  MbDataCtrl*   pcMbDataCtrl    = rcControlData.getMbDataCtrl();
  MbDataCtrl*   pcBaseLayerCtrl = rcControlData.getBaseLayerCtrl();

  //====== initialization ======
  RNOK( pcMbDataCtrl->initSlice( rcSliceHeader, DECODE_PROCESS, false, NULL ) );

  if( rcSliceHeader.getTCoeffLevelPredictionFlag() == true )
  {
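    // SVC-to-AVC rewrite: with T-coefficient level prediction, the base-layer
    // coefficients have to be folded into each macroblock after the coding pass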
    // Update the macroblock state
    // Must be done after the bit-stream has been constructed
    for( Int iSliceGroupId = rcFMO.getFirstSliceGroupId(); ! rcFMO.SliceGroupCompletelyCoded( iSliceGroupId ); iSliceGroupId = rcFMO.getNextSliceGroupId( iSliceGroupId ) )
    {
      UInt  uiFirstMbInSliceGroup = rcFMO.getFirstMBOfSliceGroup( iSliceGroupId );
      UInt  uiLastMbInSliceGroup  = rcFMO.getLastMBInSliceGroup ( iSliceGroupId );

      for( UInt uiMbAddress = uiFirstMbInSliceGroup; uiMbAddress <= uiLastMbInSliceGroup; uiMbAddress = rcFMO.getNextMBNr( uiMbAddress ) )
      {
        UInt          uiMbY              = 0;
        UInt          uiMbX              = 0;
        MbDataAccess* pcMbDataAccess     = 0;
        MbDataAccess* pcMbDataAccessBase = 0;

        rcSliceHeader.getMbPositionFromAddress( uiMbY, uiMbX, uiMbAddress );
        RNOK( pcMbDataCtrl->initMb( pcMbDataAccess, uiMbY, uiMbX ) );

        if( pcBaseLayerCtrl )
        {
          RNOK( pcBaseLayerCtrl->initMb( pcMbDataAccessBase, uiMbY, uiMbX ) );
          pcMbDataAccess->setMbDataAccessBase( pcMbDataAccessBase );
        }

        // modify QP values (as specified in G.8.1.5.1.2)
        if( pcMbDataAccess->getMbData().getMbCbp() == 0 && ( pcMbDataAccess->getMbData().getMbMode() == INTRA_BL || pcMbDataAccess->getMbData().getResidualPredFlag() ) )
        {
          pcMbDataAccess->getMbData().setQp( pcMbDataAccessBase->getMbData().getQp() );
        }

        if( pcMbDataAccess->isTCoeffPred() )
        {
          if( pcMbDataAccess->getMbData().getMbMode() == INTRA_BL )
          {
            // We're going to use the BL skip flag to correctly decode the intra prediction mode
            AOT( pcMbDataAccess->getMbData().getBLSkipFlag() == false );

            // Inherit the mode of the base block
            pcMbDataAccess->getMbData().setMbMode( pcMbDataAccessBase->getMbData().getMbMode() );

            // Inherit intra prediction modes
            for( B4x4Idx cIdx; cIdx.isLegal(); cIdx++ )
            {
              pcMbDataAccess->getMbData().intraPredMode( cIdx ) = pcMbDataAccessBase->getMbData().intraPredMode( cIdx );
            }

            pcMbDataAccess->getMbData().setChromaPredMode( pcMbDataAccessBase->getMbData().getChromaPredMode() );
          }

          // The 8x8 transform flag is present in the bit-stream unless transform coefficients
          // are not transmitted at the enhancement layer. In this case, inherit the base layer
          // transform type. This makes intra prediction work correctly, etc.
          if( ( pcMbDataAccess->getMbData().getMbCbp() & 0x0F ) == 0 )
          {
            pcMbDataAccess->getMbData().setTransformSize8x8( pcMbDataAccessBase->getMbData().isTransformSize8x8() );
          }

          RNOK( xAddTCoeffs2( *pcMbDataAccess, *pcMbDataAccessBase ) );
        }

        if( pcMbDataAccess->getMbAddress() != uiFirstMbInSliceGroup &&
            pcMbDataAccess->getSH().getTCoeffLevelPredictionFlag() &&
           !pcMbDataAccess->getSH().getNoInterLayerPredFlag() &&
           !pcMbDataAccess->getMbData().isIntra16x16() &&
            pcMbDataAccess->getMbData().getMbExtCbp() == 0 )
        {
          pcMbDataAccess->getMbData().setQp4LF( pcMbDataAccess->getLastQp4LF() );
        }
        else
        {
          pcMbDataAccess->getMbData().setQp4LF( pcMbDataAccess->getMbData().getQp() );
        }
      }
    }
  }

  return Err::m_nOK;
}