// Ejemplo n.º 1  (score: 0)
Bool MbData::calcBCBP( UInt uiStart, UInt uiStop, UInt uiPos ) const
{
  AOF( uiStart != uiStop );
  if( uiPos < 16 )
  {
    if( isTransformSize8x8() )
    {
      UInt uiTab[] = { 0, 1, 0, 1, 2, 3, 2, 3 };
      return ( ( calcMbCbp( uiStart, uiStop ) >> uiTab[uiPos>>1] ) & 1 ) != 0;
    }
    // Luma 4x4 block
    if( uiStart == 0 && isIntra16x16() )
      uiStart = 1;

    const UChar  *pucScan = getFieldFlag() ? g_aucFieldScan : g_aucFrameScan;
    const TCoeff *piCoeff = getMbTCoeffs().get( B4x4Idx(uiPos) );
    for( UInt ui = uiStart; ui < uiStop; ui++ )
    {
      if( piCoeff[pucScan[ui]] )
      {
        return true;
      }
    }
  }
  else if( uiPos < 24 )
// Ejemplo n.º 2  (score: 0)
Bool
RefPicIdc::isValid() const
{
  // Returns true iff this idc still refers to a live picture: the idc must
  // have been set, the stored frame must yield a picture of the stored type,
  // and that picture's POC must still match the POC cached by set().
  // NOTE(review): ROTRS/ROFRS appear to be the usual JSVM
  // "return-on-true/false with return value" macros and AOF an
  // assert-on-false — confirm against the project's macro definitions.
  ROTRS( m_ePicType == NOT_SPECIFIED,                         false );  // never set -> invalid
  const Frame* pcFrame = m_pcFrame->getPic( m_ePicType );
  AOF  ( pcFrame );
  ROFRS( pcFrame->getPoc() == m_iPoc,                         false );  // frame was reused for another POC
  return true;
}
// Ejemplo n.º 3  (score: 0)
/// Inter-layer residual upsampling: delegates upsampling of pcBaseFrame's
/// residual into this frame to the DownConvert module.
/// @param pcBaseFrame       lower-layer source frame
/// @param rcDownConvert     converter that performs the actual upsampling
/// @param pcParameters      resize/crop parameters for the layer pair
/// @param pcMbDataCtrlBase  macroblock data of the base layer
/// @return Err::m_nOK (the delegated call reports no error value)
ErrVal
Frame::residualUpsampling( Frame*             pcBaseFrame,
                           DownConvert&       rcDownConvert,
                           ResizeParameters*  pcParameters,
                           MbDataCtrl*        pcMbDataCtrlBase )
{
  AOF ( m_ePicType == FRAME );  // only defined for full frames, not fields
  rcDownConvert.residualUpsampling( this, pcBaseFrame, pcParameters, pcMbDataCtrlBase );
  return Err::m_nOK;
}
// Ejemplo n.º 4  (score: 0)
/// Caches the identifying data (POC, picture type, frame pointer) of the
/// given picture.  Passing a null pointer resets this idc to the
/// "not specified" state.
Void
RefPicIdc::set( const Frame* pcFrame )
{
  if( ! pcFrame )
  {
    // reset to the unset state
    m_iPoc     = 0;
    m_ePicType = NOT_SPECIFIED;
    m_pcFrame  = 0;
  }
  else
  {
    m_iPoc     = pcFrame->getPoc();
    m_ePicType = pcFrame->getPicType();
    m_pcFrame  = pcFrame->getFrame();
    AOF( m_pcFrame );
  }
}
// Ejemplo n.º 5  (score: 0)
/// Tears down the encoder test bench: encoder core, per-layer YUV file
/// readers/writers, coding parameters, and all picture buffers, and finally
/// deletes this object.
/// NOTE(review): RNOK returns early on error, so a failure part-way through
/// skips the remaining clean-up (including the trailing "delete this").
ErrVal
H264AVCEncoderTest::destroy()
{
  m_cBinDataStartCode.reset();

  if( m_pcH264AVCEncoder )
  {
    RNOK( m_pcH264AVCEncoder->uninit() );
    RNOK( m_pcH264AVCEncoder->destroy() );
  }

  // per-layer YUV file interfaces (writer has no uninit() call here)
  for( UInt ui = 0; ui < MAX_LAYERS; ui++ )
  {
    if( m_apcWriteYuv[ui] )
    {
      RNOK( m_apcWriteYuv[ui]->destroy() );
    }

    if( m_apcReadYuv[ui] )
    {
      RNOK( m_apcReadYuv[ui]->uninit() );
      RNOK( m_apcReadYuv[ui]->destroy() );
    }
  }

  RNOK( m_pcEncoderCodingParameter->destroy());

  for( UInt uiLayer = 0; uiLayer < MAX_LAYERS; uiLayer++ )
  {
    // all active buffers are expected to have been returned before tear-down
    AOF( m_acActivePicBufferList[uiLayer].empty() );

    //===== delete picture buffer =====
    PicBufferList::iterator iter;
    for( iter = m_acUnusedPicBufferList[uiLayer].begin(); iter != m_acUnusedPicBufferList[uiLayer].end(); iter++ )
    {
      delete (*iter)->getBuffer();
      delete (*iter);
    }
    // defensive: also drain the active list (empty if the assertion held)
    for( iter = m_acActivePicBufferList[uiLayer].begin(); iter != m_acActivePicBufferList[uiLayer].end(); iter++ )
    {
      delete (*iter)->getBuffer();
      delete (*iter);
    }
  }

  delete this;
  return Err::m_nOK;
}
// Ejemplo n.º 6  (score: 0)
/// Inter-layer intra upsampling: delegates upsampling of pcBaseFrame into
/// this frame to the DownConvert module.  The two temp frames are scratch
/// buffers for the converter; the MbDataCtrl arguments carry base-layer and
/// frame/field prediction macroblock data.
/// @return Err::m_nOK (the delegated call reports no error value)
ErrVal
Frame::intraUpsampling( Frame*                pcBaseFrame,
                        Frame*                pcTempBaseFrame,
                        Frame*                pcTempFrame,
                        DownConvert&          rcDownConvert,
                        ResizeParameters*     pcParameters,
                        MbDataCtrl*           pcMbDataCtrlBase,
                        MbDataCtrl*           pcMbDataCtrlPredFrm,
                        MbDataCtrl*           pcMbDataCtrlPredFld,
                        ReconstructionBypass* pcReconstructionBypass,
                        Bool                  bConstrainedIntraUpsamplingFlag,
                        Bool*                 pabBaseModeAllowedFlagArrayFrm,
                        Bool*                 pabBaseModeAllowedFlagArrayFld )
{
  AOF ( m_ePicType == FRAME );  // only defined for full frames, not fields
  rcDownConvert.intraUpsampling( this, pcBaseFrame, pcTempFrame, pcTempBaseFrame, pcParameters,
                                 pcMbDataCtrlBase, pcMbDataCtrlPredFrm, pcMbDataCtrlPredFld,
                                 pcReconstructionBypass, pabBaseModeAllowedFlagArrayFrm, pabBaseModeAllowedFlagArrayFld, bConstrainedIntraUpsamplingFlag );
  return Err::m_nOK;
}
// Ejemplo n.º 7  (score: 0)
/// Top-level view-synthesis loop: reads base-view video+depth frames, renders
/// every requested output view per frame (interpolation between two base
/// views, or extrapolation from the left/right one, selected by
/// m_iRenderDirection), and writes each synthesized picture to its output.
/// NOTE(review): m_iFramesToBeRendered == 0 appears to mean "no frame limit"
/// — confirm against the config parser.
Void TAppRendererTop::render()
{
  xCreateLib();
  xInitLib();

  // Create Buffers Input Views;
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  // TemporalImprovement Filter
  std::vector<TComPicYuv*> apcPicYuvLastBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvLastBaseDepth;

  Int aiPad[2] = { 0, 0 };

  // allocate one video and one depth picture per input view (plus previous-
  // frame copies when the temporal depth filter is enabled)
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);

    //Temporal improvement Filter
    if ( m_bTempDepthFilter )
    {
      pcNewVideoPic = new TComPicYuv;
      pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseDepth.push_back(pcNewDepthPic);
    }
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  // main per-frame loop; stops on frame budget or when any input hits EOF
  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    if ( iFrame >= m_iFrameSkip ) 
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad  ) ;

        apcPicYuvBaseVideo[iBaseViewIdx]->extendPicBorder();

        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad  ) ;
        apcPicYuvBaseDepth[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        // temporal smoothing of the depth maps against the previous frame
        if ( m_bTempDepthFilter && (iFrame >= m_iFrameSkip) )
        {
          m_pcRenTop->temporalFilterVSRS( apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], apcPicYuvLastBaseVideo[iBaseViewIdx], apcPicYuvLastBaseDepth[iBaseViewIdx], ( iFrame == m_iFrameSkip) );
        }
      }
    }
    else    
    {
      // frames before m_iFrameSkip are consumed without rendering
      std::cout << "Skipping Frame " << iFrame << std::endl;

      iFrame++;
      continue;
    }

    m_cCameraData.update( (UInt)iFrame - m_iFrameSkip );

    // synthesize every requested output view for this frame
    for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
    {
      Int  iLeftBaseViewIdx  = -1;
      Int  iRightBaseViewIdx = -1;

      Bool bIsBaseView = false;

      Int iRelDistToLeft;
      // locate the base views bracketing the synthesis position (-1 = none)
      Bool bHasLRView = m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );
      Bool bHasLView = ( iLeftBaseViewIdx != -1 );
      Bool bHasRView = ( iRightBaseViewIdx != -1 );
      Bool bRender   = true;

      Int  iBlendMode = m_iBlendMode;
      Int  iSimEnhBaseView = 0;

      switch( m_iRenderDirection )
      {
      /// INTERPOLATION
      case 0:
        AOF( bHasLRView || bIsBaseView );

        if ( !bHasLRView && bIsBaseView && m_iBlendMode == 0 )
        {
          bRender = false;  // output position coincides with a base view: copy
        }
        else
        {
          if ( bIsBaseView )
          {
            AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
            Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];

            // blend modes 1/2 force rendering a base view from its left/right
            // neighbour instead of copying it
            if ( m_iBlendMode == 1 )
            {
              if ( iSortedBaseViewIdx - 1 >= 0 )
              {
                iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
            else if ( m_iBlendMode == 2 )
            {
              if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
              {
                iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
          }

          // blend mode 3: pick hole-filling direction relative to base view 0
          if ( m_iBlendMode == 3 )
          {
            if ( bIsBaseView && (iLeftBaseViewIdx == 0) )
            {
              bRender = false;
            }
            else
            {
              Int iDistLeft  = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx ]  );
              Int iDistRight = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId() [iRightBaseViewIdx]  );

              Int iFillViewIdx = iDistLeft > iDistRight ? iLeftBaseViewIdx : iRightBaseViewIdx;

              if( m_cCameraData.getBaseId2SortedId()[0] < m_cCameraData.getBaseId2SortedId() [iFillViewIdx] )
              {
                iBlendMode        = 1;
                iLeftBaseViewIdx  = 0;
                iRightBaseViewIdx = iFillViewIdx;
              }
              else
              {
                iBlendMode        = 2;
                iLeftBaseViewIdx  = iFillViewIdx;
                iRightBaseViewIdx = 0;
              }

            }
          }
          else
          {
            iBlendMode = m_iBlendMode;
          }
        }

        // similarity-enhancement reference view selection
        if ( m_bSimEnhance )
        {
          if ( m_iNumberOfInputViews == 3 && m_cCameraData.getRelSynthViewNumbers()[ iSynthViewIdx ] < VIEW_NUM_PREC  )
          {
            iSimEnhBaseView = 2; // Take middle view
          }
          else
          {
            iSimEnhBaseView = 1; // Take left view
          }
        }

          if ( bRender )
          {
          std::cout << "Rendering Frame "    << iFrame
                    << " of View "           << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx    ] / VIEW_NUM_PREC
                    << "   Left BaseView: "  << (Double) m_cCameraData.getBaseViewNumbers() [iLeftBaseViewIdx ] / VIEW_NUM_PREC
                    << "   Right BaseView: " << (Double) m_cCameraData.getBaseViewNumbers() [iRightBaseViewIdx] / VIEW_NUM_PREC
                    << "   BlendMode: "      << iBlendMode
                    << std::endl;

          // load the disparity/shift LUTs for both reference views
          m_pcRenTop->setShiftLUTs(
            m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iLeftBaseViewIdx ][iRightBaseViewIdx],
            m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iRightBaseViewIdx][iLeftBaseViewIdx ],

            iRelDistToLeft
          );

          m_pcRenTop->interpolateView(
            apcPicYuvBaseVideo[iLeftBaseViewIdx ],
            apcPicYuvBaseDepth[iLeftBaseViewIdx ],
            apcPicYuvBaseVideo[iRightBaseViewIdx],
            apcPicYuvBaseDepth[iRightBaseViewIdx],
            pcPicYuvSynthOut,
            iBlendMode,
            iSimEnhBaseView
            );
        }
        else
        {
          // output position is a base view: pass the original through
          AOT(iLeftBaseViewIdx != iRightBaseViewIdx );
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut );
          std::cout << "Copied    Frame " << iFrame
                    << " of View "        << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC
                    << "   (BaseView)  "    << std::endl;
        }

        break;
      /// EXTRAPOLATION FROM LEFT
      case 1:
        if ( !bHasLView ) // View to render is BaseView
        {
          bRender = false;
        }

          if (  bIsBaseView )
          {
          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];
          // render from the next base view to the left if one exists,
          // otherwise fall back to copying the original
          if ( iSortedBaseViewIdx - 1 >= 0 )
          {
            iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }


        if (bRender)
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          m_pcRenTop->setShiftLUTs( m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx], NULL, NULL, NULL, NULL, -1 );
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iLeftBaseViewIdx ], apcPicYuvBaseDepth[iLeftBaseViewIdx ], pcPicYuvSynthOut, true );
        }
        break;
      /// EXTRAPOLATION FROM RIGHT
      case 2:            // extrapolation from right
        if ( !bHasRView ) // View to render is BaseView
        {
          bRender = false;
        }

          if (  bIsBaseView )
          {

          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];
          // render from the next base view to the right if one exists,
          // otherwise fall back to copying the original
          if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
          {
            iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          m_pcRenTop->setShiftLUTs( NULL, NULL,NULL, m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx ][iSynthViewIdx],NULL, iRelDistToLeft);
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iRightBaseViewIdx ], apcPicYuvBaseDepth[iRightBaseViewIdx ], pcPicYuvSynthOut, false);
        }
        break;
      }

      // Write Output
      // in sweep mode all views go to the single output stream 0
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];

    // Temporal Filter
    if ( m_bTempDepthFilter )
    {
      apcPicYuvLastBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvLastBaseVideo[uiBaseView];

      apcPicYuvLastBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvLastBaseDepth[uiBaseView];
    }
  }

  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();

}
/**
 - create internal class
 - initialize internal class
 - until the end of the bitstream, call decoding function in TDecTop class
 - delete allocated buffers
 - destroy internal class
 .
 */
Void TAppDecTop::decode()
{
  // Creates one TDecTop per view/depth component on demand, then walks the
  // Annex-B bytestream NAL unit by NAL unit, routing each to the decoder of
  // its view/depth id, flushing and writing reconstructed pictures as views
  // complete.  Build-time switches select how the view/depth id is derived:
  //   QC_MVHEVC_B0046        : layer id is used directly
  //   VIDYO_VPS_INTEGRATION  : view/depth looked up in the active VPS
  //   FLEX_CODING_ORDER_...  : flexible texture/depth coding order (FCO)
#if VIDYO_VPS_INTEGRATION|QC_MVHEVC_B0046
  increaseNumberOfViews( 0, 0, 0 );
#else
  increaseNumberOfViews( 1 );
#endif
  
#if FLEX_CODING_ORDER_M23723
  Int iDepthViewIdx = 0;
  Int iTextureViewIdx=0;
  Bool firstFrame=1;
  Bool viewIdZero=true;
  Int fcoIndex=0;  //when the current frame is not first frame,use FCO_index stand for viewDepth. 
#endif

  Int                 viewDepthId = 0;
  Int                 previousViewDepthId  = 0;
  UInt                uiPOC[MAX_VIEW_NUM*2];
  TComList<TComPic*>* pcListPic[MAX_VIEW_NUM*2];
  Bool                newPicture[MAX_VIEW_NUM*2];
  Bool                previousPictureDecoded = false;
  for( Int i = 0; i < MAX_VIEW_NUM*2; i++ )
  {
    uiPOC[i] = 0;
    pcListPic[i] = NULL;
    newPicture[i] = false;
#if FLEX_CODING_ORDER_M23723
    m_fcoOrder[i] = ' ';
#endif

  }

  ifstream bitstreamFile(m_pchBitstreamFile, ifstream::in | ifstream::binary);
  if (!bitstreamFile)
  {
    fprintf(stderr, "\nfailed to open bitstream file `%s' for reading\n", m_pchBitstreamFile);
    exit(EXIT_FAILURE);
  }

  // optional camera-parameter dump file
  if( m_pchScaleOffsetFile ) 
  { 
    m_pScaleOffsetFile = ::fopen( m_pchScaleOffsetFile, "wt" ); 
    AOF( m_pScaleOffsetFile ); 
  }
  m_cCamParsCollector.init( m_pScaleOffsetFile );

  InputByteStream bytestream(bitstreamFile);

  while (!!bitstreamFile)
  {
    /* location serves to work around a design fault in the decoder, whereby
     * the process of reading a new slice that is the first slice of a new frame
     * requires the TDecTop::decode() method to be called again with the same
     * nal unit. */
    streampos location = bitstreamFile.tellg();
    AnnexBStats stats = AnnexBStats();
    vector<uint8_t> nalUnit;
    InputNALUnit nalu;
    byteStreamNALUnit(bytestream, nalUnit, stats);

    // call actual decoding function
    if (nalUnit.empty())
    {
      /* this can happen if the following occur:
       *  - empty input file
       *  - two back-to-back start_code_prefixes
       *  - start_code_prefix immediately followed by EOF
       */
      fprintf(stderr, "Warning: Attempt to decode an empty NAL unit\n");
    }
    else
    {
      read(nalu, nalUnit);
      // ---- derive viewDepthId / viewId / depth for this NAL unit ----
#if QC_MVHEVC_B0046
    viewDepthId = nalu.m_layerId;
    Int depth = 0;
    Int viewId = viewDepthId;
#else
#if VIDYO_VPS_INTEGRATION
      Int viewId = 0;
      Int depth = 0;
      
      if(nalu.m_nalUnitType != NAL_UNIT_VPS || nalu.m_layerId)
      {
        // code assumes that the first nal unit is VPS
        // currently, this is a hack that requires non-first VPSs have non-zero layer_id
        viewId = getVPSAccess()->getActiveVPS()->getViewId(nalu.m_layerId);
        depth = getVPSAccess()->getActiveVPS()->getDepthFlag(nalu.m_layerId);
      }
#if FLEX_CODING_ORDER_M23723
      if (viewId>0)
      {
        viewIdZero=false;
      }
      if (viewIdZero==false&&viewId==0)
      {
        firstFrame=0; //if viewId has been more than zero and now it set to zero again, we can see that it is not the first view
      }
      if (firstFrame)
      { // if the current view is first frame, we set the viewDepthId as texture plus depth and get the FCO order 
        viewDepthId = iDepthViewIdx+iTextureViewIdx;
        m_fcoViewDepthId=viewDepthId;
      }
      else
      {//if current view is not first frame, we set the viewDepthId depended on the FCO order
        viewDepthId=0;
        // scan the recorded FCO order for the matching 'D' (depth) slot
        if (depth)
        {
          for (fcoIndex=0;fcoIndex<2*MAX_VIEW_NUM;fcoIndex++ )
          {
            if (m_fcoOrder[fcoIndex]=='D')
            {
              if (viewId==viewDepthId)
                break;
              else
                viewDepthId++;
            }
          }
        }
        else
        {
          // same scan for the matching 'T' (texture) slot
          for (fcoIndex=0;fcoIndex<2*MAX_VIEW_NUM;fcoIndex++ )
          {
            if (m_fcoOrder[fcoIndex]=='T')
            {
              if (viewId==viewDepthId)
                break;
              else
                viewDepthId++;
            }
          }
        }

        viewDepthId=fcoIndex;

      }


#else
      viewDepthId = nalu.m_layerId;   // coding order T0D0T1D1T2D2
#endif
    
#else
      Int viewId = nalu.m_viewId;
      Int depth = nalu.m_isDepth ? 1 : 0;
#if FLEX_CODING_ORDER_M23723
      if (viewId>0)
      {
        viewIdZero=false;
      }
      if (viewIdZero==false&&viewId==0)
      {
        firstFrame=0;
      }
      if (firstFrame)
      {
        viewDepthId = iDepthViewIdx+iTextureViewIdx;
        m_fcoViewDepthId=viewDepthId;
      }
      else
      {
        viewDepthId=0;
        if (depth)
        {
          for (fcoIndex=0;fcoIndex<2*MAX_VIEW_NUM;fcoIndex++ )
          {
            if (m_fcoOrder[fcoIndex]=='D')
            {
              if (viewId==viewDepthId)
                break;
              else
                viewDepthId++;
            }
          }
        }
        else
        {
          for (fcoIndex=0;fcoIndex<2*MAX_VIEW_NUM;fcoIndex++ )
          {
            if (m_fcoOrder[fcoIndex]=='T')
            {
              if (viewId==viewDepthId)
                break;
              else
                viewDepthId++;
            }
          }
        }
        viewDepthId=fcoIndex;
      }
#else
      viewDepthId = viewId * 2 + depth;   // coding order T0D0T1D1T2D2
#endif
#endif
#endif     
      newPicture[viewDepthId] = false;
      // grow the decoder array when a new view/depth component appears
      if( viewDepthId >= m_tDecTop.size() )      
      {
#if VIDYO_VPS_INTEGRATION|QC_MVHEVC_B0046
        increaseNumberOfViews( viewDepthId, viewId, depth );
#else
        increaseNumberOfViews( viewDepthId +1 );
#endif   
      }
      if(m_iMaxTemporalLayer >= 0 && nalu.m_temporalId > m_iMaxTemporalLayer)
      {
        previousPictureDecoded = false; 
      }
      // a view switch finishes the previous view's picture (deblock + ALF)
      if(m_tDecTop.size() > 1 && (viewDepthId != previousViewDepthId) && previousPictureDecoded )
      {
        m_tDecTop[previousViewDepthId]->executeDeblockAndAlf(uiPOC[previousViewDepthId], pcListPic[previousViewDepthId], m_iSkipFrame, m_pocLastDisplay[previousViewDepthId]);
      } 
      // back at view 0 => previous access unit is complete for all views
      if( ( viewDepthId == 0 && (viewDepthId != previousViewDepthId) ) || m_tDecTop.size() == 1 )
      {
#if H3D_IVRP
        for( Int i = 0; i < m_tDecTop.size(); i++ )
        {
          m_tDecTop[i]->deleteExtraPicBuffers( uiPOC[i] );
        }
#endif
        for( Int i = 0; i < m_tDecTop.size(); i++ )
        {
          m_tDecTop[i]->compressMotion( uiPOC[i] );
        }
      }   
      if( !(m_iMaxTemporalLayer >= 0 && nalu.m_temporalId > m_iMaxTemporalLayer) )
      {
#if QC_MVHEVC_B0046
        // dependent views inherit the parameter sets of view 0 on first use
        if(viewDepthId && m_tDecTop[viewDepthId]->m_bFirstNal== false)
        {
          m_tDecTop[viewDepthId]->m_bFirstNal = true;
          ParameterSetManagerDecoder* pDecV0 = m_tDecTop[0]->xGetParaSetDec();
          m_tDecTop[viewDepthId]->xCopyVPS(pDecV0->getPrefetchedVPS(0));
          m_tDecTop[viewDepthId]->xCopySPS(pDecV0->getPrefetchedSPS(0));
          m_tDecTop[viewDepthId]->xCopyPPS(pDecV0->getPrefetchedPPS(0));
        }
#endif
        newPicture[viewDepthId] = m_tDecTop[viewDepthId]->decode(nalu, m_iSkipFrame, m_pocLastDisplay[viewDepthId]);
        if (newPicture[viewDepthId])
        {
          bitstreamFile.clear();
          /* location points to the current nalunit payload[1] due to the
           * need for the annexB parser to read three extra bytes.
           * [1] except for the first NAL unit in the file
           *     (but bNewPicture doesn't happen then) */
          bitstreamFile.seekg(location-streamoff(3));
          bytestream.reset();
        }
        if( nalu.isSlice() )
        {
          previousPictureDecoded = true;
#if FLEX_CODING_ORDER_M23723
        // while decoding the first access unit, record the T/D coding order
        if (firstFrame)
        {
            if (depth)
            {
                iDepthViewIdx++;
                m_fcoOrder[viewDepthId]='D';
            }
            else
           {
                iTextureViewIdx++;
                m_fcoOrder[viewDepthId]='T';
           }
          }

#endif
        }
      }
    }
    // single-view case / end of stream: finish the pending picture
    if( ( (newPicture[viewDepthId] || !bitstreamFile) && m_tDecTop.size() == 1) || (!bitstreamFile && previousPictureDecoded == true) )  
    {
      m_tDecTop[viewDepthId]->executeDeblockAndAlf(uiPOC[viewDepthId], pcListPic[viewDepthId], m_iSkipFrame, m_pocLastDisplay[viewDepthId]);
    }
    if( pcListPic[viewDepthId] )
    {
      // flush the DPB on an IDR (or dependent-view IDR) boundary
#if QC_REM_IDV_B0046
      Int iviewId = m_tDecTop[viewDepthId]->getViewId();
      if( newPicture[viewDepthId] && (nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR || ((nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR && iviewId) && m_tDecTop[viewDepthId]->getNalUnitTypeBaseView() == NAL_UNIT_CODED_SLICE_IDR)) )
#else
      if( newPicture[viewDepthId] && (nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR || (nalu.m_nalUnitType == NAL_UNIT_CODED_SLICE_IDV && m_tDecTop[viewDepthId]->getNalUnitTypeBaseView() == NAL_UNIT_CODED_SLICE_IDR)) )
#endif
      {
        xFlushOutput( pcListPic[viewDepthId], viewDepthId );
      }
      // write reconstruction to file
      if(newPicture[viewDepthId])
      {
        xWriteOutput( pcListPic[viewDepthId], viewDepthId, nalu.m_temporalId );
      }
    }
    previousViewDepthId = viewDepthId;
  } 
  if( m_cCamParsCollector.isInitialized() )
  {
    m_cCamParsCollector.setSlice( 0 );
  }
  // last frame
  for( Int viewDepthIdx = 0; viewDepthIdx < m_tDecTop.size(); viewDepthIdx++ )
  {
    xFlushOutput( pcListPic[viewDepthIdx], viewDepthIdx );
  }  
  xDestroyDecLib();
}
// Ejemplo n.º 9  (score: 0)
/// Tears down the two-encoder (multi-view) test bench: both encoder
/// instances, the per-layer YUV file readers/writers, both coding-parameter
/// objects, and all picture buffers, then deletes this object.
/// NOTE(review): RNOK returns early on error, so a failure part-way through
/// skips the remaining clean-up, including the trailing "delete this".
/// @return Err::m_nOK on full clean-up (or the first failing ErrVal via RNOK)
ErrVal
H264AVCEncoderTest::destroy()
{
  m_cBinDataStartCode.reset();

  //===== destroy the two encoder instances =====
  for( UInt ui = 0; ui < 2; ui++ )
  {
    if( m_pcH264AVCEncoder[ui] )
    {
      RNOK( m_pcH264AVCEncoder[ui]->uninit() );
      RNOK( m_pcH264AVCEncoder[ui]->destroy() );
    }
  }

  //===== destroy per-layer YUV file interfaces =====
  for( UInt ui = 0; ui < MAX_LAYERS; ui++ )
  {
    // BUGFIX: also require a non-null writer before dereferencing it — the
    // debug flag alone does not guarantee m_apcWriteYuv[ui] was allocated
    // (the single-encoder variant of this method checks the pointer).
    if( m_pcEncoderCodingParameter[0]->isDebug() && m_apcWriteYuv[ui] )
    {
      RNOK( m_apcWriteYuv[ui]->destroy() );
    }

    if( m_apcReadYuv[ui] )
    {
      RNOK( m_apcReadYuv[ui]->uninit() );
      RNOK( m_apcReadYuv[ui]->destroy() );
    }
  }

  //===== destroy both coding-parameter objects =====
  for( UInt ui = 0; ui < 2; ui++ )
  {
    RNOK( m_pcEncoderCodingParameter[ui]->destroy() );
  }

  //===== delete picture buffers =====
  for( UInt uiView = 0; uiView < MAX_LAYERS; uiView++ )
  {
    // all active buffers are expected to have been returned before tear-down
    AOF( m_acActivePicBufferList[uiView].empty() );

    PicBufferList::iterator iter;
    for( iter = m_acUnusedPicBufferList[uiView].begin(); iter != m_acUnusedPicBufferList[uiView].end(); iter++ )
    {
      delete (*iter)->getBuffer();
      delete (*iter);
    }
    // defensive: also drain the active list (empty if the assertion held)
    for( iter = m_acActivePicBufferList[uiView].begin(); iter != m_acActivePicBufferList[uiView].end(); iter++ )
    {
      delete (*iter)->getBuffer();
      delete (*iter);
    }
  }

  delete this;

  return Err::m_nOK;
}