Code example #1
void ParallelGapRealigner::threadRealignGaps(boost::unique_lock<boost::mutex> &lock, BinData &binData, BinData::iterator &nextUnprocessed, unsigned long threadNumber)
{
//    ISAAC_THREAD_CERR << "threadRealignGaps this " << this  << std::endl;

    isaac::build::GapRealigner &realigner = threadGapRealigners_.at(threadNumber);
    isaac::alignment::Cigar &cigars = threadCigars_.at(threadNumber);
    static const std::size_t READS_AT_A_TIME = 1024;

//    int blockCount = 0;
    while (binData.indexEnd() != nextUnprocessed)
    {
        BinData::iterator ourBegin = nextUnprocessed;
        const std::size_t readsToProcess = std::min<std::size_t>(READS_AT_A_TIME, std::distance(ourBegin, binData.indexEnd()));
        nextUnprocessed += readsToProcess;
        {
            common::unlock_guard<boost::unique_lock<boost::mutex> > unlock(lock);
            for (const BinData::iterator ourEnd = ourBegin + readsToProcess; ourEnd != ourBegin; ++ourBegin)
            {
                PackedFragmentBuffer::Index &index = *ourBegin;
                io::FragmentAccessor &fragment = binData.data_.getFragment(index);
                if (binData.bin_.hasPosition(fragment.fStrandPosition_))
                {
                    cigars.clear();
                    realign(realigner, fragment, index, binData, cigars);
                }
            }
        }
//        ++blockCount;
    }

//    ISAAC_THREAD_CERR << "Thread " << threadNumber << " realigned " << blockCount << " blocks for " << binData.bin_ << std::endl;
}
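
Example #1 holds the bin mutex only while claiming a chunk of the index, then drops it for the actual realignment work via common::unlock_guard. A minimal sketch of what such an "unlock during scope" guard does, assuming it simply unlocks on construction and re-locks on destruction (the real iSAAC helper may differ in detail):

// Hedged sketch of an unlock-during-scope guard in the spirit of common::unlock_guard.
// Works with any lock type exposing lock()/unlock(), e.g. boost::unique_lock<boost::mutex>.
template <typename Lock>
class unlock_guard
{
    Lock &lock_;
public:
    explicit unlock_guard(Lock &lock) : lock_(lock) { lock_.unlock(); } // release while the heavy work runs
    ~unlock_guard() { lock_.lock(); }                                   // reacquire before returning to the locked section
    unlock_guard(const unlock_guard&) = delete;
    unlock_guard& operator=(const unlock_guard&) = delete;
};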
Code example #2
void ParallelGapRealigner::realign(
    isaac::build::GapRealigner& realigner,
    io::FragmentAccessor& fragment, PackedFragmentBuffer::Index &index,
    BinData& binData, isaac::alignment::Cigar& cigars)
{
    if (realigner.realign(binData.getRealignerGaps(fragment.barcode_), binData.bin_.getBinStart(), binData.bin_.getBinEnd(), index, fragment, binData.data_, cigars))
    {
        boost::unique_lock<boost::mutex> lock(cigarBufferMutex_);
        {
            const std::size_t before = binData.additionalCigars_.size();
            binData.additionalCigars_.addOperations(index.cigarBegin_, index.cigarEnd_);
            index.cigarBegin_ = &binData.additionalCigars_.at(before);
            index.cigarEnd_ = &binData.additionalCigars_.back() + 1;
            // Realignment affects both reads. We must make sure realignment updates on one read don't
            // collide with post-realignment pair updates from the other read.
            realigner.updatePairDetails(barcodeTemplateLengthStatistics_, index, fragment, binData.data_);
        }
    }
}
Code example #3
File: BinData.t.cpp  Project: el-bart/WiFiChopper
void testObj::test<1>(void)
{
    const BinData bd(42);
    ensure_equals("invalid size", bd.size(), 42);
}
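
The unit test above only exercises the sized constructor. The decoder and encoder examples that follow all use the same BinData lifecycle: reset(), attach a buffer with set(), then expose it through a BinDataAccessor with setMemAccessor(). A minimal sketch of that sequence, assuming the JSVM/JMVC BinData interfaces exactly as they appear in the examples below (not a complete program on its own; memcpy needs <cstring>):

// Hedged sketch of the recurring BinData buffer-handling pattern from examples #4-#6.
// BinData, BinDataAccessor, UChar, UInt, ErrVal and Err::m_nOK come from the JSVM/JMVC headers.
ErrVal wrapPayload( BinData& rcBinData, BinDataAccessor& rcAccessor,
                    const UChar* pucPayload, UInt uiLength )
{
  rcBinData.reset();                               // drop whatever the object held before
  rcBinData.set( new UChar[uiLength], uiLength );  // attach freshly allocated storage (as in example #4)
  ::memcpy( rcBinData.data(), pucPayload, uiLength );
  rcBinData.setMemAccessor( rcAccessor );          // let downstream code read it through the accessor
  return Err::m_nOK;
}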
Code example #4
ErrVal H264AVCDecoderTest::go()
{
  PicBuffer*    pcPicBuffer = NULL;
  PicBufferList cPicBufferOutputList; 
  PicBufferList cPicBufferUnusedList;
  PicBufferList cPicBufferReleaseList;

  UInt      uiMbX           = 0;
  UInt      uiMbY           = 0;
  UInt      uiNalUnitType   = 0;
  UInt      uiSize          = 0;
  UInt      uiLumOffset     = 0;
  UInt      uiCbOffset      = 0;
  UInt      uiCrOffset      = 0;
  UInt      uiFrame;
  
  Bool      bEOS            = false;
  Bool      bYuvDimSet      = false;


  // HS: packet trace
  UInt   uiMaxPocDiff = m_pcParameter->uiMaxPocDiff;
  UInt   uiLastPoc    = MSYS_UINT_MAX;
  UChar* pcLastFrame  = 0;
  UInt   uiPreNalUnitType = 0;

  cPicBufferOutputList.clear();
  cPicBufferUnusedList.clear();


  RNOK( m_pcH264AVCDecoder->init(true, m_pcParameter) ); 


  Bool bToDecode = false; //JVT-P031
  for( uiFrame = 0; ( uiFrame <= MSYS_UINT_MAX && ! bEOS); )
  {
    BinData* pcBinData;
    BinDataAccessor cBinDataAccessor;

    Int  iPos;
//    Bool bFinishChecking;

    RNOK( m_pcReadBitstream->getPosition(iPos) );

    //JVT-P031
    Bool bFragmented = false;
    Bool bDiscardable = false;
    Bool bStart = false;
    Bool bFirst = true;
    UInt uiTotalLength = 0;
#define MAX_FRAGMENTS 10 // hard-coded
    BinData* pcBinDataTmp[MAX_FRAGMENTS];
    BinDataAccessor cBinDataAccessorTmp[MAX_FRAGMENTS];
    UInt uiFragNb, auiStartPos[MAX_FRAGMENTS], auiEndPos[MAX_FRAGMENTS];
    Bool bConcatenated = false; //FRAG_FIX_3
    Bool bSkip  = false;  // Dong: To skip unknown NAL unit types
    uiFragNb = 0;
    bEOS = false;
    pcBinData = 0;

    while(!bStart && !bEOS)
    {
      if(bFirst)
      {
          RNOK( m_pcReadBitstream->setPosition(iPos) );
          bFirst = false;
      }

      RNOK( m_pcReadBitstream->extractPacket( pcBinDataTmp[uiFragNb], bEOS ) );

//TMM_EC {{
      if( !bEOS && ( (pcBinDataTmp[uiFragNb]->data())[0] & 0x1f ) == 0x0b )
      {
        printf("end of stream\n");
        bEOS = true;
        uiNalUnitType = uiPreNalUnitType;
        RNOK( m_pcReadBitstream->releasePacket( pcBinDataTmp[uiFragNb] ) );
        pcBinDataTmp[uiFragNb] = new BinData;
        uiTotalLength = 0;
        pcBinDataTmp[uiFragNb]->set( new UChar[uiTotalLength], uiTotalLength );
      }
//TMM_EC }}

      pcBinDataTmp[uiFragNb]->setMemAccessor( cBinDataAccessorTmp[uiFragNb] );

      bSkip = false;
      // open the NAL Unit, determine the type and if it's a slice get the frame size

      RNOK( m_pcH264AVCDecoder->initPacket( &cBinDataAccessorTmp[uiFragNb],
                                            uiNalUnitType, uiMbX, uiMbY, uiSize, true,
                                            false, //FRAG_FIX_3
//                                          bStart, auiStartPos[uiFragNb], auiEndPos[uiFragNb], bFragmented, bDiscardable ) );
                                            bStart, auiStartPos[uiFragNb], auiEndPos[uiFragNb], bFragmented, bDiscardable, this->m_pcParameter->getNumOfViews(), bSkip ) );

      uiTotalLength += auiEndPos[uiFragNb] - auiStartPos[uiFragNb];

      // Dong: Skip unknown NAL units
      if( bSkip )
      {
        printf("Unknown NAL unit type: %d\n", uiNalUnitType);
        uiTotalLength -= (auiEndPos[uiFragNb] - auiStartPos[uiFragNb]);
      }
      else if(!bStart)
      {
        ROT( bEOS) ; //[email protected]
        uiFragNb++;
      }
      else
      {
        if(pcBinDataTmp[0]->size() != 0)
        {
          pcBinData = new BinData;
          pcBinData->set( new UChar[uiTotalLength], uiTotalLength );
          // append fragments
          UInt uiOffset = 0;
          for(UInt uiFrag = 0; uiFrag<uiFragNb+1; uiFrag++)
          {
              memcpy(pcBinData->data()+uiOffset, pcBinDataTmp[uiFrag]->data() + auiStartPos[uiFrag], auiEndPos[uiFrag]-auiStartPos[uiFrag]);
              uiOffset += auiEndPos[uiFrag]-auiStartPos[uiFrag];
              RNOK( m_pcReadBitstream->releasePacket( pcBinDataTmp[uiFrag] ) );
              pcBinDataTmp[uiFrag] = NULL;
              if(uiNalUnitType != 6) //JVT-T054
                m_pcH264AVCDecoder->decreaseNumOfNALInAU();
              //FRAG_FIX_3
              if(uiFrag > 0)
                bConcatenated = true; //~FRAG_FIX_3
          }
          
          pcBinData->setMemAccessor( cBinDataAccessor );
          bToDecode = false;
          if((uiTotalLength != 0) && (!bDiscardable || bFragmented))
          {
            //FRAG_FIX
            if( (uiNalUnitType == 20) || (uiNalUnitType == 21) || (uiNalUnitType == 1) || (uiNalUnitType == 5) )
            {
              uiPreNalUnitType = uiNalUnitType;
              RNOK( m_pcH264AVCDecoder->initPacket( &cBinDataAccessor, uiNalUnitType, uiMbX, uiMbY, uiSize,
                                                    //uiNonRequiredPic, //NonRequired JVT-Q066
                                                    false, bConcatenated, //FRAG_FIX_3
                                                    bStart, auiStartPos[uiFragNb+1], auiEndPos[uiFragNb+1],
//                                                  bFragmented, bDiscardable) );
                                                    bFragmented, bDiscardable, this->m_pcParameter->getNumOfViews(), bSkip) );
            }
            else if( uiNalUnitType == 14 )
            {
              uiPreNalUnitType = uiNalUnitType;
              RNOK( m_pcH264AVCDecoder->initPacket( &cBinDataAccessor, uiNalUnitType, uiMbX, uiMbY, uiSize,
                                                    //uiNonRequiredPic, //NonRequired JVT-Q066
                                                    false, bConcatenated, //FRAG_FIX_3
                                                    bStart, auiStartPos[uiFragNb+1], auiEndPos[uiFragNb+1],
//                                                  bFragmented, bDiscardable) );
                                                    bFragmented, bDiscardable, this->m_pcParameter->getNumOfViews(), bSkip) );
            }
            else
              m_pcH264AVCDecoder->initPacket( &cBinDataAccessor );

            bToDecode = true;
            if( uiNalUnitType == 14 )
              bToDecode = false;
          }
        }
      }
    }

    //~JVT-P031

//NonRequired JVT-Q066{
	if(m_pcH264AVCDecoder->isNonRequiredPic())
		continue;
//NonRequired JVT-Q066}


// JVT-Q054 Red. Picture {
  RNOK( m_pcH264AVCDecoder->checkRedundantPic() );
  if ( m_pcH264AVCDecoder->isRedundantPic() )
    continue;
// JVT-Q054 Red. Picture }



  if(bToDecode)//JVT-P031
  {
    // get new picture buffer if required if coded Slice || coded IDR slice
    pcPicBuffer = NULL;
    
    if( uiNalUnitType == 1 || uiNalUnitType == 5 || uiNalUnitType == 20 || uiNalUnitType == 21 )
    {
      RNOK( xGetNewPicBuffer( pcPicBuffer, uiSize ) );

      if( ! bYuvDimSet )
      {
        UInt uiLumSize  = ((uiMbX<<3)+  YUV_X_MARGIN) * ((uiMbY<<3)    + YUV_Y_MARGIN ) * 4;
        uiLumOffset     = ((uiMbX<<4)+2*YUV_X_MARGIN) * YUV_Y_MARGIN   + YUV_X_MARGIN;  
        uiCbOffset      = ((uiMbX<<3)+  YUV_X_MARGIN) * YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + uiLumSize; 
        uiCrOffset      = ((uiMbX<<3)+  YUV_X_MARGIN) * YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + 5*uiLumSize/4;
        bYuvDimSet = true;

        // HS: decoder robustness
        pcLastFrame = new UChar [uiSize];
        ROF( pcLastFrame );
      }
    }
    

    // decode the NAL unit
    RNOK( m_pcH264AVCDecoder->process( pcPicBuffer, cPicBufferOutputList, cPicBufferUnusedList, cPicBufferReleaseList ) );

	// ROI DECODE ICU/ETRI
	m_pcH264AVCDecoder->RoiDecodeInit();

	setCrop();//lufeng: support frame cropping

    // picture output
    while( ! cPicBufferOutputList.empty() )
    {

//JVT-V054
      if( !m_pcWriteYuv->getFileInitDone() )
      {
        //UInt *vcOrder = m_pcH264AVCDecoder->getViewCodingOrder();
        UInt *vcOrder = m_pcH264AVCDecoder->getViewCodingOrder_SubStream();
        if( vcOrder == NULL ) //lufeng: in order to output non-MVC seq
        {
          //UInt order=0;
          m_pcH264AVCDecoder->addViewCodingOrder();
          //vcOrder = m_pcH264AVCDecoder->getViewCodingOrder();
          vcOrder = m_pcH264AVCDecoder->getViewCodingOrder_SubStream();
        }
        m_pcWriteYuv->xInitMVC(m_pcParameter->cYuvFile, vcOrder, m_pcParameter->getNumOfViews()); // JVT-AB024 modified remove active view info SEI
      }

      PicBuffer* pcPicBufferTmp = cPicBufferOutputList.front();
      cPicBufferOutputList.pop_front();
      if( pcPicBufferTmp != NULL )
      {
        // HS: decoder robustness
        while( uiLastPoc + uiMaxPocDiff < (UInt)pcPicBufferTmp->getCts() )
        {
          RNOK( m_pcWriteYuv->writeFrame( pcLastFrame + uiLumOffset,
                                          pcLastFrame + uiCbOffset,
                                          pcLastFrame + uiCrOffset,
                                          uiMbY << 4,
                                          uiMbX << 4,
                                          (uiMbX << 4) + YUV_X_MARGIN*2 ) );
          printf("REPEAT FRAME\n");
          uiFrame   ++;
          uiLastPoc += uiMaxPocDiff;
        }

        if( m_pcParameter->getNumOfViews() > 0 )
        {
          UInt view_cnt;

          for (view_cnt = 0; view_cnt < m_pcParameter->getNumOfViews(); view_cnt++){
            //UInt tmp_order=m_pcH264AVCDecoder->getViewCodingOrder()[view_cnt];
            UInt tmp_order = m_pcH264AVCDecoder->getViewCodingOrder_SubStream()[view_cnt];
            if ((UInt)pcPicBufferTmp->getViewId() == tmp_order)
              break;
          }

          RNOK( m_pcWriteYuv->writeFrame( *pcPicBufferTmp + uiLumOffset,
                                          *pcPicBufferTmp + uiCbOffset,
                                          *pcPicBufferTmp + uiCrOffset,
                                          uiMbY << 4,
                                          uiMbX << 4,
                                          (uiMbX << 4) + YUV_X_MARGIN*2,
                                          //(UInt)pcPicBufferTmp->getViewId(),
                                          view_cnt ) );
        }
        else
          RNOK( m_pcWriteYuv->writeFrame( *pcPicBufferTmp + uiLumOffset,
                                          *pcPicBufferTmp + uiCbOffset,
                                          *pcPicBufferTmp + uiCrOffset,
                                          uiMbY << 4,
                                          uiMbX << 4,
                                          (uiMbX << 4) + YUV_X_MARGIN*2 ) );
        uiFrame++;

        // HS: decoder robustness
        uiLastPoc = (UInt)pcPicBufferTmp->getCts();
        ::memcpy( pcLastFrame, *pcPicBufferTmp + 0, uiSize*sizeof(UChar) );
      }
    }
   } 
    RNOK( xRemovePicBuffer( cPicBufferReleaseList ) );
    RNOK( xRemovePicBuffer( cPicBufferUnusedList ) );
    if( pcBinData )
    {
      RNOK( m_pcReadBitstream->releasePacket( pcBinData ) );
      pcBinData = 0;
    }
  }
  printf("\n %d frames decoded\n", uiFrame );

  delete [] pcLastFrame; // HS: decoder robustness
  
  RNOK( m_pcH264AVCDecoder->uninit( true ) );
  
  m_pcParameter->nFrames  = uiFrame;
  m_pcParameter->nResult  = 0;

  return Err::m_nOK;
}
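
The fragment handling in example #4 (the MAX_FRAGMENTS loop) ultimately just concatenates the per-fragment byte ranges into one freshly allocated BinData before decoding. A stripped-down sketch of that append step, using the same BinData::set()/data() calls as above; concatFragments is a hypothetical helper name, not part of the decoder:

// Hedged sketch of the fragment concatenation from example #4 (memcpy needs <cstring>).
// apcFrag[i] holds fragment i; [auiStart[i], auiEnd[i]) is the byte range to keep from it.
BinData* concatFragments( BinData* apcFrag[], const UInt auiStart[], const UInt auiEnd[],
                          UInt uiNumFrags, UInt uiTotalLength )
{
  BinData* pcOut = new BinData;
  pcOut->set( new UChar[uiTotalLength], uiTotalLength );   // one contiguous output buffer
  UInt uiOffset = 0;
  for( UInt uiFrag = 0; uiFrag < uiNumFrags; uiFrag++ )
  {
    const UInt uiLen = auiEnd[uiFrag] - auiStart[uiFrag];
    ::memcpy( pcOut->data() + uiOffset, apcFrag[uiFrag]->data() + auiStart[uiFrag], uiLen );
    uiOffset += uiLen;
  }
  return pcOut;                                            // the caller owns and later releases it
}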
Code example #5
ErrVal
H264AVCEncoderTest::go()
{
  UInt                    uiWrittenBytes          = 0;
  const UInt              uiMaxFrame              = m_pcEncoderCodingParameter->getTotalFrames();
  UInt                    uiNumLayers             = ( m_pcEncoderCodingParameter->getMVCmode() ? 1 : m_pcEncoderCodingParameter->getNumberOfLayers() );
  UInt                    uiFrame;
  UInt                    uiLayer;
  UInt                    auiMbX                  [MAX_LAYERS];
  UInt                    auiMbY                  [MAX_LAYERS];
  UInt                    auiPicSize              [MAX_LAYERS];
  PicBuffer*              apcOriginalPicBuffer    [MAX_LAYERS];//original pic
  PicBuffer*              apcReconstructPicBuffer [MAX_LAYERS];//rec pic
  PicBufferList           acPicBufferOutputList   [MAX_LAYERS];
  PicBufferList           acPicBufferUnusedList   [MAX_LAYERS];
  ExtBinDataAccessorList  cOutExtBinDataAccessorList;
  Bool                    bMoreSets;

  
  //===== initialization =====
  RNOK( m_pcH264AVCEncoder->init( m_pcEncoderCodingParameter ) ); 


  //===== write parameter sets =====
  for( bMoreSets = true; bMoreSets;  )
  {
    UChar   aucParameterSetBuffer[1000];
    BinData cBinData;
    cBinData.reset();
    cBinData.set( aucParameterSetBuffer, 1000 );

    ExtBinDataAccessor cExtBinDataAccessor;
    cBinData.setMemAccessor( cExtBinDataAccessor );

    RNOK( m_pcH264AVCEncoder      ->writeParameterSets( &cExtBinDataAccessor, bMoreSets) );
    if( m_pcH264AVCEncoder->getScalableSeiMessage() )
    {
      RNOK( m_pcWriteBitstreamToFile->writePacket       ( &m_cBinDataStartCode ) );
      RNOK( m_pcWriteBitstreamToFile->writePacket       ( &cExtBinDataAccessor ) );

      uiWrittenBytes += 4 + cExtBinDataAccessor.size();
    }
    cBinData.reset();
  }

//JVT-W080, PDS SEI message
	if( m_pcEncoderCodingParameter->getMVCmode() && m_pcEncoderCodingParameter->getPdsEnable() )
	{
		//write SEI message
	  UChar   aucParameterSetBuffer[1000];
    BinData cBinData;
    cBinData.reset();
    cBinData.set( aucParameterSetBuffer, 1000 );

    ExtBinDataAccessor cExtBinDataAccessor;
    cBinData.setMemAccessor( cExtBinDataAccessor );

		const UInt uiSPSId = 0; //currently only one SPS with SPSId = 0
		UInt uiNumView       = m_pcEncoderCodingParameter->SpsMVC.getNumViewMinus1()+1;
		UInt* num_refs_list0_anc = new UInt [uiNumView];
		UInt* num_refs_list1_anc = new UInt [uiNumView];
		UInt* num_refs_list0_nonanc = new UInt [uiNumView];
		UInt* num_refs_list1_nonanc = new UInt [uiNumView];

		for( UInt i = 0; i < uiNumView; i++ )
		{
			num_refs_list0_anc[i]    = m_pcEncoderCodingParameter->SpsMVC.getNumAnchorRefsForListX( m_pcEncoderCodingParameter->SpsMVC.getViewCodingOrder()[i], 0 );
			num_refs_list1_anc[i]    = m_pcEncoderCodingParameter->SpsMVC.getNumAnchorRefsForListX( m_pcEncoderCodingParameter->SpsMVC.getViewCodingOrder()[i], 1 );
			num_refs_list0_nonanc[i] = m_pcEncoderCodingParameter->SpsMVC.getNumNonAnchorRefsForListX( m_pcEncoderCodingParameter->SpsMVC.getViewCodingOrder()[i], 0 );
			num_refs_list1_nonanc[i] = m_pcEncoderCodingParameter->SpsMVC.getNumNonAnchorRefsForListX( m_pcEncoderCodingParameter->SpsMVC.getViewCodingOrder()[i], 1 );		  
		}
//#define HELP_INFOR
#ifdef  HELP_INFOR
		printf("\n");
		for( UInt i = 0; i < uiNumView; i++ )
		{
			printf(" num_refs_list0_anchor: %d\tnum_refs_list0_nonanchor: %d\n num_refs_list1_anchor: %d\tnum_refs_list1_nonanchor: %d\n", num_refs_list0_anc[i], num_refs_list1_anc[i], num_refs_list0_nonanc[i], num_refs_list1_nonanc[i] );
		}
#endif

    UInt uiInitialPDIDelayAnc = m_pcEncoderCodingParameter->getPdsInitialDelayAnc();
    UInt uiInitialPDIDelayNonAnc = m_pcEncoderCodingParameter->getPdsInitialDelayNonAnc();
		if( uiInitialPDIDelayAnc < 2 )
			uiInitialPDIDelayAnc  = 2;
		if( uiInitialPDIDelayNonAnc < 2 )
			uiInitialPDIDelayNonAnc  = 2;
    RNOK( m_pcH264AVCEncoder->writePDSSEIMessage( &cExtBinDataAccessor,
                                                  uiSPSId,
                                                  uiNumView,
                                                  num_refs_list0_anc,
                                                  num_refs_list1_anc,
                                                  num_refs_list0_nonanc,
                                                  num_refs_list1_nonanc,
                                                  uiInitialPDIDelayAnc,
                                                  uiInitialPDIDelayNonAnc ) );

		delete[] num_refs_list0_anc;
		delete[] num_refs_list1_anc;
		delete[] num_refs_list0_nonanc;
		delete[] num_refs_list1_nonanc;
		num_refs_list0_anc = NULL;
		num_refs_list1_anc = NULL;
		num_refs_list0_nonanc = NULL;
		num_refs_list1_nonanc = NULL;
	  if( m_pcEncoderCodingParameter->getCurentViewId() == m_pcEncoderCodingParameter->SpsMVC.m_uiViewCodingOrder[0] )
		{
			RNOK( m_pcWriteBitstreamToFile->writePacket       ( &m_cBinDataStartCode ) );
			RNOK( m_pcWriteBitstreamToFile->writePacket       ( &cExtBinDataAccessor ) );
			uiWrittenBytes += 4 + cExtBinDataAccessor.size();
		}

		cBinData.reset();
	}
//~JVT-W080
  //SEI {
  if( m_pcEncoderCodingParameter->getMultiviewSceneInfoSEIEnable() ) // SEI JVT-W060
  {
	  // Multiview scene information sei message
	  UChar aucParameterSetBuffer[1000];
      BinData cBinData;
      cBinData.reset();
      cBinData.set( aucParameterSetBuffer, 1000 );
      ExtBinDataAccessor cExtBinDataAccessor;
      cBinData.setMemAccessor( cExtBinDataAccessor );
	  RNOK( m_pcH264AVCEncoder ->writeMultiviewSceneInfoSEIMessage( &cExtBinDataAccessor ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &m_cBinDataStartCode ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &cExtBinDataAccessor ) );
	  uiWrittenBytes += 4 + cExtBinDataAccessor.size();
	  cBinData.reset();
  }
  if( m_pcEncoderCodingParameter->getMultiviewAcquisitionInfoSEIEnable() ) // SEI JVT-W060
  {
	  // Multiview acquisition information sei message
	  UChar aucParameterSetBuffer[1000];
      BinData cBinData;
      cBinData.reset();
      cBinData.set( aucParameterSetBuffer, 1000 );
      ExtBinDataAccessor cExtBinDataAccessor;
      cBinData.setMemAccessor( cExtBinDataAccessor );
	  RNOK( m_pcH264AVCEncoder ->writeMultiviewAcquisitionInfoSEIMessage( &cExtBinDataAccessor ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &m_cBinDataStartCode ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &cExtBinDataAccessor ) );
	  uiWrittenBytes += 4 + cExtBinDataAccessor.size();
	  cBinData.reset();
  }
  if( m_pcEncoderCodingParameter->getNestingSEIEnable() && m_pcEncoderCodingParameter->getSnapshotEnable() 
	  && m_pcEncoderCodingParameter->getCurentViewId() == 0 )
  {
   // add nesting sei message for view0
      UChar aucParameterSetBuffer[1000];
      BinData cBinData;
      cBinData.reset();
      cBinData.set( aucParameterSetBuffer, 1000 );
      ExtBinDataAccessor cExtBinDataAccessor;
      cBinData.setMemAccessor( cExtBinDataAccessor );
	  RNOK( m_pcH264AVCEncoder ->writeNestingSEIMessage( &cExtBinDataAccessor ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &m_cBinDataStartCode ) );
	  RNOK( m_pcWriteBitstreamToFile->writePacket( &cExtBinDataAccessor ) );
	  uiWrittenBytes += 4 + cExtBinDataAccessor.size();
	  cBinData.reset();
  }
//SEI }

  //===== determine parameters for required frame buffers =====
  for( uiLayer = 0; uiLayer < uiNumLayers; uiLayer++ )
  {
    //auiMbX        [uiLayer] = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getFrameWidth () >> 4;
    //auiMbY        [uiLayer] = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getFrameHeight() >> 4;
    auiMbX        [uiLayer] = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getFrameWidthInMbs();
    auiMbY        [uiLayer] = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getFrameHeightInMbs();
    m_aauiCropping[uiLayer][0]     = 0;
    m_aauiCropping[uiLayer][1]     = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getHorPadding      ();
    m_aauiCropping[uiLayer][2]     = 0;
    m_aauiCropping[uiLayer][3]     = m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getVerPadding      ();
    m_apcWriteYuv[uiLayer]->setCrop(m_aauiCropping[uiLayer]);

    UInt  uiSize            = ((auiMbY[uiLayer]<<4)+2*YUV_Y_MARGIN)*((auiMbX[uiLayer]<<4)+2*YUV_X_MARGIN);
    auiPicSize    [uiLayer] = ((auiMbX[uiLayer]<<4)+2*YUV_X_MARGIN)*((auiMbY[uiLayer]<<4)+2*YUV_Y_MARGIN)*3/2;
    m_auiLumOffset[uiLayer] = ((auiMbX[uiLayer]<<4)+2*YUV_X_MARGIN)* YUV_Y_MARGIN   + YUV_X_MARGIN;  
    m_auiCbOffset [uiLayer] = ((auiMbX[uiLayer]<<3)+  YUV_X_MARGIN)* YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + uiSize; 
    m_auiCrOffset [uiLayer] = ((auiMbX[uiLayer]<<3)+  YUV_X_MARGIN)* YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + 5*uiSize/4;
    m_auiHeight   [uiLayer] =   auiMbY[uiLayer]<<4;
    m_auiWidth    [uiLayer] =   auiMbX[uiLayer]<<4;
    m_auiStride   [uiLayer] =  (auiMbX[uiLayer]<<4)+ 2*YUV_X_MARGIN;
  }

  //===== loop over frames =====
  for( uiFrame = 0; uiFrame < uiMaxFrame; uiFrame++ )
  {
    //===== get picture buffers and read original pictures =====
    for( uiLayer = 0; uiLayer < uiNumLayers; uiLayer++ )
    {
      UInt  uiSkip = ( 1 << m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getTemporalResolution() );

      if( uiFrame % uiSkip == 0 )
      {
        RNOK( xGetNewPicBuffer( apcReconstructPicBuffer [uiLayer], uiLayer, auiPicSize[uiLayer] ) );
        RNOK( xGetNewPicBuffer( apcOriginalPicBuffer    [uiLayer], uiLayer, auiPicSize[uiLayer] ) );

        RNOK( m_apcReadYuv[uiLayer]->readFrame( *apcOriginalPicBuffer[uiLayer] + m_auiLumOffset[uiLayer],
                                                *apcOriginalPicBuffer[uiLayer] + m_auiCbOffset [uiLayer],
                                                *apcOriginalPicBuffer[uiLayer] + m_auiCrOffset [uiLayer],
                                                m_auiHeight [uiLayer],
                                                m_auiWidth  [uiLayer],
                                                m_auiStride [uiLayer] ) );
      }
      else
      {
        apcReconstructPicBuffer [uiLayer] = 0;
        apcOriginalPicBuffer    [uiLayer] = 0;
      }
    }

    //===== call encoder =====
    RNOK( m_pcH264AVCEncoder->process( cOutExtBinDataAccessorList,
                                       apcOriginalPicBuffer,
                                       apcReconstructPicBuffer,
                                       acPicBufferOutputList,
                                       acPicBufferUnusedList ) );

    //===== write and release NAL unit buffers =====
    UInt  uiBytesUsed = 0;
    RNOK( xWrite  ( cOutExtBinDataAccessorList, uiBytesUsed ) );
    uiWrittenBytes   += uiBytesUsed;
    
    //===== write and release reconstructed pictures =====
    for( uiLayer = 0; uiLayer < uiNumLayers; uiLayer++ )
    {
      RNOK( xWrite  ( acPicBufferOutputList[uiLayer], uiLayer ) );
      RNOK( xRelease( acPicBufferUnusedList[uiLayer], uiLayer ) );
    }
  }

  //===== finish encoding =====
  UInt  uiNumCodedFrames = 0;
  Double  dHighestLayerOutputRate = 0.0;
  RNOK( m_pcH264AVCEncoder->finish( cOutExtBinDataAccessorList,
                                    acPicBufferOutputList,
                                    acPicBufferUnusedList,
                                    uiNumCodedFrames,
                                    dHighestLayerOutputRate ) );


  //===== write and release NAL unit buffers =====
  RNOK( xWrite  ( cOutExtBinDataAccessorList, uiWrittenBytes ) );

  //===== write and release reconstructed pictures =====
  for( uiLayer = 0; uiLayer < uiNumLayers; uiLayer++ )
  {
    RNOK( xWrite  ( acPicBufferOutputList[uiLayer], uiLayer ) );
    RNOK( xRelease( acPicBufferUnusedList[uiLayer], uiLayer ) );
  }


  //===== set parameters and output summary =====
  m_cEncoderIoParameter.nFrames = uiFrame;
  m_cEncoderIoParameter.nResult = 0;

  if( ! m_pcEncoderCodingParameter->getMVCmode() )
	{
		UChar   aucParameterSetBuffer[1000];
		BinData cBinData;
		cBinData.reset();
		cBinData.set( aucParameterSetBuffer, 1000 );

		ExtBinDataAccessor cExtBinDataAccessor;
		cBinData.setMemAccessor( cExtBinDataAccessor );
		m_pcH264AVCEncoder->SetVeryFirstCall();
		RNOK( m_pcH264AVCEncoder      ->writeParameterSets( &cExtBinDataAccessor, bMoreSets) );
		RNOK( m_pcWriteBitstreamToFile->writePacket       ( &m_cBinDataStartCode ) );
		RNOK( m_pcWriteBitstreamToFile->writePacket       ( &cExtBinDataAccessor ) );
		uiWrittenBytes += 4 + cExtBinDataAccessor.size();
		cBinData.reset();
	}
//SEI {
  if( m_pcEncoderCodingParameter->getViewScalInfoSEIEnable() )
  {
    //view scalability information sei message
     UChar   aucParameterSetBuffer[1000];
     BinData cBinData;
     cBinData.reset();
     cBinData.set( aucParameterSetBuffer, 1000 );

     ExtBinDataAccessor cExtBinDataAccessor;
     cBinData.setMemAccessor( cExtBinDataAccessor );
     RNOK( m_pcH264AVCEncoder->writeViewScalInfoSEIMessage( &cExtBinDataAccessor ) );
     RNOK( m_pcWriteBitstreamToFile->writePacket       ( &m_cBinDataStartCode ) );
     RNOK( m_pcWriteBitstreamToFile->writePacket       ( &cExtBinDataAccessor ) );
     uiWrittenBytes += 4 + cExtBinDataAccessor.size();
     cBinData.reset();

  }
//SEI }
  if( m_pcWriteBitstreamToFile )
  {
    RNOK( m_pcWriteBitstreamToFile->uninit() );  
    RNOK( m_pcWriteBitstreamToFile->destroy() );  
  }

//SEI {
  if( m_pcEncoderCodingParameter->getViewScalInfoSEIEnable() )
  {
    RNOK    ( ViewScalableDealing() );
  }
//SEI }
  if( ! m_pcEncoderCodingParameter->getMVCmode() )
  {
	RNOK	( ScalableDealing() );
  }

  return Err::m_nOK;
}
Code example #6
ErrVal
H264AVCEncoderTest::go()
{
  UInt                    uiWrittenBytes[MAX_LAYERS] = { 0 }; // zero-initialize: the per-view byte counters below are only ever incremented
  const UInt              uiMaxFrame              = m_pcEncoderCodingParameter[0]->getTotalFrames();
  UInt                    uiNumViews             =  /*(m_pcEncoderCodingParameter[0]->getMVCmode() ? 1 :*/ m_pcEncoderCodingParameter[0]->getNumberOfLayers();
  UInt                    uiFrame=0;
  UInt                    uiView;
  UInt                    uiLayer;
  UInt                    auiMbX                  [MAX_LAYERS];
  UInt                    auiMbY                  [MAX_LAYERS];
  UInt                    auiPicSize              [MAX_LAYERS];
  PicBuffer*              apcOriginalPicBuffer    [MAX_LAYERS];//original pic
  PicBuffer*              apcReconstructPicBuffer [MAX_LAYERS];//rec pic
  PicBufferList           acPicBufferOutputList   [MAX_LAYERS];
  PicBufferList           acPicBufferUnusedList   [MAX_LAYERS];
  ExtBinDataAccessorList  cOutExtBinDataAccessorList[MAX_LAYERS];
  Bool                    bMoreSets;

   // Buffers to avoid writing to disk; WE WILL DEFINE THEM INSIDE GO()
  ExtBinDataAccessorList	LayerBuffer[MAX_LAYERS];
  ExtBinDataAccessorList	StartCodeBuffer[MAX_LAYERS];
 
  UInt i=0;
  UInt j=0;

  //===== initialization =====
  for(uiView=0;uiView<uiNumViews;uiView++){ 
	RNOK( m_pcH264AVCEncoder[uiView]->init( m_pcEncoderCodingParameter[uiView] ) ); 
  }
  

  string ip_adress = m_pcEncoderCodingParameter[0]->getIPAdress();
  char adress[16]; // 15 chars ("255.255.255.255") plus the terminating '\0'
  strcpy(adress,ip_adress.c_str());
  
  m_apcRtpPacker->init(adress,m_pcEncoderCodingParameter[0]->getUDPPort(),true);
  
  m_apcRtpPacker->setPeriod((int)m_pcEncoderCodingParameter[0]->getMaximumFrameRate());


  if(isVerbose)
	printf("Starting go()\n");

  //===== write parameter sets =====
  
  for(i=0;i<uiNumViews;i++){
	  //printf("Iteration: %d\n",j);
	  for( bMoreSets = true; bMoreSets;  )
	  {
		 //printf("Moresets\n");
		  
		UChar   aucParameterSetBuffer[1000];
		BinData cBinData;
		cBinData.reset();
		cBinData.set( aucParameterSetBuffer, 1000 );

		ExtBinDataAccessor cExtBinDataAccessor;
		cBinData.setMemAccessor( cExtBinDataAccessor );

		
		// Could the problem with the input files be here?
			
		RNOK( m_pcH264AVCEncoder[i]      ->writeParameterSets( &cExtBinDataAccessor, bMoreSets) );
		
		//bMoreSets=true;
		//RNOK( m_pcH264AVCEncoder[1]      ->writeParameterSets( &cExtBinDataAccessor, bMoreSets) );

			if( m_pcH264AVCEncoder[i]->getScalableSeiMessage()&& i==0)
				{
				//printf("getScalableSeiMessage at Moresets for Encoder %d\n",j);
				for(j=0;j<uiNumViews;j++){
					//RNOK( m_pcWriteBitstreamToFile[i]->writePacket       ( &m_cBinDataStartCode ) );
					//RNOK( m_pcWriteBitstreamToFile[i]->writePacket       ( &cExtBinDataAccessor ) );
					uiWrittenBytes[j] += 4 + cExtBinDataAccessor.size();
				}
				xWriteInit(cExtBinDataAccessor,m_pcEncoderCodingParameter[0]->isDebug());
				/*RNOK( m_pcWriteBitstreamToOutput->writePacket       ( &m_cBinDataStartCode ) );
				RNOK( m_pcWriteBitstreamToOutput->writePacket       ( &cExtBinDataAccessor ) );*/
				//OutputBuffer.push_back(&cExtBinDataAccessor);
				//if(i==2){bMoreSets=false;
			
		}
		cBinData.reset();
	  }
   bMoreSets = true;
  }

  //m_pcH264AVCEncoder[1]=m_pcH264AVCEncoder[0]; //IS THIS WHY IT POINTS TO THE SAME PICENCODER ??????
  if(isVerbose)
	printf("End of the moreSets loop\n------------------------\n");

//JVT-W080, PDS SEI message
  if( m_pcEncoderCodingParameter[1]->getMVCmode() && m_pcEncoderCodingParameter[1]->getPdsEnable() ){
	  if(isVerbose)
		  printf("\nJVT-W080, PDS SEI for view 1\n");
  }
	if( m_pcEncoderCodingParameter[0]->getMVCmode() && m_pcEncoderCodingParameter[0]->getPdsEnable() )
	{
		if(isVerbose)
			printf("JVT-W080, PDS SEI\n");
		//write SEI message
		UChar   aucParameterSetBuffer[1000];
		BinData cBinData;
		cBinData.reset();
		cBinData.set( aucParameterSetBuffer, 1000 );

		ExtBinDataAccessor cExtBinDataAccessor;
		cBinData.setMemAccessor( cExtBinDataAccessor );

		const UInt uiSPSId = 0; //currently only one SPS with SPSId = 0
		//UInt uiNumView       = m_pcEncoderCodingParameter[0]->SpsMVC.getNumViewMinus1()+1;
		UInt* num_refs_list0_anc = new UInt [uiNumViews];
		UInt* num_refs_list1_anc = new UInt [uiNumViews];
		UInt* num_refs_list0_nonanc = new UInt [uiNumViews];
		UInt* num_refs_list1_nonanc = new UInt [uiNumViews];

		for(uiView = 0; uiView < uiNumViews; uiView++ )
		{
			num_refs_list0_anc[uiView]    = m_pcEncoderCodingParameter[0]->SpsMVC.getNumAnchorRefsForListX( m_pcEncoderCodingParameter[0]->SpsMVC.getViewCodingOrder()[uiView], 0 );
			num_refs_list1_anc[uiView]    = m_pcEncoderCodingParameter[0]->SpsMVC.getNumAnchorRefsForListX( m_pcEncoderCodingParameter[0]->SpsMVC.getViewCodingOrder()[uiView], 1 );
			num_refs_list0_nonanc[uiView] = m_pcEncoderCodingParameter[0]->SpsMVC.getNumNonAnchorRefsForListX( m_pcEncoderCodingParameter[0]->SpsMVC.getViewCodingOrder()[uiView], 0 );
			num_refs_list1_nonanc[uiView] = m_pcEncoderCodingParameter[0]->SpsMVC.getNumNonAnchorRefsForListX( m_pcEncoderCodingParameter[0]->SpsMVC.getViewCodingOrder()[uiView], 1 );		  
		}
//#define HELP_INFOR
#ifdef  HELP_INFOR
		printf("\n");
		for( UInt i = 0; i < uiNumView; i++ )
		{
			printf(" num_refs_list0_anchor: %d\tnum_refs_list0_nonanchor: %d\n num_refs_list1_anchor: %d\tnum_refs_list1_nonanchor: %d\n", num_refs_list0_anc[i], num_refs_list1_anc[i], num_refs_list0_nonanc[i], num_refs_list1_nonanc[i] );
		}
#endif

		UInt uiInitialPDIDelayAnc = m_pcEncoderCodingParameter[0]->getPdsInitialDelayAnc();
		UInt uiInitialPDIDelayNonAnc = m_pcEncoderCodingParameter[0]->getPdsInitialDelayNonAnc();

		if( uiInitialPDIDelayAnc < 2 )
			uiInitialPDIDelayAnc  = 2;
		if( uiInitialPDIDelayNonAnc < 2 )
			uiInitialPDIDelayNonAnc  = 2;

		for(uiView = 0; uiView < uiNumViews; uiView++ )
		{	
			if(isVerbose)
				printf("writePDSSEIMessage for view [%d]\n",uiView);
			
			RNOK( m_pcH264AVCEncoder[uiView]->writePDSSEIMessage( &cExtBinDataAccessor,
			                                                      uiSPSId,
			                                                      uiNumViews,
			                                                      num_refs_list0_anc,
			                                                      num_refs_list1_anc,
			                                                      num_refs_list0_nonanc,
			                                                      num_refs_list1_nonanc,
			                                                      uiInitialPDIDelayAnc,
			                                                      uiInitialPDIDelayNonAnc ) );
		}

		delete[] num_refs_list0_anc;
		delete[] num_refs_list1_anc;
		delete[] num_refs_list0_nonanc;
		delete[] num_refs_list1_nonanc;
		num_refs_list0_anc = NULL;
		num_refs_list1_anc = NULL;
		num_refs_list0_nonanc = NULL;
		num_refs_list1_nonanc = NULL;
	  
		if( m_pcEncoderCodingParameter[0]->getCurentViewId() == m_pcEncoderCodingParameter[0]->SpsMVC.m_uiViewCodingOrder[0] )
		{

			if(isVerbose)
				printf("m_pcEncoderCodingParameter[0]->getCurentViewId() == m_pcEncoderCodingParameter[0]->SpsMVC.m_uiViewCodingOrder[0]\n");
			
			for(uiView=0;uiView<uiNumViews;uiView++){
				//RNOK( m_pcWriteBitstreamToFile[i]->writePacket       ( &m_cBinDataStartCode ) );
				//RNOK( m_pcWriteBitstreamToFile[i]->writePacket       ( &cExtBinDataAccessor ) );
				uiWrittenBytes[uiView] += 4 + cExtBinDataAccessor.size();
			}
			xWriteInit(cExtBinDataAccessor,m_pcEncoderCodingParameter[0]->isDebug());
			//RNOK( m_pcWriteBitstreamToOutput->writePacket       ( &m_cBinDataStartCode ) );
			//RNOK( m_pcWriteBitstreamToOutput->writePacket       ( &cExtBinDataAccessor ) );
			//OutputBuffer.push_back(&cExtBinDataAccessor);
				
			
		}

		cBinData.reset();
	}
//~JVT-W080
  //SEI {
	if( m_pcEncoderCodingParameter[1]->getMultiviewSceneInfoSEIEnable() ) // SEI JVT-W060
  {
	  // Multiview scene information sei message
	  if(isVerbose)
		  printf("getMultiviewSceneInfoSEIEnable for view 1\n");
	}
  if( m_pcEncoderCodingParameter[0]->getMultiviewSceneInfoSEIEnable() ) // SEI JVT-W060
  {
	  // Multiview scene information sei message
	  if(isVerbose)
		  printf("getMultiviewSceneInfoSEIEnable\n");
	  
	  UChar aucParameterSetBuffer[1000];
      BinData cBinData;
      cBinData.reset();
      cBinData.set( aucParameterSetBuffer, 1000 );
      ExtBinDataAccessor cExtBinDataAccessor;
      cBinData.setMemAccessor( cExtBinDataAccessor );
	  RNOK( m_pcH264AVCEncoder[0] ->writeMultiviewSceneInfoSEIMessage( &cExtBinDataAccessor ) );
	  // Hard-coded to 2; this should be done with NUMLayers
		for(i=0;i<uiNumViews;i++){
		  //RNOK( m_pcWriteBitstreamToFile[i]->writePacket( &m_cBinDataStartCode ) );
		  //RNOK( m_pcWriteBitstreamToFile[i]->writePacket( &cExtBinDataAccessor ) );
		  uiWrittenBytes[i] += 4 + cExtBinDataAccessor.size();
		}
		xWriteInit(cExtBinDataAccessor,m_pcEncoderCodingParameter[0]->isDebug());
		//RNOK( m_pcWriteBitstreamToOutput->writePacket( &m_cBinDataStartCode ) );
		//RNOK( m_pcWriteBitstreamToOutput->writePacket( &cExtBinDataAccessor ) );
		//OutputBuffer.push_back(&cExtBinDataAccessor);
	  
	  cBinData.reset();
  }

  if( m_pcEncoderCodingParameter[1]->getMultiviewAcquisitionInfoSEIEnable() ) // SEI JVT-W060
  {
	  // Multiview acquisition information sei message
	  if(isVerbose)
		  printf("getMultiviewAcquisitionInfoSEIEnable for view 1\n");
  }

  if( m_pcEncoderCodingParameter[0]->getMultiviewAcquisitionInfoSEIEnable() ) // SEI JVT-W060
  {
	  // Multiview acquisition information sei message
	  if(isVerbose)
		  printf("getMultiviewAcquisitionInfoSEIEnable\n");
	  
	  UChar aucParameterSetBuffer[1000];
      BinData cBinData;
      cBinData.reset();
      cBinData.set( aucParameterSetBuffer, 1000 );
      ExtBinDataAccessor cExtBinDataAccessor;
      cBinData.setMemAccessor( cExtBinDataAccessor );
	  RNOK( m_pcH264AVCEncoder[0] ->writeMultiviewAcquisitionInfoSEIMessage( &cExtBinDataAccessor ) );
	  // Hard-coded to 2; this should be done with NUMLayers
	for(i=0;i<uiNumViews;i++){
	  //RNOK( m_pcWriteBitstreamToFile[i]->writePacket( &m_cBinDataStartCode ) );
	  //RNOK( m_pcWriteBitstreamToFile[i]->writePacket( &cExtBinDataAccessor ) );
	  uiWrittenBytes[i] += 4 + cExtBinDataAccessor.size();
	}
	xWriteInit(cExtBinDataAccessor,m_pcEncoderCodingParameter[0]->isDebug());
	 //RNOK( m_pcWriteBitstreamToOutput->writePacket( &m_cBinDataStartCode ) );
	 //RNOK( m_pcWriteBitstreamToOutput->writePacket( &cExtBinDataAccessor ) );
	 //OutputBuffer.push_back(&cExtBinDataAccessor);
	  
	  
	  cBinData.reset();
  }

  if( m_pcEncoderCodingParameter[1]->getNestingSEIEnable() && m_pcEncoderCodingParameter[1]->getSnapshotEnable() 
	  && m_pcEncoderCodingParameter[1]->getCurentViewId() == 0 )
  {
   // add nesting sei message for view0
	  if(isVerbose)
		  printf("getNestingSEIEnable for view 1\n");
  }

  if( m_pcEncoderCodingParameter[0]->getNestingSEIEnable() && m_pcEncoderCodingParameter[0]->getSnapshotEnable() 
	  && m_pcEncoderCodingParameter[0]->getCurentViewId() == 0 )
  {
   // add nesting sei message for view0
	 if(isVerbose)
		 printf("getNestingSEIEnable\n");
      UChar aucParameterSetBuffer[1000];
      BinData cBinData;
      cBinData.reset();
      cBinData.set( aucParameterSetBuffer, 1000 );
      ExtBinDataAccessor cExtBinDataAccessor;
      cBinData.setMemAccessor( cExtBinDataAccessor );
	  RNOK( m_pcH264AVCEncoder[0] ->writeNestingSEIMessage( &cExtBinDataAccessor ) );
	  // Hard-coded to 2; this should be done with NUMLayers
	for(i=0;i<uiNumViews;i++){
	  //RNOK( m_pcWriteBitstreamToFile[i]->writePacket( &m_cBinDataStartCode ) );
	  //RNOK( m_pcWriteBitstreamToFile[i]->writePacket( &cExtBinDataAccessor ) );
	  uiWrittenBytes[i] += 4 + cExtBinDataAccessor.size();
	}
	xWriteInit(cExtBinDataAccessor,m_pcEncoderCodingParameter[0]->isDebug());
	//RNOK( m_pcWriteBitstreamToOutput->writePacket( &m_cBinDataStartCode ) );
	//RNOK( m_pcWriteBitstreamToOutput->writePacket( &cExtBinDataAccessor ) );
	//OutputBuffer.push_back(&cExtBinDataAccessor);
	  
	  cBinData.reset();
  }
//SEI }

  //===== determine parameters for required frame buffers =====
  for( uiLayer = 0; uiLayer < uiNumViews; uiLayer++ )
  {
    //auiMbX        [uiLayer] = m_pcEncoderCodingParameter[0]->getLayerParameters( uiLayer ).getFrameWidth () >> 4;
    //auiMbY        [uiLayer] = m_pcEncoderCodingParameter[0]->getLayerParameters( uiLayer ).getFrameHeight() >> 4;
    auiMbX        [uiLayer] = m_pcEncoderCodingParameter[uiLayer]->getLayerParameters( uiLayer ).getFrameWidthInMbs();
    auiMbY        [uiLayer] = m_pcEncoderCodingParameter[uiLayer]->getLayerParameters( uiLayer ).getFrameHeightInMbs();
    m_aauiCropping[uiLayer][0]     = 0;
    m_aauiCropping[uiLayer][1]     = m_pcEncoderCodingParameter[uiLayer]->getLayerParameters( uiLayer ).getHorPadding      ();
    m_aauiCropping[uiLayer][2]     = 0;
    m_aauiCropping[uiLayer][3]     = m_pcEncoderCodingParameter[uiLayer]->getLayerParameters( uiLayer ).getVerPadding      ();
    
	if(!m_pcEncoderCodingParameter[uiLayer]->isParallel()){
		m_apcWriteYuv[uiLayer]->setCrop(m_aauiCropping[uiLayer]);
	}

    UInt  uiSize            = ((auiMbY[uiLayer]<<4)+2*YUV_Y_MARGIN)*((auiMbX[uiLayer]<<4)+2*YUV_X_MARGIN);
    auiPicSize    [uiLayer] = ((auiMbX[uiLayer]<<4)+2*YUV_X_MARGIN)*((auiMbY[uiLayer]<<4)+2*YUV_Y_MARGIN)*3/2;
    m_auiLumOffset[uiLayer] = ((auiMbX[uiLayer]<<4)+2*YUV_X_MARGIN)* YUV_Y_MARGIN   + YUV_X_MARGIN;  
    m_auiCbOffset [uiLayer] = ((auiMbX[uiLayer]<<3)+  YUV_X_MARGIN)* YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + uiSize; 
    m_auiCrOffset [uiLayer] = ((auiMbX[uiLayer]<<3)+  YUV_X_MARGIN)* YUV_Y_MARGIN/2 + YUV_X_MARGIN/2 + 5*uiSize/4;
    m_auiHeight   [uiLayer] =   auiMbY[uiLayer]<<4;
    m_auiWidth    [uiLayer] =   auiMbX[uiLayer]<<4;
    m_auiStride   [uiLayer] =  (auiMbX[uiLayer]<<4)+ 2*YUV_X_MARGIN;

	//printf("Parameter data for Layer %d: auiMbX=%d auiMbY=%d\n",uiLayer,auiMbX,auiMbY);
  }

 
  

  //
  // The first write to output.264 can already go here
  //

  //===== loop over frames =====
  printf("\n---------------------\nPreparation of the Encoder is finished. Let's start with the encoding.\n---------------------\n");
  printf("Total Frames: %d\n---------------------\n\n",uiMaxFrame);
system("pause");
printf("\n");

 // boost::thread workerThread(&H264AVCEncoderTest::xProcessingThread);
 // boost::thread workerThread(&xProcessingThread);

  //workerThread.join();

  //start(0,uiFrame,uiMaxFrame,uiLayer,auiPicSize[uiLayer],uiWrittenBytes[uiLayer],7,8);
  //start(1);
  //join(0);
  //join(1);

    for( uiLayer = 0; uiLayer < uiNumViews; uiLayer++ )
    {
		if(isVerbose)
			printf("Trying to process view %d in a separate function.\n",uiLayer);
		
		xSetProcessingInfo(uiFrame,uiMaxFrame,uiLayer);
		processView(m_apcProcessingInfo,auiPicSize[uiLayer],uiWrittenBytes[uiLayer],cOutExtBinDataAccessorList[uiLayer],apcOriginalPicBuffer[uiLayer],apcReconstructPicBuffer[uiLayer],acPicBufferOutputList[uiLayer],acPicBufferUnusedList[uiLayer]);
		
		//system("pause");
	  }

	//printf("pause");
	join();
	

	if(isVerbose){
		printf("Both threads have finished\n ");
		printf("pause");
	}
  
// All of this goes inside the processView() thread
 // for( uiFrame = 0; uiFrame < uiMaxFrame; uiFrame++ )
 // {
	//  m_apcRtpPacker->increaseTimeStamp();
	//  printf("\nFrame: %d\n",uiFrame);
	//   //system("pause");
	//  
 //   //===== get picture buffers and read original pictures =====
 //   for( uiLayer = 0; uiLayer < uiNumViews; uiLayer++ )
 //   {
	//	//if(uiFrame>=3||uiLayer==0){ //The condition uiFrame>2 should be de-hardcoded via RecPicBuffer->uiMaxFramesInDPB
	//		//printf("\n\n//////////\nUntil frame 2 we will not print Layer %d\n///////////\n\n",uiLayer);
	//	
	//	
	//		  UInt  uiSkip = ( 1 << m_pcEncoderCodingParameter[uiLayer]->getLayerParameters( 0 ).getTemporalResolution() );
	//		  //UInt  uiSkip = ( 1 << m_pcEncoderCodingParameter[uiLayer]->getLayerParameters( uiLayer ).getTemporalResolution() );
	//		  
	//		  //
	//		  //READ FRAME uiFrame FOR VIEW uiLayer
	//		  //

	//		  if( uiFrame % uiSkip == 0 )
	//		  {
	//			RNOK( xGetNewPicBuffer( apcReconstructPicBuffer [uiLayer], uiLayer, auiPicSize[uiLayer] ) );
	//			RNOK( xGetNewPicBuffer( apcOriginalPicBuffer    [uiLayer], uiLayer, auiPicSize[uiLayer] ) );
	//			
	//			//printf("Reading Layer %d of frame %d\n",uiLayer,uiFrame);
	//			//m_apcReadYuv[uiLayer]->m_cFile.tell();
	//			RNOK( m_apcReadYuv[uiLayer]->readFrame( *apcOriginalPicBuffer[uiLayer] + m_auiLumOffset[uiLayer],
	//													*apcOriginalPicBuffer[uiLayer] + m_auiCbOffset [uiLayer],
	//													*apcOriginalPicBuffer[uiLayer] + m_auiCrOffset [uiLayer],
	//													m_auiHeight [uiLayer],
	//													m_auiWidth  [uiLayer],
	//													m_auiStride [uiLayer] ) );

	//			//printf("Frame %d, Layer %d, tamany original:%s\n",uiFrame,uiLayer,apcOriginalPicBuffer[uiLayer]);
	//			
	//		  }
	//		  else
	//		  {
	//			if(isVerbose)
	//				printf("There was a SKIP in the readFrame() part\n");

	//			apcReconstructPicBuffer [uiLayer] = 0;
	//			apcOriginalPicBuffer    [uiLayer] = 0;		
	//		  }
	//		  

	//		  //
	//		  //PROCESS FRAME uiFrame FOR VIEW uiLayer
	//		  //

	//		  if(isVerbose)
	//			  printf("View %d\t",uiLayer);

	//		   RNOK( m_pcH264AVCEncoder[uiLayer]->process( cOutExtBinDataAccessorList[uiLayer],
	//										   apcOriginalPicBuffer[uiLayer],
	//										   apcReconstructPicBuffer[uiLayer],
	//										   &acPicBufferOutputList[uiLayer],
	//										   &acPicBufferUnusedList[uiLayer] ) );


	//		   //
	//		   //WRITE FRAME uiFrame FOR VIEW uiLayer TO SEVERAL FILES AND BUFFERS (OUTPUT, REC, ETC...)
	//		   //

	//			//printf("Writing layer %d frame %d\n",uiLayer,uiFrame);
	//			UInt  uiBytesUsed = 0;
	//			if(m_pcEncoderCodingParameter[0]->isDebug()){
	//				if(isVerbose)
	//					printf("Write per debug\n");				
	//				RNOK( xWrite  ( cOutExtBinDataAccessorList[uiLayer],uiBytesUsed) );
	//			}
	//			else{
	//				RNOK(xSend(cOutExtBinDataAccessorList[uiLayer]));
	//				
	//			}
	//			
	//			//m_apcUDPController->send("Test");
	//			
	//					
	//			uiWrittenBytes[uiLayer]   += uiBytesUsed;

	//		  
	//			//printf("Releasing layer %d frame %d\n",uiLayer,uiFrame);

	//			
	//			// The files c:/inputs/rec_X.yuv get filled here
	//			if(!m_pcEncoderCodingParameter[0]->isParallel()){
	//				printf("Write for non-parallel mode\n");
	//				RNOK( xWrite  ( acPicBufferOutputList[uiLayer], uiLayer ) );
	//			}
	//			else
	//			{
	//				RNOK( xRelease( acPicBufferOutputList[uiLayer], uiLayer ) );
	//			}
	//				//printf("Doing the xRelease for view %d\n",uiLayer);
	//			RNOK( xRelease( acPicBufferUnusedList[uiLayer], uiLayer ) );
	//				//printf("Size of the REC buffer [%d]=%d\n",uiLayer,acPicBufferOutputList[uiLayer].size());
	//			
	//	//}//endif
	//	
	//	
	//		
	//}//endfor vista

	//		//
	//		// After processing the two views, increase the timestamp
	//		//

	//
	//
	//	
 // }//endfor frame


  // write everything to the output at the very end
  //while(LayerBuffer[0].size()&&LayerBuffer[1].size())
  //{
	 // for( uiLayer = 0; uiLayer < uiNumViews; uiLayer++ )
	 // {
		//  
	 // }
  //}
	
  // Compare the sizes of the Output buffers

  if(isVerbose){
	  printf("Size of the REC buffer [0]=%d\n",acPicBufferOutputList[0].size());
	  printf("Size of the REC buffer [1]=%d\n",acPicBufferOutputList[1].size()); // dropped the stray uiLayer argument: the format string has only one %d
	  printf("Size of the acPicBufferUnusedList[0] buffer=%d\n",acPicBufferUnusedList[0].size());
	  printf("Size of the acPicBufferUnusedList[1] buffer=%d\n\n",acPicBufferUnusedList[1].size());
  }

    // Send the end-of-transmission message
  /*printf("Sending the end of transmission\n");
  
  m_apcRtpPacker->endTransmission();*/

  //===== finish encoding =====
  for( uiLayer = 0; uiLayer < uiNumViews; uiLayer++ )
  {
	  UInt  uiNumCodedFrames = 0;
	  Double  dHighestLayerOutputRate = 0.0;
	  if(isVerbose)
		  printf("Finishing encoding view %d\n",uiLayer);

	  RNOK( m_pcH264AVCEncoder[uiLayer]->finish( cOutExtBinDataAccessorList[uiLayer],
										acPicBufferOutputList,
										acPicBufferUnusedList,
										uiNumCodedFrames,
										dHighestLayerOutputRate ) );


	  //===== write and release NAL unit buffers =====
	  if(m_pcEncoderCodingParameter[0]->isDebug()){
		  RNOK( xWrite  ( cOutExtBinDataAccessorList[uiLayer], uiWrittenBytes[uiLayer]) );
	  }
	  else{
		  RNOK(xSend(cOutExtBinDataAccessorList[uiLayer]));
	  }

  

  }

 
  


  //printf("Tamany del Buffer de REC[0]=%d\n",acPicBufferOutputList[0].size());
  //printf("Tamany del Buffer de REC[1]=%d\n",acPicBufferOutputList[1].size());
  //printf("Tamany del Buffer de acPicBufferUnusedList[0]=%d\n",acPicBufferUnusedList[0].size());
  //printf("Tamany del Buffer de acPicBufferUnusedList[1]=%d\n",acPicBufferUnusedList[1].size());

  for( uiLayer = 0; uiLayer < uiNumViews; uiLayer++ )
  {
	  if(isVerbose)
		  printf("Releasing the auxiliary buffers\n");
	//printf("Size of the REC buffer [%d]=%d\n",uiLayer,acPicBufferOutputList[uiLayer].size());

	//printf("Releasing Output and Unused Buffers for view %d\n",uiLayer);
    if(!m_pcEncoderCodingParameter[0]->isParallel()){
		RNOK( xWrite  ( acPicBufferOutputList[uiLayer], uiLayer ) );
		//printf("xWrite done\n");
	}
	
	//printf("Size of the acPicBufferUnusedList[%d] buffer=%d\n",uiLayer,acPicBufferUnusedList[uiLayer].size());
    RNOK( xRelease( acPicBufferUnusedList[uiLayer], uiLayer ) );
	if(isVerbose)
		printf("xRelease[%d] done\n",uiLayer);
  }

  //printf("Set parameters");
  //===== set parameters and output summary =====
  m_cEncoderIoParameter.nFrames = uiFrame;
  m_cEncoderIoParameter.nResult = 0;

  for( uiLayer = 0; uiLayer < uiNumViews; uiLayer++ ){
	  if(isVerbose)
		  printf("Second loop\n");
	if( ! m_pcEncoderCodingParameter[uiLayer]->getMVCmode() )
	{
		if(isVerbose)
			printf("Entering the loop\n");
		//printf("m_pcEncoderCodingParameter[uiLayer]->getMVCmode()\n");
		UChar   aucParameterSetBuffer[1000];
		BinData cBinData;
		cBinData.reset();
		cBinData.set( aucParameterSetBuffer, 1000 );

		ExtBinDataAccessor cExtBinDataAccessor;
		cBinData.setMemAccessor( cExtBinDataAccessor );
		m_pcH264AVCEncoder[uiLayer]->SetVeryFirstCall();
		RNOK( m_pcH264AVCEncoder[uiLayer]      ->writeParameterSets( &cExtBinDataAccessor, bMoreSets) );
		// Hard-coded to 2; this should be done with NUMLayers
		
		for(i = 0; i < uiNumViews; i++){
			//RNOK( m_pcWriteBitstreamToFile[uiLayer]->writePacket       ( &m_cBinDataStartCode ) );
			//RNOK( m_pcWriteBitstreamToFile[uiLayer]->writePacket       ( &cExtBinDataAccessor ) );
			uiWrittenBytes[i] += 4 + cExtBinDataAccessor.size();
		}
		
		xWriteInit(cExtBinDataAccessor,m_pcEncoderCodingParameter[0]->isDebug());
		//RNOK( m_pcWriteBitstreamToOutput->writePacket       ( &m_cBinDataStartCode ) );
		//RNOK( m_pcWriteBitstreamToOutput->writePacket       ( &cExtBinDataAccessor ) );
	
		cBinData.reset();
	}
  }
//SEI {
  for( uiLayer = 0; uiLayer < uiNumViews; uiLayer++ ){
	    
	  if( m_pcEncoderCodingParameter[uiLayer]->getViewScalInfoSEIEnable() )
	  {
		  
		 //printf("m_pcEncoderCodingParameter[uiLayer]->getViewScalInfoSEIEnable()\n");
		//view scalability information sei message
		 UChar   aucParameterSetBuffer[1000];
		 BinData cBinData;
		 cBinData.reset();
		 cBinData.set( aucParameterSetBuffer, 1000 );

		 ExtBinDataAccessor cExtBinDataAccessor;
		 cBinData.setMemAccessor( cExtBinDataAccessor );
		 RNOK( m_pcH264AVCEncoder[uiLayer]->writeViewScalInfoSEIMessage( &cExtBinDataAccessor ) );
		 // Hard-coded to 2; this should be done with NUMLayers
		for(i = 0; i < uiNumViews; i++){
		 //RNOK( m_pcWriteBitstreamToFile[uiLayer]->writePacket       ( &m_cBinDataStartCode ) );
		 //RNOK( m_pcWriteBitstreamToFile[uiLayer]->writePacket       ( &cExtBinDataAccessor ) );
		 uiWrittenBytes[i] += 4 + cExtBinDataAccessor.size();
		}
		xWriteInit(cExtBinDataAccessor,m_pcEncoderCodingParameter[0]->isDebug());
		 //RNOK( m_pcWriteBitstreamToOutput->writePacket       ( &m_cBinDataStartCode ) );
		 //RNOK( m_pcWriteBitstreamToOutput->writePacket       ( &cExtBinDataAccessor ) );
		 
		 cBinData.reset();

	  }
  }
//SEI }
  
  if( m_pcEncoderCodingParameter[0]->isDebug() )
  {
	   
	 //printf("m_pcWriteBitstreamToOutput\n");
    /*RNOK( m_pcWriteBitstreamToFile[0]->uninit() );  
    RNOK( m_pcWriteBitstreamToFile[0]->destroy() );
	RNOK( m_pcWriteBitstreamToFile[1]->uninit() );  
    RNOK( m_pcWriteBitstreamToFile[1]->destroy() );*/
	RNOK( m_pcWriteBitstreamToOutput->uninit() );  
	//printf("m_pcWriteBitstreamToOutput-uninit()\n");
    RNOK( m_pcWriteBitstreamToOutput->destroy() );
	//printf("m_pcWriteBitstreamToOutput-destroy()\n");
  }

//SEI {
  if(isVerbose)
	  printf("Check m_pcEncoderCodingParameter[0]->getViewScalInfoSEIEnable()\n");
  
  if( m_pcEncoderCodingParameter[0]->getViewScalInfoSEIEnable() )
  {
	    if(isVerbose)
			printf("ViewScalableDealing\n");
	  //printf("m_pcEncoderCodingParameter[0]->getViewScalInfoSEIEnable()\n");
    RNOK    ( ViewScalableDealing() );
  }
//SEI }
   if(isVerbose)
	   printf("Check m_pcEncoderCodingParameter[0]->getMVCmode()\n");
  if( ! m_pcEncoderCodingParameter[0]->getMVCmode() )
  {
	  
	  if(isVerbose)
		  printf("ScalableDealing\n");
	  //printf("m_pcEncoderCodingParameter[0]->getMVCmode()\n");
	RNOK	( ScalableDealing() );
  }

  //==== Close the Assembler =====
	//RNOKR( pcAssembler->destroy (),               -6 );



if(!m_pcEncoderCodingParameter[0]->isDebug())
	m_apcRtpPacker->destroy();


  return Err::m_nOK;
}