Example #1
ErrVal H264AVCEncoderTest::init( Int    argc,
                                 Char** argv )
{
  //===== create and read encoder parameters =====
  RNOK( EncoderCodingParameter::create( m_pcEncoderCodingParameter ) );
  if( Err::m_nOK != m_pcEncoderCodingParameter->init( argc, argv, m_cEncoderIoParameter.cBitstreamFilename ) )
  {
    m_pcEncoderCodingParameter->printHelpMVC(argc, argv);
    return -3;
  }
  m_cEncoderIoParameter.nResult = -1;


  //===== init instances for reading and writing yuv data =====
  UInt uiNumberOfLayers = m_pcEncoderCodingParameter->getMVCmode() ? 1 : m_pcEncoderCodingParameter->getNumberOfLayers();
  for( UInt uiLayer = 0; uiLayer < uiNumberOfLayers; uiLayer++ )
  {
    h264::LayerParameters&  rcLayer = m_pcEncoderCodingParameter->getLayerParameters( uiLayer );

    RNOKS( WriteYuvToFile::create( m_apcWriteYuv[uiLayer], rcLayer.getOutputFilename() ) );
    RNOKS( ReadYuvFile   ::create( m_apcReadYuv [uiLayer] ) );  

    RNOKS( m_apcReadYuv[uiLayer]->init( rcLayer.getInputFilename(),
                                        rcLayer.getFrameHeight  (),
                                        rcLayer.getFrameWidth   () ) ); 
  }


  //===== init bitstream writer =====
  if( m_pcEncoderCodingParameter->getMVCmode() )
  {
  //SEI {
    if( m_pcEncoderCodingParameter->getViewScalInfoSEIEnable() )
    {
      m_cWriteToBitFileTempName                 = m_cEncoderIoParameter.cBitstreamFilename + ".temp";
      m_cWriteToBitFileName                     = m_cEncoderIoParameter.cBitstreamFilename;
      m_cEncoderIoParameter.cBitstreamFilename  = m_cWriteToBitFileTempName;
    }
  //SEI }
    RNOKS( WriteBitstreamToFile::create   ( m_pcWriteBitstreamToFile ) );
    RNOKS( m_pcWriteBitstreamToFile->init ( m_cEncoderIoParameter.cBitstreamFilename ) );  
  }
  else
  {
    m_cWriteToBitFileTempName                 = m_cEncoderIoParameter.cBitstreamFilename + ".temp";
    m_cWriteToBitFileName                     = m_cEncoderIoParameter.cBitstreamFilename;
    m_cEncoderIoParameter.cBitstreamFilename  = m_cWriteToBitFileTempName;
    RNOKS( WriteBitstreamToFile::create   ( m_pcWriteBitstreamToFile ) );
    RNOKS( m_pcWriteBitstreamToFile->init ( m_cEncoderIoParameter.cBitstreamFilename ) );  
  }

  //===== create encoder instance =====
  RNOK( h264::CreaterH264AVCEncoder::create( m_pcH264AVCEncoder ) );


  //===== set start code =====
  m_aucStartCodeBuffer[0] = 0;
  m_aucStartCodeBuffer[1] = 0;
  m_aucStartCodeBuffer[2] = 0;
  m_aucStartCodeBuffer[3] = 1;
  m_cBinDataStartCode.reset ();
  m_cBinDataStartCode.set   ( m_aucStartCodeBuffer, 4 );

  // Extended NAL unit priority is enabled by default, since 6-bit short priority
  // is incompatible with extended 4CIF Palma test set.  Change value to false
  // to enable short ID.
  m_pcEncoderCodingParameter->setExtendedPriorityId( true );

  // Example priority ID assignment: (a) spatial, (b) temporal, (c) quality
  // Other priority assignments can be created by adjusting the mapping table.
  // (J. Ridge, Nokia)
  if ( !m_pcEncoderCodingParameter->getExtendedPriorityId() )
  {
    UInt  uiPriorityId = 0;
    for( UInt uiLayer = 0; uiLayer < m_pcEncoderCodingParameter->getNumberOfLayers(); uiLayer++ )
    {
        UInt uiBitplanes;
        if ( m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getFGSMode() > 0 )
        {
          uiBitplanes = MAX_QUALITY_LEVELS - 1;  
        } else {
          uiBitplanes = (UInt) m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getNumFGSLayers();
          if ( m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getNumFGSLayers() > (Double) uiBitplanes)
          {
            uiBitplanes++;
          }
        }
 /*       for ( UInt uiTempLevel = 0; uiTempLevel <= m_pcEncoderCodingParameter->getLayerParameters( uiLayer ).getDecompositionStages(); uiTempLevel++ )
        {
            for ( UInt uiQualLevel = 0; uiQualLevel <= uiBitplanes; uiQualLevel++ )
            {
                m_pcEncoderCodingParameter->setSimplePriorityMap( uiPriorityId++, uiTempLevel, uiLayer, uiQualLevel );
                AOF( uiPriorityId > ( 1 << PRI_ID_BITS ) );
            }
        }
 JVT-S036  */
    }

    m_pcEncoderCodingParameter->setNumSimplePris( uiPriorityId );
  }

  return Err::m_nOK;
}
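
The "set start code" block above fills m_aucStartCodeBuffer with the four-byte Annex B start code 00 00 00 01 that is later prepended to each NAL unit written to the bitstream file. The following standalone sketch illustrates the same idea outside JSVM; the output file name and the dummy NAL payload are placeholders for illustration only.

#include <cstdio>
#include <vector>

// Write one NAL unit preceded by the 4-byte Annex B start code, i.e. the
// same bytes that m_aucStartCodeBuffer holds in the example above.
static bool writeAnnexBNalUnit( std::FILE* pFile, const std::vector<unsigned char>& rcPayload )
{
  static const unsigned char aucStartCode[4] = { 0, 0, 0, 1 };
  if( std::fwrite( aucStartCode, 1, sizeof( aucStartCode ), pFile ) != sizeof( aucStartCode ) ) return false;
  if( std::fwrite( rcPayload.data(), 1, rcPayload.size(), pFile ) != rcPayload.size() )         return false;
  return true;
}

int main()
{
  std::FILE* pFile = std::fopen( "out.264", "wb" );                  // placeholder output name
  if( !pFile ) return 1;
  std::vector<unsigned char> cNalUnit = { 0x67, 0x42, 0x00, 0x1e };  // dummy payload bytes
  bool bOk = writeAnnexBNalUnit( pFile, cNalUnit );
  std::fclose( pFile );
  return bOk ? 0 : 1;
}
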
Example #2
ErrVal H264AVCEncoderTest::init( Int    argc,
                                 Char** argv )
{
  //===== define the number of views =====
  UInt uiNumberOfViews = atoi(argv[3]);
  //===== create and read encoder parameters =====

  isVerbose = false;
  UInt i=0;
  UInt uiView=0;

  nFreeThread=0;


  xSetProcessingInfo(0,0,0);

  RtpPacker::create(m_apcRtpPacker);
  
 
  for( i = 0; i < uiNumberOfViews; i++ )
  {
    RNOK( EncoderCodingParameter::create( m_pcEncoderCodingParameter[i] ) );
    itoa( i, argv[3], 10 );   // overwrite the view-index argument for this view (itoa is non-standard)

    if( Err::m_nOK != m_pcEncoderCodingParameter[i]->init( argc, argv, m_cEncoderIoParameter.cBitstreamFilename ) )
    {
      printf( "Error in argv\n" );
      m_pcEncoderCodingParameter[i]->printHelpMVC( argc, argv );
      return -3;
    }

    m_cEncoderIoParameter.nResult = -1;
  }

  //===== create output file writer =====

  isVerbose = m_pcEncoderCodingParameter[0]->isVerbose();

  if( m_pcEncoderCodingParameter[0]->isDebug() )
  {
    // The output file is used to check that the output is correct.
    if( isVerbose )
      printf( "Creating the output file for debugging\n" );

    RNOKS( WriteBitstreamToFile::create   ( m_pcWriteBitstreamToOutput ) );
    RNOKS( m_pcWriteBitstreamToOutput->init ( "c:/inputs/output_parallel.264" ) );
  }

  
  //===== init instances for reading and writing yuv data =====
  if( uiNumberOfViews != m_pcEncoderCodingParameter[0]->getNumberOfLayers() )
  {
    printf( "\nParameter error: the number of views does not match.\n" );
    exit( 0 );
  }
  
  
  
  for( uiView = 0; uiView < uiNumberOfViews; uiView++ )
  {
    h264::LayerParameters&  rcLayer = m_pcEncoderCodingParameter[uiView]->getLayerParameters( uiView );

    if( !m_pcEncoderCodingParameter[uiView]->isParallel() )
    {
      RNOKS( WriteYuvToFile::create( m_apcWriteYuv[uiView], rcLayer.getOutputFilename() ) );
    }

    RNOKS( ReadYuvFile   ::create( m_apcReadYuv [uiView] ) );

    RNOKS( m_apcReadYuv[uiView]->init( rcLayer.getInputFilename(),
                                       rcLayer.getFrameHeight  (),
                                       rcLayer.getFrameWidth   () ) );
  }


  //===== init bitstream writer =====
  for( uiView = 0; uiView < uiNumberOfViews; uiView++ )
  {
    if( m_pcEncoderCodingParameter[uiView]->getMVCmode() )
    {
    //SEI {
      if( m_pcEncoderCodingParameter[uiView]->getViewScalInfoSEIEnable() )
      {
        m_cWriteToBitFileTempName                         = m_cEncoderIoParameter.cBitstreamFilename[uiView] + ".temp";
        m_cWriteToBitFileName                             = m_cEncoderIoParameter.cBitstreamFilename[uiView];
        m_cEncoderIoParameter.cBitstreamFilename[uiView]  = m_cWriteToBitFileTempName;
      }
    }
    else
    {
      m_cWriteToBitFileTempName                         = m_cEncoderIoParameter.cBitstreamFilename[uiView] + ".temp";
      m_cWriteToBitFileName                             = m_cEncoderIoParameter.cBitstreamFilename[uiView];
      m_cEncoderIoParameter.cBitstreamFilename[uiView]  = m_cWriteToBitFileTempName;
    }
  }

 

  //===== create encoder instance =====
  for(uiView=0;uiView<uiNumberOfViews;uiView++){ 
	RNOK( h264::CreaterH264AVCEncoder::create( m_pcH264AVCEncoder[uiView] ) );
  }


  //===== set start code =====
  m_aucStartCodeBuffer[0] = 0;
  m_aucStartCodeBuffer[1] = 0;
  m_aucStartCodeBuffer[2] = 0;
  m_aucStartCodeBuffer[3] = 1;
  m_cBinDataStartCode.reset ();
  m_cBinDataStartCode.set   ( m_aucStartCodeBuffer, 4 );

  // Extended NAL unit priority is enabled by default, since 6-bit short priority
  // is incompatible with extended 4CIF Palma test set.  Change value to false
  // to enable short ID.
   for( uiView = 0; uiView < uiNumberOfViews; uiView++ )
   {
     m_pcEncoderCodingParameter[uiView]->setExtendedPriorityId( true );
   }

  // Example priority ID assignment: (a) spatial, (b) temporal, (c) quality
  // Other priority assignments can be created by adjusting the mapping table.
  // (J. Ridge, Nokia)

   
   for( uiView = 0; uiView < uiNumberOfViews; uiView++ )
   {
     if ( !m_pcEncoderCodingParameter[uiView]->getExtendedPriorityId() )
     {
       UInt  uiPriorityId = 0;
       for( UInt uiLayer = 0; uiLayer < m_pcEncoderCodingParameter[uiView]->getNumberOfLayers(); uiLayer++ )
       {
         UInt uiBitplanes;
         if ( m_pcEncoderCodingParameter[uiView]->getLayerParameters( uiLayer ).getFGSMode() > 0 )
         {
           uiBitplanes = MAX_QUALITY_LEVELS - 1;
         }
         else
         {
           uiBitplanes = (UInt) m_pcEncoderCodingParameter[uiView]->getLayerParameters( uiLayer ).getNumFGSLayers();
           if ( m_pcEncoderCodingParameter[uiView]->getLayerParameters( uiLayer ).getNumFGSLayers() > (Double) uiBitplanes )
           {
             uiBitplanes++;
           }
         }
  /*       for ( UInt uiTempLevel = 0; uiTempLevel <= m_pcEncoderCodingParameter[0]->getLayerParameters( uiLayer ).getDecompositionStages(); uiTempLevel++ )
         {
             for ( UInt uiQualLevel = 0; uiQualLevel <= uiBitplanes; uiQualLevel++ )
             {
                 m_pcEncoderCodingParameter[0]->setSimplePriorityMap( uiPriorityId++, uiTempLevel, uiLayer, uiQualLevel );
                 AOF( uiPriorityId > ( 1 << PRI_ID_BITS ) );
             }
         }
  JVT-S036  */
       }

       m_pcEncoderCodingParameter[uiView]->setNumSimplePris( uiPriorityId );
     }
   }

  
  return Err::m_nOK;
}
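
This variant reuses the single-view command line once per view by overwriting argv[3] (the view-count argument) with the current view index before each EncoderCodingParameter::init call, using the non-portable itoa(). Below is a minimal standalone sketch of that argv-rewriting loop with std::snprintf instead of itoa; the argument layout and the usage string are assumptions made for illustration.

#include <cstdio>
#include <cstdlib>

int main( int argc, char** argv )
{
  if( argc < 4 )
  {
    std::printf( "usage: encoder <cfg> <bitstream> <numViews>\n" );  // hypothetical argument layout
    return 1;
  }

  unsigned int uiNumberOfViews = (unsigned int)std::atoi( argv[3] );
  char         acViewIdx[16];

  for( unsigned int uiView = 0; uiView < uiNumberOfViews; uiView++ )
  {
    // Overwrite the view-count argument with the current view index so the
    // same argv can be parsed once per view (example #2 does this with itoa).
    std::snprintf( acViewIdx, sizeof( acViewIdx ), "%u", uiView );
    argv[3] = acViewIdx;
    std::printf( "parsing parameters for view %s\n", argv[3] );
    // ... per-view parameter creation and initialization would go here ...
  }
  return 0;
}
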
Example #3
File: ReadYuvFile.cpp  Project: hwyhit/JSVM
ErrVal ReadYuvFile::xReadPlane( UChar *pucDest, UInt uiBufHeight, UInt uiBufWidth, UInt uiBufStride, UInt uiPicHeight, UInt uiPicWidth, UInt uiStartLine, UInt uiEndLine )
{
  UInt uiClearSize = uiBufWidth - uiPicWidth;

  ROT( 0 > (Int)uiClearSize );
  ROT( uiBufHeight < uiPicHeight );

  // clear skipped buffer above reading section and skip in file
  if( 0 != uiStartLine )
  {
    UInt uiLines = uiStartLine;
    ::memset( pucDest, 0, uiBufWidth * uiLines );
    pucDest += uiBufStride * uiLines;
    RNOKRS(m_cFile.seek( uiPicWidth * uiLines, SEEK_CUR), Err::m_nEndOfFile);
  }


  UInt uiEnd = gMin (uiPicHeight, uiEndLine);

  for( UInt yR = uiStartLine; yR < uiEnd; yR++ )
  {
    UInt uiBytesRead;
    RNOKS( m_cFile.read( pucDest, uiPicWidth, uiBytesRead ) );
    ::memset( &pucDest[uiPicWidth], 0, uiClearSize );
    pucDest += uiBufStride;
  }

  // clear skipped buffer below reading section and skip in file
  if( uiEnd != uiPicHeight )
  {
    UInt uiLines = uiPicHeight - uiEnd;
    ::memset( pucDest, 0, uiBufWidth * uiLines );
    pucDest += uiBufStride * uiLines;
    RNOKRS(m_cFile.seek( uiPicWidth * uiLines, SEEK_CUR), Err::m_nEndOfFile);
  }

  // clear remaining buffer
  if( uiPicHeight != uiBufHeight )
  {
    if( uiEnd != uiPicHeight )
    {
      UInt uiLines = uiBufHeight - uiPicHeight;
      ::memset( pucDest, 0, uiBufWidth * uiLines);
    }
    else
    {
      switch( m_eFillMode )
      {
        case FILL_CLEAR:
        {
          UInt uiLines = uiBufHeight - uiPicHeight;
          ::memset( pucDest, 0, uiBufWidth * uiLines);
        }
        break;
        case FILL_FRAME:
        {
          for( UInt y = uiPicHeight; y < uiBufHeight; y++ )
          {
            memcpy( pucDest, pucDest - uiBufStride, uiBufStride );
            pucDest += uiBufStride;
          }
        }
        break;
        case FILL_FIELD:
        {
          ROT( (uiBufHeight - uiPicHeight) & 1 );
          for( UInt y = uiPicHeight; y < uiBufHeight; y+=2 )
          {
            memcpy( pucDest, pucDest - 2*uiBufStride, 2*uiBufStride );
            pucDest += 2*uiBufStride;
          }
        }
        break;
        default:
          AF()
        break;
      }
    }
  }

  return Err::m_nOK;
}
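
xReadPlane copies one uiPicWidth x uiPicHeight plane from the YUV file into a buffer whose rows are uiBufStride bytes apart, zeroing the right-hand padding and any rows outside the requested section. The self-contained sketch below shows that core row-by-row copy; the QCIF dimensions and the input file name are illustrative assumptions, not taken from the project.

#include <cstdio>
#include <cstring>
#include <vector>

// Read one uiPicWidth x uiPicHeight plane from a raw file into a buffer with
// uiBufStride bytes per row, clearing the right-hand padding of each row.
static bool readPaddedPlane( std::FILE* pFile, unsigned char* pucDest,
                             unsigned int uiPicWidth, unsigned int uiPicHeight,
                             unsigned int uiBufStride )
{
  for( unsigned int y = 0; y < uiPicHeight; y++ )
  {
    if( std::fread( pucDest, 1, uiPicWidth, pFile ) != uiPicWidth ) return false;
    std::memset( pucDest + uiPicWidth, 0, uiBufStride - uiPicWidth );  // clear padding
    pucDest += uiBufStride;
  }
  return true;
}

int main()
{
  const unsigned int uiPicWidth = 176, uiPicHeight = 144, uiBufStride = 192;  // assumed QCIF luma plane
  std::vector<unsigned char> cBuffer( uiBufStride * uiPicHeight );

  std::FILE* pFile = std::fopen( "input_qcif.yuv", "rb" );  // placeholder file name
  if( !pFile ) return 1;
  bool bOk = readPaddedPlane( pFile, cBuffer.data(), uiPicWidth, uiPicHeight, uiBufStride );
  std::fclose( pFile );
  return bOk ? 0 : 1;
}
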
Example #4
File: Vui.cpp  Project: hwyhit/JSVM
ErrVal VUI::read( HeaderSymbolReadIf *pcReadIf )
{
  RNOKS( m_cAspectRatioInfo.read( pcReadIf ) );

  RNOKS( pcReadIf->getFlag( m_bOverscanInfoPresentFlag,          "VUI: overscan_info_present_flag"));
  if( m_bOverscanInfoPresentFlag )
  {
    RNOKS( pcReadIf->getFlag( m_bOverscanAppropriateFlag,        "VUI: overscan_appropriate_flag"));
  }

  RNOKS( m_cVideoSignalType.read( pcReadIf ) );
  RNOKS( m_cChromaLocationInfo.read( pcReadIf ) );

  m_acLayerInfo           .uninit();
  m_acTimingInfo          .uninit();
  m_acNalHrd              .uninit();
  m_acVclHrd              .uninit();
  m_abLowDelayHrdFlag     .uninit();
  m_abPicStructPresentFlag.uninit();
  m_acLayerInfo           .init( 1 );
  m_acTimingInfo          .init( 1 );
  m_acNalHrd              .init( 1 );
  m_acVclHrd              .init( 1 );
  m_abLowDelayHrdFlag     .init( 1 );
  m_abPicStructPresentFlag.init( 1 );

  // fill in the LayerInfo of AVC compatible layer
  m_uiDefaultIdx = 0;
  m_acLayerInfo[m_uiDefaultIdx].setDependencyID(0);
  m_acLayerInfo[m_uiDefaultIdx].setQualityLevel(0);
  m_acLayerInfo[m_uiDefaultIdx].setTemporalId(0);

  RNOKS( m_acTimingInfo.get(m_uiDefaultIdx).read( pcReadIf ) );
  RNOKS( m_acNalHrd.get(m_uiDefaultIdx).read( pcReadIf ) );
  RNOKS( m_acVclHrd.get(m_uiDefaultIdx).read( pcReadIf ) );
  if( m_acNalHrd.get(m_uiDefaultIdx).getHrdParametersPresentFlag() || m_acVclHrd.get(m_uiDefaultIdx).getHrdParametersPresentFlag() )
  {
    RNOKS( pcReadIf->getFlag( m_abLowDelayHrdFlag[m_uiDefaultIdx],                "VUI: low_delay_hrd_flag"));
  }
  RNOKS( pcReadIf->getFlag( m_abPicStructPresentFlag[m_uiDefaultIdx],             "VUI: pic_struct_present_flag"));

  RNOKS( m_cBitstreamRestriction.read( pcReadIf ) );
  return Err::m_nOK;
}
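
VUI::read pulls a series of one-bit flags (overscan_info_present_flag, low_delay_hrd_flag, pic_struct_present_flag, ...) from the bitstream through HeaderSymbolReadIf::getFlag, and reads a conditional flag only when its guarding flag is set. The sketch below shows a minimal MSB-first flag reader with the same conditional pattern; it is an illustration only, not JSVM's HeaderSymbolReadIf implementation.

#include <cstddef>
#include <vector>

// Minimal MSB-first bit reader for one-bit flags, sketching what a
// getFlag()-style call conceptually does on the byte buffer.
class FlagReader
{
public:
  explicit FlagReader( const std::vector<unsigned char>& rcBuffer )
    : m_rcBuffer( rcBuffer ), m_uiBitPos( 0 ) {}

  bool getFlag( bool& rbFlag )
  {
    std::size_t uiByte = m_uiBitPos >> 3;
    if( uiByte >= m_rcBuffer.size() ) return false;   // out of data
    unsigned int uiShift = 7u - ( m_uiBitPos & 7u );  // MSB first
    rbFlag = ( ( m_rcBuffer[uiByte] >> uiShift ) & 1u ) != 0;
    m_uiBitPos++;
    return true;
  }

private:
  const std::vector<unsigned char>& m_rcBuffer;
  std::size_t                       m_uiBitPos;
};

int main()
{
  std::vector<unsigned char> cData = { 0xA0 };  // bit pattern: 1 0 1 0 0 0 0 0
  FlagReader cReader( cData );

  bool bOverscanInfoPresentFlag = false;
  bool bOverscanAppropriateFlag = false;

  cReader.getFlag( bOverscanInfoPresentFlag );     // reads 1
  if( bOverscanInfoPresentFlag )
  {
    cReader.getFlag( bOverscanAppropriateFlag );   // reads 0, only if the guard flag was set
  }
  return 0;
}
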