Exemplo n.º 1
0
//-------------------------------------------------------------------
// Initialise the source reader
//
// Creates a Media Foundation source reader for the given file URL with
// video processing enabled, selects the video stream, then caches the
// stream format and duration.
//
// filename: URL/path of the media file to open (wide string)
// returns:  S_OK on success, otherwise the failing HRESULT
//
HRESULT VidReader::initSourceReader(WCHAR *filename)
{
    HRESULT hr = S_OK;
    IMFAttributes *pAttributes = NULL;

    SafeRelease(&m_pReader);

    // Configure the source reader to perform video processing
    hr = MFCreateAttributes(&pAttributes, 1);
    if (FAILED(hr)) goto done;
    hr = pAttributes->SetUINT32(MF_SOURCE_READER_ENABLE_VIDEO_PROCESSING, TRUE);
    if (FAILED(hr)) goto done;

    // Create the source reader from the URL
    hr = MFCreateSourceReaderFromURL(filename, pAttributes, &m_pReader);
    if (FAILED(hr)) goto done;

    // Attempt to find a video stream
    hr = selectVideoStream();
    if (FAILED(hr)) goto done;

    // Get the stream format
    hr = getVideoFormat();
    if (FAILED(hr)) goto done;

    // Get the duration
    hr = getDuration();

done:
    // Fix: release the attribute store on every exit path (it was never
    // released before, leaking one COM reference per call).
    SafeRelease(&pAttributes);
    return hr;
}
Exemplo n.º 2
0
//-------------------------------------------------------------------
// Read a frame and provide access to the data
//
// Pulls samples from the first video stream until a frame arrives
// (handling mid-stream media-type changes), converts it to a single
// contiguous buffer and locks it, returning the pixel data via ppData.
// The locked buffer is kept in m_pBuffer; the caller is responsible
// for unlocking/releasing it before the next frame is requested.
//
// ppData:  receives a pointer to the locked frame data (32bpp assumed)
// returns: S_OK on success, MF_E_END_OF_STREAM at end of stream,
//          E_ABORT if no source reader exists, or a failing HRESULT
//
HRESULT VidReader::getReadBuffer(BYTE **ppData)
{
    HRESULT     hr = S_OK;
    DWORD       dwFlags = 0;
    DWORD       cbBitmapData = 0;       // Size of data, in bytes
    IMFSample   *pSample = NULL;        // Fix: was uninitialised — SafeRelease
                                        // at done: read garbage if ReadSample
                                        // failed before assigning it

    if (!m_pReader) return E_ABORT; // if no source reader run away

    while (1)
    {
        hr = m_pReader->ReadSample(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
            0, NULL, &dwFlags, &m_timestamp, &pSample );

        if (FAILED(hr)) goto done;

        if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM)
        {
            break;
        }

        if (dwFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)
        {
            // Type change. Get the new format.
            hr = getVideoFormat();
            if (FAILED(hr)) goto done;
        }

        if (pSample == NULL)
        {
            // No sample delivered with this call (e.g. stream tick); retry.
            continue;
        }

        // We got a sample.
        break;
    }

    if (pSample)
    {
        // Expected stride for a 32-bit-per-pixel frame.
        UINT32 pitch = 4 * m_imagewidth;

        // NOTE(review): if m_pBuffer can still hold a previous frame here it
        // leaks — presumably the caller releases it between frames; verify.
        hr = pSample->ConvertToContiguousBuffer(&m_pBuffer);
        if (FAILED(hr)) goto done;

        hr = m_pBuffer->Lock(ppData, NULL, &cbBitmapData);
        if (FAILED(hr)) goto done;

        assert(cbBitmapData == (pitch * m_imageheight));
    }
    else
    {
        // Loop exited via ENDOFSTREAM without delivering a sample.
        hr = MF_E_END_OF_STREAM;
    }

done:
    SafeRelease(&pSample);
    return hr;
}
Exemplo n.º 3
0
/* Saves the device's current video format (frame size, frame rate and pixel
   format) into the given configuration file section. The pixel format is
   stored as a FourCC string when printable, otherwise as 8 hex digits. */
void VideoDevice::saveConfiguration(Misc::ConfigurationFileSection& cfg) const
	{
	/* Get the device's current video format: */
	VideoDataFormat currentFormat=getVideoFormat();
	
	/* Save the current frame size: */
	cfg.storeValueWC("./frameSize",currentFormat.size,Misc::CFixedArrayValueCoder<unsigned int,2>());
	
	/* Save the current frame rate (frames per second = interval denominator/counter): */
	cfg.storeValue("./frameRate",double(currentFormat.frameIntervalDenominator)/double(currentFormat.frameIntervalCounter));
	
	/* Check if the current pixel format is a valid FourCC code (printable ASCII, no quote characters): */
	char fourCCBuffer[5];
	currentFormat.getFourCC(fourCCBuffer);
	bool valid=true;
	for(int i=0;i<4&&valid;++i)
		valid=fourCCBuffer[i]>=32&&fourCCBuffer[i]<127&&fourCCBuffer[i]!='"';
	if(valid)
		{
		/* Save the current pixel format as a FourCC code: */
		cfg.storeValue<std::string>("./pixelFormat",fourCCBuffer);
		}
	else
		{
		/* Save the current pixel format as a hexadecimal number, most-significant nibble first: */
		char hexBuffer[9];
		unsigned int pixelFormat=currentFormat.pixelFormat;
		for(int i=0;i<8;++i,pixelFormat>>=4)
			{
			if((pixelFormat&0x0fU)>=10U)
				hexBuffer[7-i]=char((pixelFormat&0x0fU)-10U+'a'); /* Fix: subtract 10 so nibbles 10..15 map to 'a'..'f' (previously 10 mapped to 'k') */
			else
				hexBuffer[7-i]=char((pixelFormat&0x0fU)+'0');
			}
		hexBuffer[8]='\0';
		cfg.storeString("./pixelFormatHex",hexBuffer);
		}
	}
Exemplo n.º 4
0
// Populates the encoder's internal sequence parameter set (m_cSPS) from the
// configured encoder settings: profile/tier/level constraint flags, picture
// geometry, CU/TU quadtree limits, per-channel bit depths, coding-tool flags,
// temporal layering / DPB sizes, long-term reference pictures and optional
// VUI/HRD parameters. The order of the setter calls follows the HM encoder
// convention and should not be rearranged casually.
Void TEncTop::xInitSPS()
{
  // Fill the general profile/tier/level structure with the configured
  // profile, level, tier and bitstream-constraint flags.
  ProfileTierLevel& profileTierLevel = *m_cSPS.getPTL()->getGeneralPTL();
  profileTierLevel.setLevelIdc(m_level);
  profileTierLevel.setTierFlag(m_levelTier);
  profileTierLevel.setProfileIdc(m_profile);
  profileTierLevel.setProfileCompatibilityFlag(m_profile, 1);
  profileTierLevel.setProgressiveSourceFlag(m_progressiveSourceFlag);
  profileTierLevel.setInterlacedSourceFlag(m_interlacedSourceFlag);
  profileTierLevel.setNonPackedConstraintFlag(m_nonPackedConstraintFlag);
  profileTierLevel.setFrameOnlyConstraintFlag(m_frameOnlyConstraintFlag);
  profileTierLevel.setBitDepthConstraint(m_bitDepthConstraintValue);
  profileTierLevel.setChromaFormatConstraint(m_chromaFormatConstraintValue);
  profileTierLevel.setIntraConstraintFlag(m_intraConstraintFlag);
  profileTierLevel.setLowerBitRateConstraintFlag(m_lowerBitRateConstraintFlag);

  // Cross-mark Main/Main10 compatibility where the configured constraints
  // make one profile's bitstream decodable by the other's decoders.
  if ((m_profile == Profile::MAIN10) && (m_bitDepth[CHANNEL_TYPE_LUMA] == 8) && (m_bitDepth[CHANNEL_TYPE_CHROMA] == 8))
  {
    /* The above constraint is equal to Profile::MAIN */
    profileTierLevel.setProfileCompatibilityFlag(Profile::MAIN, 1);
  }
  if (m_profile == Profile::MAIN)
  {
    /* A Profile::MAIN10 decoder can always decode Profile::MAIN */
    profileTierLevel.setProfileCompatibilityFlag(Profile::MAIN10, 1);
  }
  /* XXX: should Main be marked as compatible with still picture? */
  /* XXX: may be a good idea to refactor the above into a function
   * that chooses the actual compatibility based upon options */

  // Picture geometry and coding-block (CU) quadtree configuration.
  m_cSPS.setPicWidthInLumaSamples  ( m_iSourceWidth      );
  m_cSPS.setPicHeightInLumaSamples ( m_iSourceHeight     );
  m_cSPS.setConformanceWindow      ( m_conformanceWindow );
  m_cSPS.setMaxCUWidth             ( m_maxCUWidth        );
  m_cSPS.setMaxCUHeight            ( m_maxCUHeight       );
  m_cSPS.setMaxTotalCUDepth        ( m_maxTotalCUDepth   );
  m_cSPS.setChromaFormatIdc( m_chromaFormatIDC);
  m_cSPS.setLog2DiffMaxMinCodingBlockSize(m_log2DiffMaxMinCodingBlockSize);

  // Derive log2 of the minimum CU size (integer log2 of minCUSize).
  Int minCUSize = m_cSPS.getMaxCUWidth() >> ( m_cSPS.getLog2DiffMaxMinCodingBlockSize() );
  Int log2MinCUSize = 0;
  while(minCUSize > 1)
  {
    minCUSize >>= 1;
    log2MinCUSize++;
  }

  m_cSPS.setLog2MinCodingBlockSize(log2MinCUSize);

  // PCM mode and transform-unit (TU) quadtree settings.
  m_cSPS.setPCMLog2MinSize (m_uiPCMLog2MinSize);
  m_cSPS.setUsePCM        ( m_usePCM           );
  m_cSPS.setPCMLog2MaxSize( m_pcmLog2MaxSize  );

  m_cSPS.setQuadtreeTULog2MaxSize( m_uiQuadtreeTULog2MaxSize );
  m_cSPS.setQuadtreeTULog2MinSize( m_uiQuadtreeTULog2MinSize );
  m_cSPS.setQuadtreeTUMaxDepthInter( m_uiQuadtreeTUMaxDepthInter    );
  m_cSPS.setQuadtreeTUMaxDepthIntra( m_uiQuadtreeTUMaxDepthIntra    );

  // TMVP is signalled as present when mode 1 (always on) or 2 (per-slice).
  m_cSPS.setTMVPFlagsPresent((getTMVPModeId() == 2 || getTMVPModeId() == 1));

  m_cSPS.setMaxTrSize   ( 1 << m_uiQuadtreeTULog2MaxSize );

  m_cSPS.setUseAMP ( m_useAMP );

  // Per-channel (luma/chroma) bit depths, QP bit-depth offsets and PCM depths.
  for (UInt channelType = 0; channelType < MAX_NUM_CHANNEL_TYPE; channelType++)
  {
    m_cSPS.setBitDepth      (ChannelType(channelType), m_bitDepth[channelType] );
#if O0043_BEST_EFFORT_DECODING
    m_cSPS.setStreamBitDepth(ChannelType(channelType), m_bitDepth[channelType] );
#endif
    m_cSPS.setQpBDOffset  (ChannelType(channelType), (6 * (m_bitDepth[channelType] - 8)));
    m_cSPS.setPCMBitDepth (ChannelType(channelType), m_PCMBitDepth[channelType]         );
  }

  // Coding-tool enable flags (range extensions, SAO, RDPCM, etc.).
  m_cSPS.setUseExtendedPrecision(m_useExtendedPrecision);
  m_cSPS.setUseHighPrecisionPredictionWeighting(m_useHighPrecisionPredictionWeighting);

  m_cSPS.setUseSAO( m_bUseSAO );
  m_cSPS.setUseResidualRotation(m_useResidualRotation);
  m_cSPS.setUseSingleSignificanceMapContext(m_useSingleSignificanceMapContext);
  m_cSPS.setUseGolombRiceParameterAdaptation(m_useGolombRiceParameterAdaptation);
  m_cSPS.setAlignCABACBeforeBypass(m_alignCABACBeforeBypass);

  for (UInt signallingModeIndex = 0; signallingModeIndex < NUMBER_OF_RDPCM_SIGNALLING_MODES; signallingModeIndex++)
  {
    m_cSPS.setUseResidualDPCM(RDPCMSignallingMode(signallingModeIndex), m_useResidualDPCM[signallingModeIndex]);
  }

  // Temporal layering and per-layer decoded-picture-buffer / reorder limits.
  m_cSPS.setMaxTLayers( m_maxTempLayer );
  m_cSPS.setTemporalIdNestingFlag( ( m_maxTempLayer == 1 ) ? true : false );

  for (Int i = 0; i < min(m_cSPS.getMaxTLayers(),(UInt) MAX_TLAYER); i++ )
  {
    m_cSPS.setMaxDecPicBuffering(m_maxDecPicBuffering[i], i);
    m_cSPS.setNumReorderPics(m_numReorderPics[i], i);
  }

  m_cSPS.setPCMFilterDisableFlag  ( m_bPCMFilterDisableFlag );
  m_cSPS.setDisableIntraReferenceSmoothing( m_disableIntraReferenceSmoothing );
  m_cSPS.setScalingListFlag ( (m_useScalingListId == SCALING_LIST_OFF) ? 0 : 1 );
  m_cSPS.setUseStrongIntraSmoothing( m_useStrongIntraSmoothing );
  m_cSPS.setVuiParametersPresentFlag(getVuiParametersPresentFlag());

  // Copy VUI (video usability information) fields from the encoder config
  // when VUI signalling is enabled.
  if (m_cSPS.getVuiParametersPresentFlag())
  {
    TComVUI* pcVUI = m_cSPS.getVuiParameters();
    pcVUI->setAspectRatioInfoPresentFlag(getAspectRatioInfoPresentFlag());
    pcVUI->setAspectRatioIdc(getAspectRatioIdc());
    pcVUI->setSarWidth(getSarWidth());
    pcVUI->setSarHeight(getSarHeight());
    pcVUI->setOverscanInfoPresentFlag(getOverscanInfoPresentFlag());
    pcVUI->setOverscanAppropriateFlag(getOverscanAppropriateFlag());
    pcVUI->setVideoSignalTypePresentFlag(getVideoSignalTypePresentFlag());
    pcVUI->setVideoFormat(getVideoFormat());
    pcVUI->setVideoFullRangeFlag(getVideoFullRangeFlag());
    pcVUI->setColourDescriptionPresentFlag(getColourDescriptionPresentFlag());
    pcVUI->setColourPrimaries(getColourPrimaries());
    pcVUI->setTransferCharacteristics(getTransferCharacteristics());
    pcVUI->setMatrixCoefficients(getMatrixCoefficients());
    pcVUI->setChromaLocInfoPresentFlag(getChromaLocInfoPresentFlag());
    pcVUI->setChromaSampleLocTypeTopField(getChromaSampleLocTypeTopField());
    pcVUI->setChromaSampleLocTypeBottomField(getChromaSampleLocTypeBottomField());
    pcVUI->setNeutralChromaIndicationFlag(getNeutralChromaIndicationFlag());
    pcVUI->setDefaultDisplayWindow(getDefaultDisplayWindow());
    pcVUI->setFrameFieldInfoPresentFlag(getFrameFieldInfoPresentFlag());
    pcVUI->setFieldSeqFlag(false);
    pcVUI->setHrdParametersPresentFlag(false);
    pcVUI->getTimingInfo()->setPocProportionalToTimingFlag(getPocProportionalToTimingFlag());
    pcVUI->getTimingInfo()->setNumTicksPocDiffOneMinus1   (getNumTicksPocDiffOneMinus1()   );
    pcVUI->setBitstreamRestrictionFlag(getBitstreamRestrictionFlag());
    pcVUI->setTilesFixedStructureFlag(getTilesFixedStructureFlag());
    pcVUI->setMotionVectorsOverPicBoundariesFlag(getMotionVectorsOverPicBoundariesFlag());
    pcVUI->setMinSpatialSegmentationIdc(getMinSpatialSegmentationIdc());
    pcVUI->setMaxBytesPerPicDenom(getMaxBytesPerPicDenom());
    pcVUI->setMaxBitsPerMinCuDenom(getMaxBitsPerMinCuDenom());
    pcVUI->setLog2MaxMvLengthHorizontal(getLog2MaxMvLengthHorizontal());
    pcVUI->setLog2MaxMvLengthVertical(getLog2MaxMvLengthVertical());
  }
  // Long-term reference picture signalling (all SPS entries disabled here).
  m_cSPS.setNumLongTermRefPicSPS(NUM_LONG_TERM_REF_PIC_SPS);
  assert (NUM_LONG_TERM_REF_PIC_SPS <= MAX_NUM_LONG_TERM_REF_PICS);
  for (Int k = 0; k < NUM_LONG_TERM_REF_PIC_SPS; k++)
  {
    m_cSPS.setLtRefPicPocLsbSps(k, 0);
    m_cSPS.setUsedByCurrPicLtSPSFlag(k, 0);
  }
  // HRD parameters are only set up when timing/decoding-unit SEI messages
  // are enabled; decoding-unit granularity requires slice segmentation.
  if( getPictureTimingSEIEnabled() || getDecodingUnitInfoSEIEnabled() )
  {
    Bool useDUParameters = (getSliceMode() > 0) || (getSliceSegmentMode() > 0);
    m_cSPS.setHrdParameters( getFrameRate(), useDUParameters, getTargetBitrate(), ( getIntraPeriod() > 0 ) );
  }
  if( getBufferingPeriodSEIEnabled() || getPictureTimingSEIEnabled() || getDecodingUnitInfoSEIEnabled() )
  {
    m_cSPS.getVuiParameters()->setHrdParametersPresentFlag( true );
  }
}
Exemplo n.º 5
0
/* Configures the device's video format from the given configuration file
   section: reads requested frame size and frame rate, picks the closest
   supported format, and applies it. If the device reports no supported
   formats, the current format is left unchanged. */
void VideoDevice::configure(const Misc::ConfigurationFileSection& cfg)
	{
	/* Get the device's current video format to use as default: */
	VideoDataFormat currentFormat=getVideoFormat();
	
	/* Get the list of the device's supported video formats: */
	std::vector<VideoDataFormat> deviceFormats=getVideoFormatList();
	
	/* Read the requested frame size: */
	currentFormat.size[0]=cfg.retrieveValue<unsigned int>("./width",currentFormat.size[0]);
	currentFormat.size[1]=cfg.retrieveValue<unsigned int>("./height",currentFormat.size[1]);
	
	/* Find the best-matching frame size among the supported video formats: */
	std::vector<VideoDataFormat>::iterator bestSizeMatch=deviceFormats.end();
	double bestSizeMatchRatio=1.0e10;
	for(std::vector<VideoDataFormat>::iterator dfIt=deviceFormats.begin();dfIt!=deviceFormats.end();++dfIt)
		{
		/* Calculate the format's size mismatch (sum of per-axis ratios >= 1.0; 2.0 means exact match): */
		double sizeMatchRatio=0.0;
		for(int i=0;i<2;++i)
			{
			if(dfIt->size[i]<currentFormat.size[i])
				sizeMatchRatio+=double(currentFormat.size[i])/double(dfIt->size[i]);
			else
				sizeMatchRatio+=double(dfIt->size[i])/double(currentFormat.size[i]);
			}
		if(bestSizeMatchRatio>sizeMatchRatio)
			{
			bestSizeMatch=dfIt;
			bestSizeMatchRatio=sizeMatchRatio;
			}
		}
	if(bestSizeMatch==deviceFormats.end())
		{
		/* Fix: the device reported no video formats; bail out instead of dereferencing end(): */
		return;
		}
	currentFormat.size[0]=bestSizeMatch->size[0];
	currentFormat.size[1]=bestSizeMatch->size[1];
	
	/* Read the requested frame rate: */
	double frameRate=cfg.retrieveValue<double>("./frameRate",double(currentFormat.frameIntervalDenominator)/double(currentFormat.frameIntervalCounter));
	
	/* Find the best-matching frame rate among the supporting video formats: */
	std::vector<VideoDataFormat>::iterator bestRateMatch=deviceFormats.end();
	double bestRateMatchRatio=1.0e10;
	for(std::vector<VideoDataFormat>::iterator dfIt=deviceFormats.begin();dfIt!=deviceFormats.end();++dfIt)
		if(dfIt->size[0]==currentFormat.size[0]&&dfIt->size[1]==currentFormat.size[1])
			{
			/* Calculate the format's frame rate mismatch: */
			double rate=double(dfIt->frameIntervalDenominator)/double(dfIt->frameIntervalCounter);
			double rateMatchRatio;
			if(rate<frameRate)
				rateMatchRatio=frameRate/rate;
			else
				rateMatchRatio=rate/frameRate;
			if(bestRateMatchRatio>rateMatchRatio)
				{
				bestRateMatch=dfIt;
				bestRateMatchRatio=rateMatchRatio;
				}
			}
	if(bestRateMatch!=deviceFormats.end())
		{
		/* Fix: only dereference when a rate match was found (guards against end()): */
		currentFormat.pixelFormat=bestRateMatch->pixelFormat;
		currentFormat.frameIntervalCounter=bestRateMatch->frameIntervalCounter;
		currentFormat.frameIntervalDenominator=bestRateMatch->frameIntervalDenominator;
		}
	
	/* Set the selected video format: */
	setVideoFormat(currentFormat);
	}
Exemplo n.º 6
0
Profile Configuration::getProfile() const{
	// Build a Profile snapshot of the currently selected configuration.
	// Each optional setting is copied into the profile only if its getter
	// reports that a value is set.
	Profile result(getName());

	// --- Video settings ---
	MediaElement::Container containerValue;
	if(getContainer(containerValue))
		result.setContainer(containerValue);
	int videoModeRow;
	if(getVideoMode(videoModeRow))
		result.setVideoMode(videoModeRow);
	MediaElement::Format vFormat;
	if(getVideoFormat(vFormat))
		result.setVideoFormat(vFormat);
	MediaElement::Encoder vEncoder;
	if(getVideoEncoder(vEncoder))
		result.setVideoEncoder(vEncoder);
	MediaElement::Bitrate vBitrate;
	if(getVideoBitrate(vBitrate))
		result.setVideoBitrate(vBitrate);
	MediaElement::Resolution vResolution;
	if(getVideoResolution(vResolution))
		result.setVideoResolution(vResolution);
	MediaElement::FFpreset vPreset;
	if(getVideoFFpreset(vPreset))
		result.setVideoFFpreset(vPreset);
	MediaElement::Framerate vFramerate;
	if(getVideoFramerate(vFramerate))
		result.setVideoFramerate(vFramerate);

	// --- Audio settings ---
	int audioModeRow;
	if(getAudioMode(audioModeRow))
		result.setAudioMode(audioModeRow);
	MediaElement::Format aFormat;
	if(getAudioFormat(aFormat))
		result.setAudioFormat(aFormat);
	MediaElement::Encoder aEncoder;
	if(getAudioEncoder(aEncoder))
		result.setAudioEncoder(aEncoder);
	MediaElement::AudioGrade aGrade;
	if(getAudioGrade(aGrade))
		result.setAudioGrade(aGrade);
	MediaElement::Samplerate aSamplerate;
	if(getAudioSamplerate(aSamplerate))
		result.setAudioSamplerate(aSamplerate);
	MediaElement::Channel aChannel;
	if(getAudioChannel(aChannel))
		result.setAudioChannel(aChannel);

	// --- Manual settings are always copied ---
	std::list<Profile::ManualSettings> manualSettings;
	getManualSettings(manualSettings);
	result.setManualSettings(manualSettings);
	return result;
}