Code example #1
File: Pipe.cpp Project: DKlaper/gsw-DepParser
void Pipe::Initialize() {
  CreateDictionary();
  CreateReader();
  CreateWriter();
  CreateDecoder();
  parameters_ = new Parameters;
}
Code example #2
nsresult
H264Converter::CreateDecoderAndInit(MediaRawData* aSample)
{
  RefPtr<MediaByteBuffer> extra_data =
    mp4_demuxer::AnnexB::ExtractExtraData(aSample);
  if (!mp4_demuxer::AnnexB::HasSPS(extra_data)) {
    return NS_ERROR_NOT_INITIALIZED;
  }
  UpdateConfigFromExtraData(extra_data);

  nsresult rv =
    CreateDecoder(mCurrentConfig, /* DecoderDoctorDiagnostics* */ nullptr);

  if (NS_SUCCEEDED(rv)) {
    // Queue the incoming sample.
    mPendingSample = aSample;

    mDecoder->Init()
      ->Then(AbstractThread::GetCurrent()->AsTaskQueue(), __func__, this,
             &H264Converter::OnDecoderInitDone,
             &H264Converter::OnDecoderInitFailed)
      ->Track(mInitPromiseRequest);
    return NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER;
  }
  return rv;
}
Code example #3
nsresult
H264Converter::CreateDecoderAndInit(MediaRawData* aSample)
{
  RefPtr<MediaByteBuffer> extra_data =
    mp4_demuxer::AnnexB::ExtractExtraData(aSample);
  if (!mp4_demuxer::AnnexB::HasSPS(extra_data)) {
    return NS_ERROR_NOT_INITIALIZED;
  }
  UpdateConfigFromExtraData(extra_data);

  nsresult rv = CreateDecoder(/* DecoderDoctorDiagnostics* */ nullptr);

  if (NS_SUCCEEDED(rv)) {
    // Queue the incoming sample.
    mMediaRawSamples.AppendElement(aSample);

    RefPtr<H264Converter> self = this;

    mInitPromiseRequest.Begin(mDecoder->Init()
      ->Then(AbstractThread::GetCurrent()->AsTaskQueue(), __func__, this,
             &H264Converter::OnDecoderInitDone,
             &H264Converter::OnDecoderInitFailed));
  }
  return rv;
}
Code example #4
nsresult
MediaCodecDataDecoder::InitDecoder(Surface::Param aSurface)
{
  mDecoder = CreateDecoder(mMimeType);

  if (!mDecoder) {
    INVOKE_CALLBACK(Error,
                    MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__));
    return NS_ERROR_FAILURE;
  }

  // Check if the video codec supports adaptive playback or not.
  if (aSurface && java::HardwareCodecCapabilityUtils::CheckSupportsAdaptivePlayback(
                    mDecoder, nsCString(TranslateMimeType(mMimeType)))) {
      // TODO: may need to find a way to not use hard code to decide the max w/h.
      mFormat->SetInteger(MediaFormat::KEY_MAX_WIDTH, 1920);
      mFormat->SetInteger(MediaFormat::KEY_MAX_HEIGHT, 1080);
  }

  MediaCrypto::LocalRef crypto = MediaDrmProxy::GetMediaCrypto(mDrmStubId);
  bool hascrypto = !!crypto;
  LOG("Has(%d) MediaCrypto (%s)", hascrypto, NS_ConvertUTF16toUTF8(mDrmStubId).get());
  nsresult rv;
  NS_ENSURE_SUCCESS(rv = mDecoder->Configure(mFormat, aSurface, crypto, 0), rv);
  NS_ENSURE_SUCCESS(rv = mDecoder->Start(), rv);

  NS_ENSURE_SUCCESS(rv = ResetInputBuffers(), rv);
  NS_ENSURE_SUCCESS(rv = ResetOutputBuffers(), rv);

  nsCOMPtr<nsIRunnable> r = NewRunnableMethod(this, &MediaCodecDataDecoder::DecoderLoop);
  rv = NS_NewNamedThread("MC Decoder", getter_AddRefs(mThread), r);

  return rv;
}
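The repeated NS_ENSURE_SUCCESS calls above make InitDecoder return early as soon as one setup step fails, propagating that step's nsresult. Below is a minimal sketch of that early-return behaviour only; it is an approximation, not Mozilla's actual macro (which lives in nsDebug.h and also emits a warning).

// Hedged approximation of the early-return pattern used above; not
// Mozilla's actual NS_ENSURE_SUCCESS definition.
#define ENSURE_SUCCESS_SKETCH(res, ret) \
  do {                                  \
    if (NS_FAILED(res)) {               \
      return (ret);                     \
    }                                   \
  } while (0)

// Usage mirrors the calls above: assign the step's result, bail out on failure.
//   nsresult rv;
//   ENSURE_SUCCESS_SKETCH(rv = mDecoder->Start(), rv);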
Code example #5
File: H264Converter.cpp Project: luke-chang/gecko-1
MediaResult
H264Converter::CreateDecoderAndInit(MediaRawData* aSample)
{
  RefPtr<MediaByteBuffer> extra_data =
    H264::ExtractExtraData(aSample);
  bool inbandExtradata = H264::HasSPS(extra_data);
  if (!inbandExtradata &&
      !H264::HasSPS(mCurrentConfig.mExtraData)) {
    return NS_ERROR_NOT_INITIALIZED;
  }

  if (inbandExtradata) {
    UpdateConfigFromExtraData(extra_data);
  }

  MediaResult rv =
    CreateDecoder(mCurrentConfig, /* DecoderDoctorDiagnostics* */ nullptr);

  if (NS_SUCCEEDED(rv)) {
    RefPtr<H264Converter> self = this;
    RefPtr<MediaRawData> sample = aSample;
    mDecoder->Init()
      ->Then(
        AbstractThread::GetCurrent()->AsTaskQueue(),
        __func__,
        [self, sample, this](const TrackType aTrackType) {
          mInitPromiseRequest.Complete();
          mNeedAVCC =
            Some(mDecoder->NeedsConversion() == ConversionRequired::kNeedAVCC);
          mCanRecycleDecoder = Some(CanRecycleDecoder());

          if (!mFlushPromise.IsEmpty()) {
            // A Flush is pending, abort the current operation.
            mFlushPromise.Resolve(true, __func__);
            return;
          }

          DecodeFirstSample(sample);
        },
        [self, this](const MediaResult& aError) {
          mInitPromiseRequest.Complete();

          if (!mFlushPromise.IsEmpty()) {
            // A Flush is pending, abort the current operation.
            mFlushPromise.Reject(aError, __func__);
            return;
          }

          mDecodePromise.Reject(
            MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                        RESULT_DETAIL("Unable to initialize H264 decoder")),
            __func__);
        })
      ->Track(mInitPromiseRequest);
    return NS_ERROR_DOM_MEDIA_INITIALIZING_DECODER;
  }
  return rv;
}
Code example #6
File: dxva2decoder.cpp Project: tomhughes/mythtv
bool DXVA2Decoder::Init(MythRenderD3D9* render)
{
    bool ok = true;
    CREATE_CHECK(m_width > 0,  "Invalid width.")
    CREATE_CHECK(m_height > 0, "Invalid height.")
    CREATE_CHECK(CreateVideoService(render), "Failed to create video service.")
    CREATE_CHECK(GetInputOutput(), "Failed to find input/output combination.")
    InitFormat();
    CREATE_CHECK(GetDecoderConfig(), "Failed to find a raw input bitstream.")
    CREATE_CHECK(CreateSurfaces(), "Failed to create surfaces.")
    CREATE_CHECK(CreateDecoder(), "Failed to create decoder.")
    return ok;
}
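CREATE_CHECK chains the setup steps so that each one runs only while ok is still true and a failed step reports its message. The following is a self-contained, hypothetical sketch of such a macro for illustration; it is not the project's actual definition, which reports through MythTV's logging facilities.

#include <iostream>

// Hypothetical CREATE_CHECK-style macro: evaluate the step only while 'ok'
// is still true, and print the associated message when the step fails.
#define CREATE_CHECK(step, msg)              \
    if (ok)                                  \
    {                                        \
        ok = (step);                         \
        if (!ok)                             \
            std::cerr << (msg) << std::endl; \
    }

// Usage mirrors DXVA2Decoder::Init above: once a step fails, the remaining
// steps are skipped and the function returns false.
bool InitSketch(int width, int height)
{
    bool ok = true;
    CREATE_CHECK(width > 0,  "Invalid width.")
    CREATE_CHECK(height > 0, "Invalid height.")
    return ok;
}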
Code example #7
File: H264Converter.cpp Project: hoosteeno/gecko-dev
nsresult
H264Converter::CreateDecoderAndInit(MediaRawData* aSample)
{
  nsRefPtr<MediaByteBuffer> extra_data =
    mp4_demuxer::AnnexB::ExtractExtraData(aSample);
  if (!mp4_demuxer::AnnexB::HasSPS(extra_data)) {
    return NS_ERROR_NOT_INITIALIZED;
  }
  UpdateConfigFromExtraData(extra_data);

  nsresult rv = CreateDecoder();
  NS_ENSURE_SUCCESS(rv, rv);
  return Init();
}
Code example #8
/* Handle decoding for a frame.  Return true if a frame was decoded, false if not
 * (due to pause, EOF, etc).  If true is returned, we'll be in FRAME_DECODED. */
bool MovieTexture_FFMpeg::DecodeFrame()
{
	ASSERT_M( m_ImageWaiting == FRAME_NONE, ssprintf("%i", m_ImageWaiting) );

	if( m_State == DECODER_QUIT )
		return false;
	CHECKPOINT;

	/* Read a frame. */
	int ret = decoder->GetFrame();
	if( ret == -1 )
		return false;

	if( m_bWantRewind && decoder->GetTimestamp() == 0 )
		m_bWantRewind = false; /* ignore */

	if( ret == 0 )
	{
		/* EOF. */
		if( !m_bLoop )
			return false;

		LOG->Trace( "File \"%s\" looping", GetID().filename.c_str() );
		m_bWantRewind = true;
	}

	if( m_bWantRewind )
	{
		m_bWantRewind = false;

		/* When resetting the clock, set it back by the length of the last frame,
		 * so it has a proper delay. */
		float fDelay = decoder->LastFrameDelay;

		/* Restart. */
		DestroyDecoder();
		CString sError = CreateDecoder();
		if( sError != "" )
			RageException::Throw( "Error rewinding stream %s: %s", GetID().filename.c_str(), sError.c_str() );

		decoder->Init();
		m_Clock = -fDelay;
		return false;
	}

	/* We got a frame. */
	m_ImageWaiting = FRAME_DECODED;

	return true;
}
Code example #9
File: decode_interface.c Project: pedroarthur/HLBR
/*************************************
* Set up the decoder
*************************************/
int InitDecoderInterface(){
	int DecoderID;

	DEBUGPATH;

	if ((DecoderID=CreateDecoder("Interface"))==DECODER_NONE){
		DBG( PRINTERROR("Couldn't Allocate Decoder Interface\n") );
		return FALSE;
	}
	
	Globals.Decoders[DecoderID].DecodeFunc=DecodeInterface;
	Globals.Decoders[DecoderID].Free=free;

	return TRUE;
}
Code example #10
File: BaseDecoderTest.cpp Project: BO45/openh264
void BaseDecoderTest::SetUp() {
  long rv = CreateDecoder(&decoder_);
  ASSERT_EQ(0, rv);
  ASSERT_TRUE(decoder_ != NULL);

  SDecodingParam decParam;
  memset(&decParam, 0, sizeof(SDecodingParam));
  decParam.iOutputColorFormat  = videoFormatI420;
  decParam.uiTargetDqLayer = UCHAR_MAX;
  decParam.uiEcActiveFlag  = 1;
  decParam.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;

  rv = decoder_->Initialize(&decParam);
  ASSERT_EQ(0, rv);
}
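A decoder obtained this way is normally released with the matching teardown calls. Below is a hedged sketch of that cleanup, assuming the openh264 C++ API in which DestroyDecoder() is the counterpart of CreateDecoder() and ISVCDecoder exposes Uninitialize(); the test fixture's actual TearDown may differ.

#include "codec_api.h"  // openh264 decoder API (assumed include path)

// Hypothetical helper, not quoted from the project: release a decoder that
// was set up with CreateDecoder() + Initialize() as in SetUp() above.
static void ReleaseDecoder(ISVCDecoder*& decoder) {
  if (decoder != NULL) {
    decoder->Uninitialize();   // undo Initialize(&decParam)
    DestroyDecoder(decoder);   // undo CreateDecoder(&decoder_)
    decoder = NULL;
  }
}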
Code example #11
H264Converter::H264Converter(PlatformDecoderModule* aPDM,
                             const CreateDecoderParams& aParams)
  : mPDM(aPDM)
  , mOriginalConfig(aParams.VideoConfig())
  , mCurrentConfig(aParams.VideoConfig())
  , mKnowsCompositor(aParams.mKnowsCompositor)
  , mImageContainer(aParams.mImageContainer)
  , mTaskQueue(aParams.mTaskQueue)
  , mDecoder(nullptr)
  , mGMPCrashHelper(aParams.mCrashHelper)
  , mLastError(NS_OK)
  , mType(aParams.mType)
  , mOnWaitingForKeyEvent(aParams.mOnWaitingForKeyEvent)
{
  CreateDecoder(aParams.mDiagnostics);
}
Code example #12
File: mythraopconnection.cpp Project: stunami/mythtv
void MythRAOPConnection::audioRetry(void)
{
    if (!m_audio)
    {
        MythRAOPDevice* p = (MythRAOPDevice*)parent();
        if (p && p->NextInAudioQueue(this) && OpenAudioDevice())
        {
            CreateDecoder();
        }
    }

    if (m_audio && m_codec && m_codeccontext)
    {
        StopAudioTimer();
    }
}
Code example #13
H264Converter::H264Converter(PlatformDecoderModule* aPDM,
                             const CreateDecoderParams& aParams)
  : mPDM(aPDM)
  , mOriginalConfig(aParams.VideoConfig())
  , mCurrentConfig(aParams.VideoConfig())
  , mLayersBackend(aParams.mLayersBackend)
  , mImageContainer(aParams.mImageContainer)
  , mTaskQueue(aParams.mTaskQueue)
  , mCallback(aParams.mCallback)
  , mDecoder(nullptr)
  , mGMPCrashHelper(aParams.mCrashHelper)
  , mNeedAVCC(aPDM->DecoderNeedsConversion(aParams.mConfig) == PlatformDecoderModule::kNeedAVCC)
  , mLastError(NS_OK)
{
  CreateDecoder(aParams.mDiagnostics);
}
Code example #14
CH264StreamDecodeSink::CH264StreamDecodeSink( UsageEnvironment& env, unsigned bufferSize, int nBandWidth )
: MediaSink( env )
{
	// Cache the bit rate.
	m_nBandWidth = nBandWidth;

	m_pBuffer = new unsigned char[ bufferSize ];
	memset( m_pBuffer, 0, bufferSize );
	const unsigned char start_code[4] = {0x00, 0x00, 0x00, 0x01};

	m_pData = m_pBuffer + sizeof( start_code );
	memcpy( m_pBuffer, start_code, sizeof( start_code ) );

	m_nDataBufferSize = bufferSize - sizeof( start_code );
	m_nDataLen = 0;

#ifdef USE_FFMPEG
	m_decoderId = CreateDecoder( CODEC_ID_H264 );
	if ( InvalidDecoder == m_decoderId )
	{
		// Something went wrong!
		mcu::tlog << _T( "Failed to create the H264 decoder!" ) << endl;
		AfxMessageBox( _T( "Unable to create the H264 decoder!" ) );
	}
#endif

	m_pDecoder = CDecoder::FindDecoder( MAIN_DECODER_NAME );
	if( m_pDecoder )
	{
		CDecoder::Release( m_pDecoder );
		m_pDecoder = NULL;
	}

	// 
	m_pDecoder = CDecoder::CreateDecoder( CDecoder::CODEC_H264, m_nBandWidth, MAIN_DECODER_NAME );
	if ( NULL == m_pDecoder )
	{
		// Something went wrong!
		mcu::tlog << _T( "Failed to create the H264 decoder!" ) << endl;
		AfxMessageBox( _T( "Unable to create the H264 decoder!" ) );
	}
//	_ASSERT( m_pDecoder );

	
}
Code example #15
File: H264Converter.cpp Project: hoosteeno/gecko-dev
H264Converter::H264Converter(PlatformDecoderModule* aPDM,
                             const VideoInfo& aConfig,
                             layers::LayersBackend aLayersBackend,
                             layers::ImageContainer* aImageContainer,
                             FlushableTaskQueue* aVideoTaskQueue,
                             MediaDataDecoderCallback* aCallback)
  : mPDM(aPDM)
  , mCurrentConfig(aConfig)
  , mLayersBackend(aLayersBackend)
  , mImageContainer(aImageContainer)
  , mVideoTaskQueue(aVideoTaskQueue)
  , mCallback(aCallback)
  , mDecoder(nullptr)
  , mNeedAVCC(aPDM->DecoderNeedsConversion(aConfig) == PlatformDecoderModule::kNeedAVCC)
  , mLastError(NS_OK)
{
  CreateDecoder();
}
Code example #16
nsresult MediaCodecDataDecoder::InitDecoder(Surface::Param aSurface)
{
  mDecoder = CreateDecoder(mMimeType);
  if (!mDecoder) {
    ENVOKE_CALLBACK(Error);
    return NS_ERROR_FAILURE;
  }

  nsresult rv;
  NS_ENSURE_SUCCESS(rv = mDecoder->Configure(mFormat, aSurface, nullptr, 0), rv);
  NS_ENSURE_SUCCESS(rv = mDecoder->Start(), rv);

  NS_ENSURE_SUCCESS(rv = ResetInputBuffers(), rv);
  NS_ENSURE_SUCCESS(rv = ResetOutputBuffers(), rv);

  NS_NewNamedThread("MC Decoder", getter_AddRefs(mThread),
                    NS_NewRunnableMethod(this, &MediaCodecDataDecoder::DecoderLoop));

  return NS_OK;
}
Code example #17
File: H264Converter.cpp Project: SJasoria/gecko-dev
H264Converter::H264Converter(PlatformDecoderModule* aPDM,
                             const VideoInfo& aConfig,
                             layers::LayersBackend aLayersBackend,
                             layers::ImageContainer* aImageContainer,
                             TaskQueue* aTaskQueue,
                             MediaDataDecoderCallback* aCallback,
                             DecoderDoctorDiagnostics* aDiagnostics)
  : mPDM(aPDM)
  , mOriginalConfig(aConfig)
  , mCurrentConfig(aConfig)
  , mLayersBackend(aLayersBackend)
  , mImageContainer(aImageContainer)
  , mTaskQueue(aTaskQueue)
  , mCallback(aCallback)
  , mDecoder(nullptr)
  , mNeedAVCC(aPDM->DecoderNeedsConversion(aConfig) == PlatformDecoderModule::kNeedAVCC)
  , mLastError(NS_OK)
{
  CreateDecoder(aDiagnostics);
}
Code example #18
int32_t WebrtcOpenH264VideoDecoder::InitDecode(
    const webrtc::VideoCodec* codecSettings,
    int32_t numberOfCores) {
  long rv = CreateDecoder (&decoder_);
  if (rv) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  SDecodingParam param;
  memset(&param, 0, sizeof(param));
  param.iOutputColorFormat = videoFormatI420;
  param.uiTargetDqLayer = UCHAR_MAX;  // TODO([email protected]): correct?
  param.uiEcActiveFlag = 1; // Error concealment on.
  param.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;

  long lrv = decoder_->Initialize(&param, INIT_TYPE_PARAMETER_BASED);
  if (lrv) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}
Code example #19
File: pipeline-vda.cpp Project: 499940913/moon
void
MoonVDADecoder::OpenDecoderAsyncInternal ()
{
	IMediaStream *stream = GetStream ();
	VideoStream *vs = (VideoStream *) stream;
	int format = 'avc1';

	CFDataRef avcCData = CFDataCreate (kCFAllocatorDefault, (const uint8_t*) stream->GetRawExtraData (), stream->GetRawExtraDataSize ());
	OSStatus status = CreateDecoder ((SInt32) vs->GetHeight (), (SInt32) vs->GetWidth (), (OSType) format, avcCData);

	if (avcCData) CFRelease (avcCData);

	if (status == kVDADecoderNoErr) {
		SetPixelFormat (MoonPixelFormat422YpCbCr8);

		ReportOpenDecoderCompleted ();
	} else {
		char *str = g_strdup_printf ("MoonVDADecoder failed to open codec (result: %d)", status);
		ReportErrorOccurred (str);
		g_free (str);
	}
}
Code example #20
File: mime_cs.cpp Project: prestocore/browser
void DecodedMIME_Storage::SetFinished(BOOL force)
{
	OP_STATUS op_err;
	if(!writing_to_self && !decoder && data.GetLength() != 0)
	{
		CreateDecoder(data.GetDirectPayload(),data.GetLength());
		TRAP(op_err, data.CommitSampledBytesL(data.GetLength()));
	}

	if(writing_to_self || !decoder)
	{
		Decode_Storage::SetFinished(force);
		if((URLStatus) url->GetAttribute(URL::KLoadStatus) == URL_LOADING)
			url->SetAttribute(URL::KLoadStatus, URL_LOADED);
	}
	else if(decoder)
	{
		TRAP(op_err, decoder->FinishedLoadingL());
#ifdef MHTML_ARCHIVE_REDIRECT_SUPPORT
		valid_mhtml_archive = decoder->IsValidMHTMLArchive(); 
#endif

		if(!decode_only)
		{
			writing_to_self = TRUE;

			URL tmp(url, (char *) NULL);

			TRAP(op_err, decoder->RetrieveDataL(tmp, this));
			writing_to_self = FALSE;
		}
		else
		{
			decoder->RetrieveAttachementList(this);
		}
		Decode_Storage::SetFinished(force);
	}
}
Code example #21
CString MovieTexture_FFMpeg::Init()
{
	CString sError = CreateDecoder();
	if( sError != "" )
		return sError;

	LOG->Trace("Bitrate: %i", decoder->m_stream->codec.bit_rate );
	LOG->Trace("Codec pixel format: %s", avcodec::avcodec_get_pix_fmt_name(decoder->m_stream->codec.pix_fmt) );

	/* Decode one frame, to guarantee that the texture is drawn when this function returns. */
	int ret = decoder->GetFrame();
	if( ret == -1 )
		return ssprintf( "%s: error getting first frame", GetID().filename.c_str() );
	if( ret == 0 )
	{
		/* There's nothing there. */
		return ssprintf( "%s: EOF getting first frame", GetID().filename.c_str() );
	}

	m_ImageWaiting = FRAME_DECODED;

	CreateTexture();
	LOG->Trace( "Resolution: %ix%i (%ix%i, %ix%i)",
			m_iSourceWidth, m_iSourceHeight,
			m_iImageWidth, m_iImageHeight, m_iTextureWidth, m_iTextureHeight );
	LOG->Trace( "Texture pixel format: %i", m_AVTexfmt );

	CreateFrameRects();

	ConvertFrame();
	UpdateFrame();

	CHECKPOINT;

	StartThread();

	return "";
}
Code example #22
File: H264Converter.cpp Project: luke-chang/gecko-1
H264Converter::H264Converter(PlatformDecoderModule* aPDM,
                             const CreateDecoderParams& aParams)
  : mPDM(aPDM)
  , mOriginalConfig(aParams.VideoConfig())
  , mCurrentConfig(aParams.VideoConfig())
  , mKnowsCompositor(aParams.mKnowsCompositor)
  , mImageContainer(aParams.mImageContainer)
  , mTaskQueue(aParams.mTaskQueue)
  , mDecoder(nullptr)
  , mGMPCrashHelper(aParams.mCrashHelper)
  , mLastError(NS_OK)
  , mType(aParams.mType)
  , mOnWaitingForKeyEvent(aParams.mOnWaitingForKeyEvent)
  , mDecoderOptions(aParams.mOptions)
  , mRate(aParams.mRate)
{
  mLastError = CreateDecoder(mOriginalConfig, aParams.mDiagnostics);
  if (mDecoder) {
    MOZ_ASSERT(H264::HasSPS(mOriginalConfig.mExtraData));
    // The video metadata contains out of band SPS/PPS (AVC1) store it.
    mOriginalExtraData = mOriginalConfig.mExtraData;
  }
}
Code example #23
File: simple_test.cpp Project: cl-lei/openh264
 void SetUp() {
   long rv = CreateDecoder(&decoder_);
   ASSERT_EQ(0, rv);
   ASSERT_TRUE(decoder_);
 }
Code example #24
static pj_status_t open_openh264_codec(openh264_private *ff,
                                     pj_mutex_t *ff_mutex)
{
    pjmedia_video_format_detail *vfd;
    pj_bool_t enc_opened = PJ_FALSE, dec_opened = PJ_FALSE;
    pj_status_t status;

    vfd = pjmedia_format_get_video_format_detail(&ff->param.enc_fmt, 
						 PJ_TRUE);

    /* Override generic params or apply specific params before opening
     * the codec.
     */
    if (ff->desc->preopen) {
		status = (*ff->desc->preopen)(ff);
		if (status != PJ_SUCCESS)
			goto on_error;
    }

    /* Open encoder */
    if (ff->param.dir & PJMEDIA_DIR_ENCODING) {
		int err;
		SEncParamExt *param = &ff->enc_param;
		const openh264_codec_desc *desc = &ff->desc[0];
		bool disable = 0;
		int iIndexLayer = 0;
		SSourcePicture *srcPic;

		pj_mutex_lock(ff_mutex);
		memset(param, 0x00, sizeof(SEncParamExt));
		CreateSVCEncoder(&ff->enc);
		
		/* Test for temporal, spatial, SNR scalability */
		param->fMaxFrameRate = (float)vfd->fps.num;		// input frame rate
		param->iPicWidth	= vfd->size.w;		// width of picture in samples
		param->iPicHeight	= vfd->size.h;		// height of picture in samples
		param->iTargetBitrate = desc->avg_bps;		// target bitrate desired
		param->bEnableRc = PJ_TRUE;           //  rc mode control
		param->iTemporalLayerNum = 3;	// layer number at temporal level
		param->iSpatialLayerNum	= 1;	// layer number at spatial level
		param->bEnableDenoise   = PJ_TRUE;    // denoise control
		param->bEnableBackgroundDetection = PJ_TRUE; // background detection control
		param->bEnableAdaptiveQuant       = PJ_TRUE; // adaptive quantization control
		param->bEnableFrameSkip           = PJ_TRUE; // frame skipping
		param->bEnableLongTermReference   = PJ_FALSE; // long term reference control
		param->bEnableFrameCroppingFlag   = PJ_FALSE;
		param->iLoopFilterDisableIdc = 0;

		param->iInputCsp			= videoFormatI420;			// color space of input sequence
		param->uiIntraPeriod		= 300;		// period of Intra frame
		param->bEnableSpsPpsIdAddition = 0;
		param->bPrefixNalAddingCtrl = 0;
		
		param->sSpatialLayers[iIndexLayer].iVideoWidth	= vfd->size.w;
		param->sSpatialLayers[iIndexLayer].iVideoHeight	= vfd->size.h;
		param->sSpatialLayers[iIndexLayer].fFrameRate	= (float)vfd->fps.num;		
		param->sSpatialLayers[iIndexLayer].iSpatialBitrate	= desc->avg_bps;
// 		param->sSpatialLayers[iIndexLayer].iDLayerQp = 50;
 		param->sSpatialLayers[iIndexLayer].uiProfileIdc = 66;
		param->sSpatialLayers[iIndexLayer].sSliceCfg.uiSliceMode = 4;
		param->sSpatialLayers[iIndexLayer].sSliceCfg.sSliceArgument.uiSliceSizeConstraint = PJMEDIA_MAX_VID_PAYLOAD_SIZE;

		err = callWelsEncoderFn(ff->enc)->InitializeExt(ff->enc, param);
		if (err == cmResultSuccess)
		{			
			callWelsEncoderFn(ff->enc)->SetOption(ff->enc, ENCODER_OPTION_ENABLE_SSEI, &disable);
			enc_opened = PJ_TRUE;
		}

		srcPic = malloc(sizeof(SSourcePicture));
		memset(srcPic, 0x00, sizeof(SSourcePicture));
		srcPic->iColorFormat = param->iInputCsp;
		srcPic->iPicWidth = param->iPicWidth;
		srcPic->iPicHeight = param->iPicHeight;
		srcPic->iStride[0] = param->iPicWidth;
		srcPic->iStride[1] = param->iPicWidth / 2;
		srcPic->iStride[2] = param->iPicWidth / 2;

		ff->srcPic = srcPic;
		pj_mutex_unlock(ff_mutex);				
    }

    /* Open decoder */
    if (ff->param.dir & PJMEDIA_DIR_DECODING) {
		SDecodingParam sDecParam = {0};

		pj_mutex_lock(ff_mutex);
		
		CreateDecoder(&ff->dec);
		sDecParam.iOutputColorFormat	= videoFormatI420;
		sDecParam.uiTargetDqLayer	= (unsigned char)-1;
		sDecParam.uiEcActiveFlag	= 1;
		sDecParam.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;
 		callWelsDecoderFn(ff->dec)->Initialize(ff->dec, &sDecParam);

		pj_mutex_unlock(ff_mutex);
		dec_opened = PJ_TRUE;
    }

    /* Let the codec apply specific params after the codec opened */
    if (ff->desc->postopen) {
		status = (*ff->desc->postopen)(ff);
		if (status != PJ_SUCCESS)
			goto on_error;
    }

    return PJ_SUCCESS;

on_error:
    return status;
}
Code example #25
File: image.c Project: BossKing/vlc
static picture_t *ImageRead( image_handler_t *p_image, block_t *p_block,
                             video_format_t *p_fmt_in,
                             video_format_t *p_fmt_out )
{
    picture_t *p_pic = NULL, *p_tmp;

    /* Check if we can reuse the current decoder */
    if( p_image->p_dec &&
        p_image->p_dec->fmt_in.i_codec != p_fmt_in->i_chroma )
    {
        DeleteDecoder( p_image->p_dec );
        p_image->p_dec = 0;
    }

    /* Start a decoder */
    if( !p_image->p_dec )
    {
        p_image->p_dec = CreateDecoder( p_image->p_parent, p_fmt_in );
        if( !p_image->p_dec )
        {
            block_Release(p_block);
            return NULL;
        }
    }

    p_block->i_pts = p_block->i_dts = mdate();
    while( (p_tmp = p_image->p_dec->pf_decode_video( p_image->p_dec, &p_block ))
             != NULL )
    {
        if( p_pic != NULL )
            picture_Release( p_pic );
        p_pic = p_tmp;
    }

    if( p_pic == NULL )
    {
        msg_Warn( p_image->p_parent, "no image decoded" );
        return 0;
    }

    if( !p_fmt_out->i_chroma )
        p_fmt_out->i_chroma = p_image->p_dec->fmt_out.video.i_chroma;
    if( !p_fmt_out->i_width && p_fmt_out->i_height )
        p_fmt_out->i_width = (int64_t)p_image->p_dec->fmt_out.video.i_width *
                             p_image->p_dec->fmt_out.video.i_sar_num *
                             p_fmt_out->i_height /
                             p_image->p_dec->fmt_out.video.i_height /
                             p_image->p_dec->fmt_out.video.i_sar_den;

    if( !p_fmt_out->i_height && p_fmt_out->i_width )
        p_fmt_out->i_height = (int64_t)p_image->p_dec->fmt_out.video.i_height *
                              p_image->p_dec->fmt_out.video.i_sar_den *
                              p_fmt_out->i_width /
                              p_image->p_dec->fmt_out.video.i_width /
                              p_image->p_dec->fmt_out.video.i_sar_num;
    if( !p_fmt_out->i_width )
        p_fmt_out->i_width = p_image->p_dec->fmt_out.video.i_width;
    if( !p_fmt_out->i_height )
        p_fmt_out->i_height = p_image->p_dec->fmt_out.video.i_height;
    if( !p_fmt_out->i_visible_width )
        p_fmt_out->i_visible_width = p_fmt_out->i_width;
    if( !p_fmt_out->i_visible_height )
        p_fmt_out->i_visible_height = p_fmt_out->i_height;

    /* Check if we need chroma conversion or resizing */
    if( p_image->p_dec->fmt_out.video.i_chroma != p_fmt_out->i_chroma ||
        p_image->p_dec->fmt_out.video.i_width != p_fmt_out->i_width ||
        p_image->p_dec->fmt_out.video.i_height != p_fmt_out->i_height )
    {
        if( p_image->p_filter )
        if( p_image->p_filter->fmt_in.video.i_chroma !=
            p_image->p_dec->fmt_out.video.i_chroma ||
            p_image->p_filter->fmt_out.video.i_chroma != p_fmt_out->i_chroma )
        {
            /* We need to restart a new filter */
            DeleteFilter( p_image->p_filter );
            p_image->p_filter = 0;
        }

        /* Start a filter */
        if( !p_image->p_filter )
        {
            p_image->p_filter =
                CreateFilter( p_image->p_parent, &p_image->p_dec->fmt_out,
                              p_fmt_out );

            if( !p_image->p_filter )
            {
                picture_Release( p_pic );
                return NULL;
            }
        }
        else
        {
            /* Filters should handle on-the-fly size changes */
            p_image->p_filter->fmt_in = p_image->p_dec->fmt_out;
            p_image->p_filter->fmt_out = p_image->p_dec->fmt_out;
            p_image->p_filter->fmt_out.i_codec = p_fmt_out->i_chroma;
            p_image->p_filter->fmt_out.video = *p_fmt_out;
        }

        p_pic = p_image->p_filter->pf_video_filter( p_image->p_filter, p_pic );
        *p_fmt_out = p_image->p_filter->fmt_out.video;
    }
    else *p_fmt_out = p_image->p_dec->fmt_out.video;

    return p_pic;
}
Code example #26
File: mime_cs.cpp Project: prestocore/browser
void DecodedMIME_Storage::WriteToDecoder(const unsigned char *source, unsigned long source_len)
{
	OP_STATUS op_err;
	const unsigned char * OP_MEMORY_VAR src = source;
	OP_MEMORY_VAR unsigned long src_len = source_len;

	if(!decoder)
	{
		TRAP(op_err, data.WriteDataL(src, src_len));

		src = data.GetDirectPayload();
		src_len = data.GetLength();

		OP_MEMORY_VAR unsigned long header_len = 0;
		const unsigned char *current = src;
		BOOL found_end_of_header = FALSE;

		while(header_len < src_len)
		{
			header_len++;
			if(*(current++) == '\n')
			{
				if(header_len < src_len)
				{
					if(*current == '\r')
					{
						current ++;
						if(header_len >= src_len)
							break;
						header_len ++;
					}
					if(*current == '\n')
					{
						header_len++;
						found_end_of_header = TRUE;
						break;
					}
				}
			}
		}

		if(!found_end_of_header )
			return;

		CreateDecoder(src, header_len);
		if(!decoder)
			return;

		decoder->SetPreferPlaintext(prefer_plain);
		decoder->SetIgnoreWarnings(ignore_warnings);
		decoder->SetUseNoStoreFlag(!!url->GetAttribute(URL::KCachePolicy_NoStore));

		TRAP(op_err, decoder->LoadDataL(src+ header_len,src_len - header_len));
		if(OpStatus::IsError(op_err))
		{
			url->HandleError(MIME_ERRSTR(SI,ERR_CACHE_INTERNAL_ERROR));
			return;
		}
		TRAP(op_err, data.CommitSampledBytesL(src_len));
		if(OpStatus::IsError(op_err))
		{
			url->HandleError(MIME_ERRSTR(SI,ERR_CACHE_INTERNAL_ERROR));
			return;
		}
		return;
	}

	decoder->SetForceCharset(charset_id);
	TRAP(op_err, decoder->LoadDataL(src,src_len));

	if(!decode_only)
	{
		writing_to_self = TRUE;
		URL tmp(url, (char *) NULL);
		TRAP(op_err, decoder->RetrieveDataL(tmp, this));
		writing_to_self = FALSE;
		if((URLType) url->GetAttribute(URL::KType) == URL_EMAIL && url->GetDataStorage())
		{
			URL_DataStorage *url_ds = url->GetDataStorage();
			if(!url_ds->GetAttribute(URL::KHeaderLoaded))
			{
				url_ds->BroadcastMessage(MSG_HEADER_LOADED, url->GetID(), 0, MH_LIST_ALL);
				url_ds->SetAttribute(URL::KHeaderLoaded,TRUE);
			}

			url_ds->BroadcastMessage(MSG_URL_DATA_LOADED, url->GetID(), 0, MH_LIST_ALL);
		}
	}
}
Code example #27
File: image.c Project: tguillem/vlc
static picture_t *ImageRead( image_handler_t *p_image, block_t *p_block,
                             const video_format_t *p_fmt_in,
                             video_format_t *p_fmt_out )
{
    picture_t *p_pic = NULL;

    /* Check if we can reuse the current decoder */
    if( p_image->p_dec &&
        p_image->p_dec->fmt_in.i_codec != p_fmt_in->i_chroma )
    {
        DeleteDecoder( p_image->p_dec );
        p_image->p_dec = 0;
    }

    /* Start a decoder */
    if( !p_image->p_dec )
    {
        p_image->p_dec = CreateDecoder( p_image->p_parent, p_fmt_in );
        if( !p_image->p_dec )
        {
            block_Release(p_block);
            return NULL;
        }
        if( p_image->p_dec->fmt_out.i_cat != VIDEO_ES )
        {
            DeleteDecoder( p_image->p_dec );
            p_image->p_dec = NULL;
            block_Release(p_block);
            return NULL;
        }
        p_image->p_dec->pf_queue_video = ImageQueueVideo;
        p_image->p_dec->p_queue_ctx = p_image;
    }

    p_block->i_pts = p_block->i_dts = mdate();
    int ret = p_image->p_dec->pf_decode( p_image->p_dec, p_block );
    if( ret == VLCDEC_SUCCESS )
    {
        /* Drain */
        p_image->p_dec->pf_decode( p_image->p_dec, NULL );

        p_pic = picture_fifo_Pop( p_image->outfifo );

        unsigned lostcount = 0;
        picture_t *lostpic;
        while( ( lostpic = picture_fifo_Pop( p_image->outfifo ) ) != NULL )
        {
            picture_Release( lostpic );
            lostcount++;
        }
        if( lostcount > 0 )
            msg_Warn( p_image->p_parent, "Image decoder output more than one "
                      "picture (%d)", lostcount );
    }

    if( p_pic == NULL )
    {
        msg_Warn( p_image->p_parent, "no image decoded" );
        return 0;
    }

    if( !p_fmt_out->i_chroma )
        p_fmt_out->i_chroma = p_image->p_dec->fmt_out.video.i_chroma;
    if( !p_fmt_out->i_width && p_fmt_out->i_height )
        p_fmt_out->i_width = (int64_t)p_image->p_dec->fmt_out.video.i_width *
                             p_image->p_dec->fmt_out.video.i_sar_num *
                             p_fmt_out->i_height /
                             p_image->p_dec->fmt_out.video.i_height /
                             p_image->p_dec->fmt_out.video.i_sar_den;

    if( !p_fmt_out->i_height && p_fmt_out->i_width )
        p_fmt_out->i_height = (int64_t)p_image->p_dec->fmt_out.video.i_height *
                              p_image->p_dec->fmt_out.video.i_sar_den *
                              p_fmt_out->i_width /
                              p_image->p_dec->fmt_out.video.i_width /
                              p_image->p_dec->fmt_out.video.i_sar_num;
    if( !p_fmt_out->i_width )
        p_fmt_out->i_width = p_image->p_dec->fmt_out.video.i_width;
    if( !p_fmt_out->i_height )
        p_fmt_out->i_height = p_image->p_dec->fmt_out.video.i_height;
    if( !p_fmt_out->i_visible_width )
        p_fmt_out->i_visible_width = p_fmt_out->i_width;
    if( !p_fmt_out->i_visible_height )
        p_fmt_out->i_visible_height = p_fmt_out->i_height;

    /* Check if we need chroma conversion or resizing */
    if( p_image->p_dec->fmt_out.video.i_chroma != p_fmt_out->i_chroma ||
        p_image->p_dec->fmt_out.video.i_width != p_fmt_out->i_width ||
        p_image->p_dec->fmt_out.video.i_height != p_fmt_out->i_height )
    {
        if( p_image->p_filter )
        if( p_image->p_filter->fmt_in.video.i_chroma !=
            p_image->p_dec->fmt_out.video.i_chroma ||
            p_image->p_filter->fmt_out.video.i_chroma != p_fmt_out->i_chroma )
        {
            /* We need to restart a new filter */
            DeleteFilter( p_image->p_filter );
            p_image->p_filter = 0;
        }

        /* Start a filter */
        if( !p_image->p_filter )
        {
            p_image->p_filter =
                CreateFilter( p_image->p_parent, &p_image->p_dec->fmt_out,
                              p_fmt_out );

            if( !p_image->p_filter )
            {
                picture_Release( p_pic );
                return NULL;
            }
        }
        else
        {
            /* Filters should handle on-the-fly size changes */
            p_image->p_filter->fmt_in = p_image->p_dec->fmt_out;
            p_image->p_filter->fmt_out = p_image->p_dec->fmt_out;
            p_image->p_filter->fmt_out.i_codec = p_fmt_out->i_chroma;
            p_image->p_filter->fmt_out.video = *p_fmt_out;
        }

        p_pic = p_image->p_filter->pf_video_filter( p_image->p_filter, p_pic );

        video_format_Clean( p_fmt_out );
        video_format_Copy( p_fmt_out, &p_image->p_filter->fmt_out.video );
    }
    else
    {
        video_format_Clean( p_fmt_out );
        video_format_Copy( p_fmt_out, &p_image->p_dec->fmt_out.video );
    }

    return p_pic;
}
Code example #28
int CPDF_DIBSource::StartLoadDIBSource(CPDF_Document* pDoc,
                                       const CPDF_Stream* pStream,
                                       bool bHasMask,
                                       CPDF_Dictionary* pFormResources,
                                       CPDF_Dictionary* pPageResources,
                                       bool bStdCS,
                                       uint32_t GroupFamily,
                                       bool bLoadMask) {
  if (!pStream) {
    return 0;
  }
  m_pDocument = pDoc;
  m_pDict = pStream->GetDict();
  m_pStream = pStream;
  m_bStdCS = bStdCS;
  m_bHasMask = bHasMask;
  m_Width = m_pDict->GetIntegerFor("Width");
  m_Height = m_pDict->GetIntegerFor("Height");
  if (m_Width <= 0 || m_Height <= 0 || m_Width > kMaxImageDimension ||
      m_Height > kMaxImageDimension) {
    return 0;
  }
  m_GroupFamily = GroupFamily;
  m_bLoadMask = bLoadMask;
  if (!LoadColorInfo(m_pStream->IsInline() ? pFormResources : nullptr,
                     pPageResources)) {
    return 0;
  }
  if (m_bDoBpcCheck && (m_bpc == 0 || m_nComponents == 0)) {
    return 0;
  }
  FX_SAFE_UINT32 src_size =
      CalculatePitch8(m_bpc, m_nComponents, m_Width) * m_Height;
  if (!src_size.IsValid()) {
    return 0;
  }
  m_pStreamAcc = pdfium::MakeUnique<CPDF_StreamAcc>();
  m_pStreamAcc->LoadAllData(pStream, false, src_size.ValueOrDie(), true);
  if (m_pStreamAcc->GetSize() == 0 || !m_pStreamAcc->GetData()) {
    return 0;
  }
  int ret = CreateDecoder();
  if (!ret)
    return ret;

  if (ret != 1) {
    if (!ContinueToLoadMask()) {
      return 0;
    }
    if (m_bHasMask) {
      StratLoadMask();
    }
    return ret;
  }
  if (!ContinueToLoadMask()) {
    return 0;
  }
  if (m_bHasMask) {
    ret = StratLoadMask();
  }
  if (ret == 2) {
    return ret;
  }
  if (m_pColorSpace && m_bStdCS) {
    m_pColorSpace->EnableStdConversion(false);
  }
  return ret;
}
Code example #29
bool CPDF_DIBSource::Load(CPDF_Document* pDoc, const CPDF_Stream* pStream) {
  if (!pStream)
    return false;

  m_pDocument = pDoc;
  m_pDict = pStream->GetDict();
  if (!m_pDict)
    return false;

  m_pStream = pStream;
  m_Width = m_pDict->GetIntegerFor("Width");
  m_Height = m_pDict->GetIntegerFor("Height");
  if (m_Width <= 0 || m_Height <= 0 || m_Width > kMaxImageDimension ||
      m_Height > kMaxImageDimension) {
    return false;
  }
  m_GroupFamily = 0;
  m_bLoadMask = false;
  if (!LoadColorInfo(nullptr, nullptr))
    return false;

  if (m_bDoBpcCheck && (m_bpc == 0 || m_nComponents == 0))
    return false;

  FX_SAFE_UINT32 src_size =
      CalculatePitch8(m_bpc, m_nComponents, m_Width) * m_Height;
  if (!src_size.IsValid())
    return false;

  m_pStreamAcc = pdfium::MakeUnique<CPDF_StreamAcc>();
  m_pStreamAcc->LoadAllData(pStream, false, src_size.ValueOrDie(), true);
  if (m_pStreamAcc->GetSize() == 0 || !m_pStreamAcc->GetData())
    return false;

  if (!CreateDecoder())
    return false;

  if (m_bImageMask) {
    m_bpp = 1;
    m_bpc = 1;
    m_nComponents = 1;
    m_AlphaFlag = 1;
  } else if (m_bpc * m_nComponents == 1) {
    m_bpp = 1;
  } else if (m_bpc * m_nComponents <= 8) {
    m_bpp = 8;
  } else {
    m_bpp = 24;
  }
  FX_SAFE_UINT32 pitch = CalculatePitch32(m_bpp, m_Width);
  if (!pitch.IsValid())
    return false;

  m_pLineBuf = FX_Alloc(uint8_t, pitch.ValueOrDie());
  LoadPalette();
  if (m_bColorKey) {
    m_bpp = 32;
    m_AlphaFlag = 2;
    pitch = CalculatePitch32(m_bpp, m_Width);
    if (!pitch.IsValid())
      return false;

    m_pMaskedLine = FX_Alloc(uint8_t, pitch.ValueOrDie());
  }
  m_Pitch = pitch.ValueOrDie();
  return true;
}