Example No. 1
	STDMETHODIMP GetLocaleID(LCID* pLocaleID)
	{
		CheckPointer(pLocaleID, E_POINTER);
		*pLocaleID = ::GetUserDefaultLCID();
		return S_OK;
	}
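Every example in this collection hinges on a CheckPointer guard: validate a pointer argument and bail out with the given return value if it is NULL. Below is a minimal, self-contained sketch of the idiom, assuming the usual DirectShow base-class macro definition (the Serpent examples further down use a CheckPointer function with a different signature); the getter is hypothetical, not part of any of the samples.

// Sketch of the CheckPointer idiom; the assumed macro matches the one in
// the DirectShow base classes (wxutil.h), reproduced here so the sample
// compiles on its own.
#include <cstdio>

#define CheckPointer(p, ret) { if ((p) == NULL) return (ret); }

// Hypothetical getter mirroring GetLocaleID above: reject a null output
// pointer before writing through it.
int GetAnswer(int* pValue)
{
    CheckPointer(pValue, -1);   // -1 stands in for E_POINTER
    *pValue = 42;
    return 0;                   // 0 stands in for S_OK
}

int main()
{
    int v = 0;
    std::printf("%d %d\n", GetAnswer(&v), GetAnswer(NULL));  // prints "0 -1"
    return 0;
}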
Example No. 2
void IterateKeff()
{
  long ptr, mode, skip, ncyc, idx, fix;
  double nsf, fiss, capt, nuxn, leak, L0, L1, val, f, keff, val0;
  
  /* Check mode */

  if ((mode = (long)RDB[DATA_ITER_MODE]) == ITER_MODE_NONE)
    return;

  /* Get fix mode */

  fix = (long)RDB[DATA_ITER_FIX];

  /* Number of cycles and actual number of skip cycles (setoptimization.c) */

  ncyc = (long)RDB[DATA_ITER_NCYC];
  idx = (long)RDB[DATA_CYCLE_IDX];

  if (fix == YES)
    skip = (long)((RDB[DATA_CRIT_SKIP] - RDB[DATA_ITER_NCYC])/2.0);
  else
    skip = (long)(RDB[DATA_CRIT_SKIP] - RDB[DATA_ITER_NCYC]);

  /* Check cycles */

  if ((idx < skip) || ((fix == YES) && (idx > skip + ncyc)))
    return;

  /* Reduce scoring buffer */

  ReduceBuffer();

  /* Collect MPI parallel data */

  CollectBuf();

  /* Check mode */

  if (mode == ITER_MODE_ALBEDO)
    {
      /***********************************************************************/

      /***** Albedo iteration ************************************************/

      /* Get k-eff */

      keff = RDB[DATA_ITER_KEFF];
      CheckValue(FUNCTION_NAME, "keff", "", keff, 0.1, 2.5);

      /* Fission nubar */
      
      ptr = (long)RDB[RES_TOT_NSF];
      CheckPointer(FUNCTION_NAME, "(ptr)", DATA_ARRAY, ptr);
      nsf = BufVal(ptr, 0);
      
      /* Fission term */
      
      ptr = (long)RDB[RES_TOT_FISSRATE];
      CheckPointer(FUNCTION_NAME, "(ptr)", DATA_ARRAY, ptr);
      fiss = BufVal(ptr, 0);
      
      /* Total capture rate */
      
      ptr = (long)RDB[RES_TOT_CAPTRATE];
      CheckPointer(FUNCTION_NAME, "(ptr)", DATA_ARRAY, ptr);
      capt = BufVal(ptr, 0);
      
      /* Scattering production rate */
      
      ptr = (long)RDB[RES_TOT_INLPRODRATE];
      CheckPointer(FUNCTION_NAME, "(ptr)", DATA_ARRAY, ptr);
      nuxn = BufVal(ptr, 0);

      /* Physical leakage rate */

      ptr = (long)RDB[RES_TOT_NEUTRON_LEAKRATE];
      CheckPointer(FUNCTION_NAME, "(ptr)", DATA_ARRAY, ptr);
      leak = BufVal(ptr, 0);

      /* Get previous albedo leakage rate */
      
      ptr = (long)RDB[RES_ALB_NEUTRON_LEAKRATE];
      CheckPointer(FUNCTION_NAME, "(ptr)", DATA_ARRAY, ptr);
      L0 = BufVal(ptr, 0);
     
      /* Calculate estimate for new albedo leakage rate */
      
      L1 = nsf/keff - capt - fiss - leak + nuxn;

      /* Avoid compiler warning */

      val = -1.0;

      /* Get previous value */

      if ((val0 = RDB[DATA_ITER_VAL]) < 0.0)
	{
	  /* Not set, use initial guess */

	  ptr = (long)RDB[RES_ANA_KEFF];
	  CheckPointer(FUNCTION_NAME, "(ptr)", DATA_ARRAY, ptr);

	  if (Mean(ptr, 0) > 1.0)
	    val = 0.999;
	  else
	    val = 1.001;
	}
      else if (L0 != 0.0)
	{
	  /* Calculate new */

	  val = (val0 - 1.0)*L1/L0 + 1.0;

	  /* Add to statistics */

	  ptr = (long)RDB[RES_ITER_VAL];
	  CheckPointer(FUNCTION_NAME, "(ptr)", DATA_ARRAY, ptr);
	  AddStat(val, ptr, 0);
	  
	  /* Fix value for last iteration */
	  
	  if ((fix == YES) && (idx > skip + ncyc))
	    val = Mean(ptr, 0);
	}    
      else
	Die(FUNCTION_NAME, "L0 == 0");

      /* Put value */

      WDB[DATA_ITER_VAL] = val;

      /* Put albedos */

      if ((f = RDB[DATA_ITER_ALB_F1]) > 0.0)
	WDB[DATA_GEOM_ALBEDO1] = val*f;

      if ((f = RDB[DATA_ITER_ALB_F2]) > 0.0)
	WDB[DATA_GEOM_ALBEDO2] = val*f;
      
      if ((f = RDB[DATA_ITER_ALB_F3]) > 0.0)
	WDB[DATA_GEOM_ALBEDO3] = val*f;
      
      /***********************************************************************/
    }
  else
    Die(FUNCTION_NAME, "Invalid iteration mode");
}
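For context, the albedo branch above adjusts the geometry albedos so that k-eff converges to the requested value: L1 is the albedo leakage that would close the neutron balance at the target k-eff, and the new scaling factor is extrapolated from the previous one through the ratio L1/L0. Below is a minimal stand-alone sketch of just that update step; the function name and driver are illustrative, not Serpent code.

/* Sketch of the albedo-factor update used in IterateKeff(); names are
   made up, only the arithmetic is the same. */
#include <stdio.h>

/* val0 : previous albedo scaling factor
   L0   : previously scored albedo leakage rate
   L1   : albedo leakage needed to balance production and losses at the
          target k-eff (nsf/keff - capt - fiss - leak + nuxn above)     */
static double AlbedoStep(double val0, double L0, double L1)
{
  return (val0 - 1.0)*L1/L0 + 1.0;
}

int main(void)
{
  /* toy numbers: required leakage 10 % higher than what was scored */
  printf("%.6f\n", AlbedoStep(0.999, 1.0, 1.1));   /* 0.998900 */

  return 0;
}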
Example No. 3
// we need to return an addrefed allocator, even if it is the preferred
// one, since the caller doesn't know whether it is the preferred one or not.
STDMETHODIMP 
CAsyncOutputPin::RequestAllocator(
    IMemAllocator* pPreferred,
    ALLOCATOR_PROPERTIES* pProps,
    IMemAllocator ** ppActual)
{
    CheckPointer(pPreferred,E_POINTER);
    CheckPointer(pProps,E_POINTER);
    CheckPointer(ppActual,E_POINTER);
    ASSERT(m_pIo);

    // we care about alignment but nothing else
    if(!pProps->cbAlign || !m_pIo->IsAligned(pProps->cbAlign))
    {
        m_pIo->Alignment(&pProps->cbAlign);
    }

    ALLOCATOR_PROPERTIES Actual;
    HRESULT hr;

    if(pPreferred)
    {
        hr = pPreferred->SetProperties(pProps, &Actual);

        if(SUCCEEDED(hr) && m_pIo->IsAligned(Actual.cbAlign))
        {
            pPreferred->AddRef();
            *ppActual = pPreferred;
            return S_OK;
        }
    }

    // create our own allocator
    IMemAllocator* pAlloc;
    hr = InitAllocator(&pAlloc);
    if(FAILED(hr))
    {
        return hr;
    }

    //...and see if we can make it suitable
    hr = pAlloc->SetProperties(pProps, &Actual);
    if(SUCCEEDED(hr) && m_pIo->IsAligned(Actual.cbAlign))
    {
        // we need to release our refcount on pAlloc, and addref
        // it to pass a refcount to the caller - this is a net nothing.
        *ppActual = pAlloc;
        return S_OK;
    }

    // failed to find a suitable allocator
    pAlloc->Release();

    // if we failed because of the IsAligned test, the error code will
    // not be failure
    if(SUCCEEDED(hr))
    {
        hr = VFW_E_BADALIGN;
    }
    return hr;
}
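Since the pin always hands back an AddRef'ed allocator, the caller owns one reference whether or not it received its preferred allocator. Below is a hedged sketch of the calling side; the helper is hypothetical, it assumes the DirectShow headers are available, and error handling is trimmed to the essentials.

// Hypothetical caller of IAsyncReader::RequestAllocator. The point being
// illustrated: the returned allocator always carries a reference that the
// caller must Release, preferred or not.
HRESULT UseReader(IAsyncReader* pReader, IMemAllocator* pPreferred)
{
    ALLOCATOR_PROPERTIES props = {};
    props.cBuffers = 1;
    props.cbBuffer = 4096;
    props.cbAlign  = 1;      // the pin above may raise this to its own alignment

    IMemAllocator* pActual = NULL;
    HRESULT hr = pReader->RequestAllocator(pPreferred, &props, &pActual);
    if (FAILED(hr))
        return hr;

    // ... Commit() and use pActual ...

    pActual->Release();      // balance the AddRef done inside RequestAllocator
    return S_OK;
}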
Example No. 4
HRESULT xDSVideoRenderer::FillRGBWithYUY2(IMediaSample * pSample)
{
    unsigned char* pDstData = NULL;
    unsigned char* pSrcData = NULL;

    IVideoRenderTarget::VRT_LockRect RTRect;

    int lDstPitch = 0;
    int lSrcPitch = 0;
    int Bbp = 0;


    if(m_pRenderTarget == NULL)
        return S_OK;

    m_pRenderTarget->onStartRender();
    m_pRenderTarget->lock(RTRect);

    pDstData = RTRect.mPixel;

    CheckPointer(pSample,E_POINTER);
    pSample->GetPointer( &pSrcData);

    lDstPitch = RTRect.mPicth ;
    lSrcPitch = m_lVidWidth * 2;//m_lVidPitch;


    Bbp = RTRect.mBytePerPixel;
    unsigned char* pSrcLine = pSrcData;
    unsigned char* pDstLine = pDstData;// + (RTRect.mHeight - 1) * RTRect.mWidth * Bbp;

    ////if(m_pRenderTarget->flipY())
    //{
    //	pDstLine += (RTRect.mHeight - 1) * lDstPitch;
    //	lDstPitch =- lDstPitch;
    //}

    int _R = 0 , _G = 1 , _B = 2, _A = 3;
    //if(m_pRenderTarget->flipRGBA() )
    //{
    //	_B = 0;	_G = 1;	_R = 2;	_A = 3;
    //}

    if(pSrcData == NULL || pDstData == NULL)
    {
#ifdef _DEBUG
        OutputDebugString(L"CVideoRender DoSampler 的Buffer为空\n");
#endif
        m_pRenderTarget->unlock();
        return E_FAIL;
    }

    unsigned int  c_y =  0;
    int  c_u =  0;
    int  c_v =  0;

    if(Bbp == 3)
    {
        _A = 0;
    }
    if(Bbp == 3 || Bbp == 4)
    {
        for(int y = 0 ; y < RTRect.mHeight ; ++y)
        {
            for(int x = 0 ; x < RTRect.mWidth ; ++x)
            {
                int src_y_idx = 2 *  x;
                c_y = pSrcLine[src_y_idx];
                if(x%2)
                {
                    c_u = ((unsigned char*)pSrcLine)[src_y_idx-1];
                    c_v = ((unsigned char*)pSrcLine)[src_y_idx+1];
                }
                else
                {
                    c_u = ((unsigned char*)pSrcLine)[src_y_idx+1];
                    c_v = ((unsigned char*)pSrcLine)[src_y_idx+3];
                }

                int r = 	(int)(1.f * (c_y - 16)                        + 1.14f  * (c_v - 128) );  //R
                int g = 	(int)(1.f * (c_y - 16) - 0.390f * (c_u - 128) - 0.58f * (c_v - 128)  );  //G
                int b = 	(int)(1.f * (c_y - 16) + 2.03f  * (c_u - 128)                        );  //B

                pDstLine[Bbp * x + _A] = 255;
                pDstLine[Bbp * x + _R] = CLIP(r);
                pDstLine[Bbp * x + _G] = CLIP(g);
                pDstLine[Bbp * x + _B] = CLIP(b);

            }
            pSrcLine += lSrcPitch;
            pDstLine += lDstPitch;
        }

    }
    else
    {
        m_pRenderTarget->unlock();
        m_pRenderTarget->onEndRender(false);
        return E_FAIL;
    }
    //Unlock the Video RenderTarget
    m_pRenderTarget->unlock();
    m_pRenderTarget->onEndRender(true);
    return S_OK;
}
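For reference, the inner loop above walks YUY2 data, where each 4-byte group Y0 U Y1 V carries two pixels that share one chroma pair. Below is a minimal sketch of that conversion for a single macropixel, using the same rounded BT.601-style coefficients as the code above (an approximation, not an exact standard matrix); the helper names are made up.

/* Sketch: one YUY2 macropixel (Y0 U Y1 V) -> two RGB pixels, with the same
   approximate coefficients as FillRGBWithYUY2 above. */
#include <stdio.h>

static unsigned char Clip8(int v) { return (unsigned char)(v < 0 ? 0 : v > 255 ? 255 : v); }

static void Yuy2PairToRgb(const unsigned char yuyv[4], unsigned char rgb[6])
{
    int u = yuyv[1] - 128;   /* shared by both pixels */
    int v = yuyv[3] - 128;
    for (int i = 0; i < 2; ++i) {
        int y = yuyv[2*i] - 16;
        rgb[3*i + 0] = Clip8((int)(1.f*y             + 1.14f*v));  /* R */
        rgb[3*i + 1] = Clip8((int)(1.f*y - 0.390f*u  - 0.58f*v));  /* G */
        rgb[3*i + 2] = Clip8((int)(1.f*y + 2.03f*u));              /* B */
    }
}

int main(void)
{
    unsigned char px[4] = { 235, 128, 16, 128 };  /* white and black, neutral chroma */
    unsigned char rgb[6];
    Yuy2PairToRgb(px, rgb);
    printf("%u %u %u / %u %u %u\n", rgb[0], rgb[1], rgb[2], rgb[3], rgb[4], rgb[5]);
    return 0;
}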
Example No. 5
HRESULT StaticSourceVideoPin::FillBuffer(IMediaSample * pSample)
{
	CheckPointer(pSample, E_POINTER);

	HRESULT hr = S_OK;
	DWORD frameDataCount;
	BYTE * frameData;

	//Set the start and stop times of the frame
	REFERENCE_TIME rtStart = this->m_rtLastFrame;
	REFERENCE_TIME rtStop  = rtStart + this->m_pFilter->m_params->m_rtFrameLength;
	pSample->SetTime(&rtStart, &rtStop);

	if (this->m_pFilter->m_rtStop > 0 && rtStop >= this->m_pFilter->m_rtStop)
	{
		//If a stop time is set and it has been exceeded, end the stream
		hr = S_FALSE;
		goto done;
	}
	this->m_rtLastFrame = rtStop;

    CHECK_HR(hr = pSample->GetPointer(&frameData));
	frameDataCount = pSample->GetSize();

	//If a bitmap is set, use it; otherwise fill the frame with noise
	if (this->m_pFilter->m_params->m_bitmapData == NULL)
	{
		for (DWORD i = 0; i < frameDataCount; i++)
			frameData[i] = (BYTE)(rand() % 256);
	}
	else
	{
		if (this->m_mt.subtype == MEDIASUBTYPE_RGB32)
		{
			//Output is RGB32
			if (this->m_pFilter->m_params->m_bitmapInfo.biBitCount == 32)
			{
				CopyMemory(frameData, this->m_pFilter->m_params->m_bitmapData, frameDataCount);
			}
			else
			{
				BITMAPINFOHEADER dstBmi = this->m_pFilter->m_params->m_bitmapInfo;
				dstBmi.biBitCount = 32;

				RGBtoRGB(this->m_pFilter->m_params->m_bitmapInfo, this->m_pFilter->m_params->m_bitmapData, frameData, dstBmi);
			}
		}
		else if (this->m_mt.subtype == MEDIASUBTYPE_RGB24)
		{
			//Output is RGB24
			if (this->m_pFilter->m_params->m_bitmapInfo.biBitCount == 24)
			{
				CopyMemory(frameData, this->m_pFilter->m_params->m_bitmapData, frameDataCount);
			}
			else
			{
				BITMAPINFOHEADER dstBmi = this->m_pFilter->m_params->m_bitmapInfo;
				dstBmi.biBitCount = 24;

				RGBtoRGB(this->m_pFilter->m_params->m_bitmapInfo, this->m_pFilter->m_params->m_bitmapData, frameData, dstBmi);
			}
		}
		else if (this->m_mt.subtype == MEDIASUBTYPE_YUY2)
		{
			//Output is YUY2
			RGBtoYUY2(this->m_pFilter->m_params->m_bitmapInfo, this->m_pFilter->m_params->m_bitmapData, frameData);
		}
		else if (this->m_mt.subtype == MEDIASUBTYPE_YV12)
		{
			//Output is YV12
			RGBtoYV12(this->m_pFilter->m_params->m_bitmapInfo, this->m_pFilter->m_params->m_bitmapData, frameData);
		}
	}

    CHECK_HR(hr = pSample->SetSyncPoint(TRUE));
	CHECK_HR(hr = pSample->SetActualDataLength(frameDataCount));
	
done:
    return hr;
}
Example No. 6
STDMETHODIMP CBaseSplitterFilter::get_MarkerCount(long* pMarkerCount)
{
	CheckPointer(pMarkerCount, E_POINTER);
	*pMarkerCount = (long)ChapGetCount();
	return S_OK;
}
Example No. 7
STDMETHODIMP CDSMSplitterFilter::GetKeyFrameCount(UINT& nKFs)
{
    CheckPointer(m_pFile, E_UNEXPECTED);
    nKFs = (UINT)m_pFile->m_sps.GetCount();
    return S_OK;
}
HRESULT CPushPinDesktop::FillBuffer(IMediaSample *pSample)
{
	__int64 startThisRound = StartCounter();
	BYTE *pData;

    CheckPointer(pSample, E_POINTER);
	if(m_bReReadRegistry) {
	  reReadCurrentPosition(1);
	}

    // Access the sample's data buffer
    pSample->GetPointer(&pData);

    // Make sure that we're still using video format
    ASSERT(m_mt.formattype == FORMAT_VideoInfo);

    VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*) m_mt.pbFormat;

	// for some reason the timings are messed up initially: there's no start time at all for the first frame (?), perhaps because we don't start in State_Running?
	// race condition?
	// so skip some calculations unless we're in State_Running
	FILTER_STATE myState;
	CSourceStream::m_pFilter->GetState(INFINITE, &myState);
	bool fullyStarted = myState == State_Running;
	
	boolean gotNew = false;
	while(!gotNew) {

      CopyScreenToDataBlock(hScrDc, pData, (BITMAPINFO *) &(pVih->bmiHeader), pSample);
	
	  if(m_bDeDupe) {
			if(memcmp(pData, pOldData, pSample->GetSize())==0) { // took desktop:  10ms for 640x1152, still 100 fps uh guess...
			  Sleep(m_millisToSleepBeforePollForChanges);
			} else {
			  gotNew = true;
			  memcpy( /* dest */ pOldData, pData, pSample->GetSize()); // took 4ms for 640x1152, but it's worth it LOL.
			  // LODO memcmp and memcpy in the same loop LOL.
			}
	  } else {
		// it's always new for everyone else!
	    gotNew = true;
	  }
	}
	// capture how long it took before we add in our own arbitrary delay to enforce fps...
	long double millisThisRoundTook = GetCounterSinceStartMillis(startThisRound);
	fastestRoundMillis = min(millisThisRoundTook, fastestRoundMillis); // keep stats :)
	sumMillisTook += millisThisRoundTook;

	CRefTime now;
	CRefTime endFrame;
    CSourceStream::m_pFilter->StreamTime(now);

    // wait until we "should" send this frame out...
	if((now > 0) && (now < previousFrameEndTime)) { // now > 0 to accommodate the case where there is no reference graph clock at all...also ignore bootstrap time :P
		while(now < previousFrameEndTime) { // guarantees monotonicity too :P
		  Sleep(1);
          CSourceStream::m_pFilter->StreamTime(now);
		}
		// avoid a tidge of creep since we sleep until [typically] just past the previous end.
		endFrame = previousFrameEndTime + m_rtFrameLength;
	    previousFrameEndTime = endFrame;
	    
	} else {
	  if(show_performance)
	    LocalOutput("it missed a frame--can't keep up %d", countMissed++); // we don't miss time typically I don't think, unless de-dupe is turned on, or aero, or slow computer, buffering problems downstream, etc.
	  // have to add a bit here, or it will always be "it missed some time" for the next round...forever!
	  endFrame = now + m_rtFrameLength;
	  // most of this stuff I just made up because it "sounded right"
	  //LocalOutput("checking to see if I can catch up again now: %llu previous end: %llu subtr: %llu %i", now, previousFrameEndTime, previousFrameEndTime - m_rtFrameLength, previousFrameEndTime - m_rtFrameLength);
	  if(now > (previousFrameEndTime - (long long) m_rtFrameLength)) { // do I need a long long cast?
		// let it pretend and try to catch up, it's not quite a frame behind
        previousFrameEndTime = previousFrameEndTime + m_rtFrameLength;
	  } else {
		endFrame = now + m_rtFrameLength/2; // ?? seems to work...I guess...
		previousFrameEndTime = endFrame;
	  }
	    
	}
	previousFrameEndTime = max(0, previousFrameEndTime);// avoid startup negatives, which would kill our math on the next loop...
    
	// LocalOutput("marking frame with timestamps: %llu %llu", now, endFrame);
    pSample->SetTime((REFERENCE_TIME *) &now, (REFERENCE_TIME *) &endFrame);
	//pSample->SetMediaTime((REFERENCE_TIME *)&now, (REFERENCE_TIME *) &endFrame); //useless seemingly

	if(fullyStarted) {
      m_iFrameNumber++;
	}

	// Set TRUE on every sample for uncompressed frames http://msdn.microsoft.com/en-us/library/windows/desktop/dd407021%28v=vs.85%29.aspx
    pSample->SetSyncPoint(TRUE);

	// only set discontinuous for the first...I think...
	pSample->SetDiscontinuity(m_iFrameNumber <= 1);

    // the swprintf costs like 0.04ms (25000 fps LOL)
	m_fFpsSinceBeginningOfTime = ((double) m_iFrameNumber)/(GetTickCount() - globalStart)*1000;
	swprintf(out, L"done frame! total frames: %d this one %dx%d -> (%dx%d) took: %.02Lfms, %.02f ave fps (%.02f is the theoretical max fps based on this round, ave. possible fps %.02f, fastest round fps %.02f, negotiated fps %.06f), frame missed %d", 
		m_iFrameNumber, m_iCaptureConfigHeight, m_iCaptureConfigWidth, getNegotiatedFinalWidth(), getNegotiatedFinalHeight(), millisThisRoundTook, m_fFpsSinceBeginningOfTime, 1.0*1000/millisThisRoundTook,   
		/* average */ 1.0*1000*m_iFrameNumber/sumMillisTook, 1.0*1000/fastestRoundMillis, GetFps(), countMissed);
//#ifdef _DEBUG // probably not worth it but we do hit this a lot...hmm...
	LocalOutput(out);
	set_config_string_setting(L"frame_stats", out);
//#endif
    return S_OK;
}
//
// DecideBufferSize
//
// This will always be called after the format has been successfully
// negotiated (this is negotiatebuffersize). So we have a look at m_mt to see what size image we agreed on.
// Then we can ask for buffers of the correct size to contain them.
//
HRESULT CPushPinDesktop::DecideBufferSize(IMemAllocator *pAlloc,
                                      ALLOCATOR_PROPERTIES *pProperties)
{
    CheckPointer(pAlloc,E_POINTER);
    CheckPointer(pProperties,E_POINTER);

    CAutoLock cAutoLock(m_pFilter->pStateLock());
    HRESULT hr = NOERROR;

    VIDEOINFO *pvi = (VIDEOINFO *) m_mt.Format();
	BITMAPINFOHEADER header = pvi->bmiHeader;
	ASSERT(header.biPlanes == 1); // sanity check
	// ASSERT(header.biCompression == 0); // meaning "none" sanity check, unless we are allowing for BI_BITFIELDS [?]
	// now try to avoid this crash [XP, VLC 1.1.11]: vlc -vvv dshow:// :dshow-vdev="screen-capture-recorder" :dshow-adev --sout  "#transcode{venc=theora,vcodec=theo,vb=512,scale=0.7,acodec=vorb,ab=128,channels=2,samplerate=44100,audio-sync}:standard{access=file,mux=ogg,dst=test.ogv}" with 10x10 or 1000x1000
	// LODO check if biClrUsed is passed in right for 16 bit [I'd guess it is...]
	// pProperties->cbBuffer = pvi->bmiHeader.biSizeImage; // too small. Apparently *way* too small.
	
	int bytesPerLine;
	// there may be a windows method that would do this for us...GetBitmapSize(&header); but might be too small for VLC? LODO try it :)
	// some pasted code...
	int bytesPerPixel = (header.biBitCount/8);
	if(m_bConvertToI420) {
	  bytesPerPixel = 32/8; // we convert from a 32 bit to i420, so need more space in this case
	}

    bytesPerLine = header.biWidth * bytesPerPixel;
    /* round up to a dword boundary */
    if (bytesPerLine & 0x0003) 
    {
      bytesPerLine |= 0x0003;
      ++bytesPerLine;
    }

	ASSERT(header.biHeight > 0); // sanity check
	ASSERT(header.biWidth > 0); // sanity check
	// NB that we are adding in space for a final "pixel array" (http://en.wikipedia.org/wiki/BMP_file_format#DIB_Header_.28Bitmap_Information_Header.29) even though we typically don't need it, this seems to fix the segfaults
	// maybe somehow down the line some VLC thing thinks it might be there...weirder than weird.. LODO debug it LOL.
	int bitmapSize = 14 + header.biSize + (long)(bytesPerLine)*(header.biHeight) + bytesPerLine*header.biHeight;
	pProperties->cbBuffer = bitmapSize;
	//pProperties->cbBuffer = max(pProperties->cbBuffer, m_mt.GetSampleSize()); // didn't help anything
	if(m_bConvertToI420) {
	  pProperties->cbBuffer = header.biHeight * header.biWidth*3/2; // necessary to prevent an "out of memory" error for FMLE. Yikes. Oh wow yikes.
	}

    pProperties->cBuffers = 1; // 2 here doesn't seem to help the crashes...

    // Ask the allocator to reserve us some sample memory. NOTE: the function
    // can succeed (return NOERROR) but still not have allocated the
    // memory that we requested, so we must check we got whatever we wanted.
    ALLOCATOR_PROPERTIES Actual;
    hr = pAlloc->SetProperties(pProperties,&Actual);
    if(FAILED(hr))
    {
        return hr;
    }

    // Is this allocator unsuitable?
    if(Actual.cbBuffer < pProperties->cbBuffer)
    {
        return E_FAIL;
    }

	// now some "once per run" setups
	
	// LODO reset aer with each run...somehow...somehow...Stop method or something...
	OSVERSIONINFOEX version;
    ZeroMemory(&version, sizeof(OSVERSIONINFOEX));
    version.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
	GetVersionEx((LPOSVERSIONINFO)&version);
	if(version.dwMajorVersion >= 6) { // meaning vista +
	  if(read_config_setting(TEXT("disable_aero_for_vista_plus_if_1"), 0) == 1)
	    turnAeroOn(false);
	  else
	    turnAeroOn(true);
	}
	
	if(pOldData) {
		free(pOldData);
		pOldData = NULL;
	}
    pOldData = (BYTE *) malloc(max(pProperties->cbBuffer*pProperties->cBuffers, bitmapSize)); // we convert from a 32 bit to i420, so need more space, hence max
    memset(pOldData, 0, pProperties->cbBuffer*pProperties->cBuffers); // reset it just in case :P	
	
    // create a bitmap compatible with the screen DC
	if(hRawBitmap)
		DeleteObject (hRawBitmap);
	hRawBitmap = CreateCompatibleBitmap(hScrDc, getNegotiatedFinalWidth(), getNegotiatedFinalHeight());

    return NOERROR;

} // DecideBufferSize
Example No. 10
HRESULT CDVBSub::ParseSample(IMediaSample* pSample)
{
    CheckPointer(pSample, E_POINTER);
    HRESULT hr;
    BYTE* pData = nullptr;
    int nSize;
    DVB_SEGMENT_TYPE nCurSegment;

    hr = pSample->GetPointer(&pData);
    if (FAILED(hr) || pData == nullptr) {
        return hr;
    }
    nSize = pSample->GetActualDataLength();

    if (*((LONG*)pData) == 0xBD010000) {
        CGolombBuffer gb(pData, nSize);

        gb.SkipBytes(4);
        WORD wLength = (WORD)gb.BitRead(16);
        UNREFERENCED_PARAMETER(wLength);

        if (gb.BitRead(2) != 2) {
            return E_FAIL;  // type
        }

        gb.BitRead(2);      // scrambling
        gb.BitRead(1);      // priority
        gb.BitRead(1);      // alignment
        gb.BitRead(1);      // copyright
        gb.BitRead(1);      // original
        BYTE fpts = (BYTE)gb.BitRead(1);    // fpts
        BYTE fdts = (BYTE)gb.BitRead(1);    // fdts
        gb.BitRead(1);      // escr
        gb.BitRead(1);      // esrate
        gb.BitRead(1);      // dsmtrickmode
        gb.BitRead(1);      // morecopyright
        gb.BitRead(1);      // crc
        gb.BitRead(1);      // extension
        gb.BitRead(8);      // hdrlen

        if (fpts) {
            BYTE b = (BYTE)gb.BitRead(4);
            if (!(fdts && b == 3 || !fdts && b == 2)) {
                ASSERT(0);
                return E_FAIL;
            }

            REFERENCE_TIME pts = 0;
            pts |= gb.BitRead(3) << 30;
            MARKER; // 32..30
            pts |= gb.BitRead(15) << 15;
            MARKER; // 29..15
            pts |= gb.BitRead(15);
            MARKER; // 14..0
            pts = 10000 * pts / 90;

            m_rtStart = pts;
            m_rtStop  = pts + 1;
        } else {
            m_rtStart = INVALID_TIME;
            m_rtStop  = INVALID_TIME;
        }

        nSize -= 14;
        pData += 14;
        pSample->GetTime(&m_rtStart, &m_rtStop);
        pSample->GetMediaTime(&m_rtStart, &m_rtStop);
    } else if (SUCCEEDED(pSample->GetTime(&m_rtStart, &m_rtStop))) {
        pSample->SetTime(&m_rtStart, &m_rtStop);
    }

    if (AddToBuffer(pData, nSize) == S_OK) {
        CGolombBuffer gb(m_pBuffer + m_nBufferReadPos, m_nBufferWritePos - m_nBufferReadPos);
        int nLastPos = 0;

        while (gb.RemainingSize() >= 6) { // Ensure there is enough data to parse the entire segment header
            if (gb.ReadByte() == 0x0F) {
                TRACE_DVB(_T("DVB - ParseSample\n"));

                WORD wPageId;
                WORD wSegLength;

                nCurSegment = (DVB_SEGMENT_TYPE)gb.ReadByte();
                wPageId = gb.ReadShort();
                wSegLength = gb.ReadShort();

                if (gb.RemainingSize() < wSegLength) {
                    hr = S_FALSE;
                    break;
                }

                switch (nCurSegment) {
                    case PAGE: {
                        if (m_pCurrentPage != nullptr) {
                            TRACE_DVB(_T("DVB - Force End display"));
                            EnqueuePage(m_rtStart);
                        }
                        UpdateTimeStamp(m_rtStart);

                        CAutoPtr<DVB_PAGE> pPage;
                        ParsePage(gb, wSegLength, pPage);

                        if (pPage->pageState == DPS_ACQUISITION || pPage->pageState == DPS_MODE_CHANGE) {
                            m_pCurrentPage = pPage;
                            m_pCurrentPage->rtStart = m_rtStart;
                            m_pCurrentPage->rtStop  = m_pCurrentPage->rtStart + m_pCurrentPage->pageTimeOut * 10000000;

                            TRACE_DVB(_T("DVB - Page started [pageState = %d] %s, TimeOut = %ds\n"), m_pCurrentPage->pageState, ReftimeToString(m_rtStart), m_pCurrentPage->pageTimeOut);
                        } else if (!m_Pages.IsEmpty()) {
                            m_pCurrentPage = pPage;
                            m_pCurrentPage->rtStart = m_rtStart;
                            m_pCurrentPage->rtStop  = m_pCurrentPage->rtStart + m_pCurrentPage->pageTimeOut * 10000000;

                            // Copy data from the previous page
                            DVB_PAGE* pPrevPage = m_Pages.GetTail();

                            memcpy(m_pCurrentPage->regions, pPrevPage->regions, sizeof(m_pCurrentPage->regions));

                            for (POSITION pos = pPrevPage->objects.GetHeadPosition(); pos;) {
                                m_pCurrentPage->objects.AddTail(pPrevPage->objects.GetNext(pos)->Copy());
                            }

                            for (POSITION pos = pPrevPage->CLUTs.GetHeadPosition(); pos;) {
                                m_pCurrentPage->CLUTs.AddTail(DEBUG_NEW DVB_CLUT(*pPrevPage->CLUTs.GetNext(pos)));
                            }

                            TRACE_DVB(_T("DVB - Page started [update] %s, TimeOut = %ds\n"), ReftimeToString(m_rtStart), m_pCurrentPage->pageTimeOut);
                        } else {
                            TRACE_DVB(_T("DVB - Page update ignored %s\n"), ReftimeToString(m_rtStart));
                        }
                    }
                    break;
                    case REGION:
                        ParseRegion(gb, wSegLength);
                        TRACE_DVB(_T("DVB - Region\n"));
                        break;
                    case CLUT:
                        ParseClut(gb, wSegLength);
                        TRACE_DVB(_T("DVB - Clut\n"));
                        break;
                    case OBJECT:
                        ParseObject(gb, wSegLength);
                        TRACE_DVB(_T("DVB - Object\n"));
                        break;
                    case DISPLAY:
                        ParseDisplay(gb, wSegLength);
                        TRACE_DVB(_T("DVB - Display\n"));
                        break;
                    case END_OF_DISPLAY:
                        if (m_pCurrentPage == nullptr) {
                            TRACE_DVB(_T("DVB - Ignored End display %s: no current page\n"), ReftimeToString(m_rtStart));
                        } else if (m_pCurrentPage->rtStart < m_rtStart) {
                            TRACE_DVB(_T("DVB - End display"));
                            EnqueuePage(m_rtStart);
                        } else {
                            TRACE_DVB(_T("DVB - Ignored End display %s: no information on page duration\n"), ReftimeToString(m_rtStart));
                        }
                        break;
                    default:
                        break;
                }
                nLastPos = gb.GetPos();
            }
        }
        m_nBufferReadPos += nLastPos;
    }

    return hr;
}
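Both versions of ParseSample reassemble the 33-bit PES PTS from three fields (3 + 15 + 15 bits, separated by marker bits) and then convert the 90 kHz clock ticks into 100 ns REFERENCE_TIME units via pts * 10000 / 90. Below is a minimal stand-alone sketch of that arithmetic; the field values are made up for illustration.

/* Sketch of the PTS reassembly and 90 kHz -> 100 ns conversion done in
   ParseSample above; field values are illustrative only. */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint64_t bits32_30 = 1, bits29_15 = 0x1234, bits14_0 = 0x0567;

    uint64_t pts = (bits32_30 << 30) | (bits29_15 << 15) | bits14_0;  /* 90 kHz ticks */
    int64_t  rt  = (int64_t)(10000 * pts / 90);                       /* 100 ns units */

    printf("pts = %llu ticks -> %lld x 100 ns\n",
           (unsigned long long)pts, (long long)rt);
    return 0;
}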
Example No. 11
HRESULT CDVBSub::ParseSample (IMediaSample* pSample)
{
	CheckPointer (pSample, E_POINTER);
	HRESULT				hr;
	BYTE*				pData = NULL;
	int					nSize;
	DVB_SEGMENT_TYPE	nCurSegment;

    hr = pSample->GetPointer(&pData);
    if(FAILED(hr) || pData == NULL) return hr;
	nSize = pSample->GetActualDataLength();

	if (*((LONG*)pData) == 0xBD010000)
	{
		CGolombBuffer	gb (pData, nSize);

		gb.SkipBytes(4);
		WORD	wLength	= (WORD)gb.BitRead(16);
		
		if (gb.BitRead(2) != 2) return E_FAIL;		// type

		gb.BitRead(2);		// scrambling
		gb.BitRead(1);		// priority
		gb.BitRead(1);		// alignment
		gb.BitRead(1);		// copyright
		gb.BitRead(1);		// original
		BYTE fpts = (BYTE)gb.BitRead(1);		// fpts
		BYTE fdts = (BYTE)gb.BitRead(1);		// fdts
		gb.BitRead(1);	// escr
		gb.BitRead(1);	// esrate
		gb.BitRead(1);	// dsmtrickmode
		gb.BitRead(1);	// morecopyright
		gb.BitRead(1);	// crc
		gb.BitRead(1);	// extension
		gb.BitRead(8);	// hdrlen

		if(fpts)
		{
			BYTE b = (BYTE)gb.BitRead(4);
			if(!(fdts && b == 3 || !fdts && b == 2)) {ASSERT(0); return(E_FAIL);}

			REFERENCE_TIME	pts = 0;
			pts |= gb.BitRead(3) << 30; MARKER; // 32..30
			pts |= gb.BitRead(15) << 15; MARKER; // 29..15
			pts |= gb.BitRead(15); MARKER; // 14..0
			pts = 10000*pts/90;

			m_rtStart	= pts;
			m_rtStop	= pts+1;
		}
		else
		{
			m_rtStart	= INVALID_TIME;
			m_rtStop	= INVALID_TIME;
		}

		nSize -= 14;
		pData += 14;
		pSample->GetTime(&m_rtStart, &m_rtStop);
		pSample->GetMediaTime(&m_rtStart, &m_rtStop);
	}
	else
		if (SUCCEEDED (pSample->GetTime(&m_rtStart, &m_rtStop)))
			pSample->SetTime(&m_rtStart, &m_rtStop);

	//FILE* hFile = fopen ("D:\\Sources\\mpc-hc\\A garder\\TestSubRip\\dvbsub.dat", "ab");
	//if(hFile != NULL)
	//{
	//	//BYTE	Buff[5] = {48};

	//	//*((DWORD*)(Buff+1)) = lSampleLen;
	//	//fwrite (Buff,  1, sizeof(Buff), hFile);
	//	fwrite (pData, 1, lSampleLen, hFile);
	//	fclose(hFile);
	//}

	if (AddToBuffer (pData, nSize) == S_OK)
	{
		CGolombBuffer		gb (m_pBuffer+m_nBufferReadPos, m_nBufferWritePos-m_nBufferReadPos);
		int					nLastPos = 0;

		while (!gb.IsEOF())
		{
			if (gb.ReadByte() == 0x0F)
			{
				WORD				wPageId;
				WORD				wSegLength;

				nCurSegment	= (DVB_SEGMENT_TYPE) gb.ReadByte();
				wPageId			= gb.ReadShort();
				wSegLength		= gb.ReadShort();

				if (gb.RemainingSize() < wSegLength)
				{
					hr = S_FALSE;
					break;
				}

				switch (nCurSegment)
				{
				case PAGE :
					{
						CAutoPtr<DVB_PAGE>	pPage;
						ParsePage(gb, wSegLength, pPage);

						if (pPage->PageState == DPS_ACQUISITION)
						{
							m_pCurrentPage = pPage;
							m_pCurrentPage->rtStart = m_rtStart;
							TRACE_DVB ("DVB - Page started  %S\n", ReftimeToString(m_rtStart));
							m_rtStart = INVALID_TIME;
						}
						else
							TRACE_DVB ("DVB - Page update\n");
					}
					break;
				case REGION :
					ParseRegion(gb, wSegLength);
					TRACE_DVB ("DVB - Region\n");
					break;
				case CLUT :
					ParseClut(gb, wSegLength);
					TRACE_DVB ("DVB - Clut \n");
					break;
				case OBJECT :
					ParseObject(gb, wSegLength);
					TRACE_DVB ("DVB - Object\n");
					break;
				case DISPLAY :
					ParseDisplay(gb, wSegLength);
					break;
				case END_OF_DISPLAY :
					if (m_pCurrentPage != NULL && m_rtStart != INVALID_TIME)
					{
						m_pCurrentPage->rtStop = m_rtStart;
						TRACE_DVB ("DVB - End display %S - %S\n", ReftimeToString(m_pCurrentPage->rtStart), ReftimeToString(m_pCurrentPage->rtStop));
						m_Pages.AddTail (m_pCurrentPage.Detach());
					}
					break;
				default :
//					gb.SkipBytes(wSegLength);
					break;
				}
				nLastPos = gb.GetPos();
			}
		}
		m_nBufferReadPos += nLastPos;
	}

	return hr;
}
Example No. 12
STDMETHODIMP CLAVSubtitleFrame::GetBitmapCount(int *count)
{
  CheckPointer(count, E_POINTER);
  *count = m_NumBitmaps;
  return S_OK;
}
Example No. 13
STDMETHODIMP CLAVSubtitleFrame::GetClipRect(RECT *clipRect)
{
  CheckPointer(clipRect, E_POINTER);
  *clipRect = m_clipRect;
  return S_OK;
}
Example No. 14
STDMETHODIMP CLAVSubtitleFrame::GetOutputRect(RECT *outputRect)
{
  CheckPointer(outputRect, E_POINTER);
  *outputRect = m_outputRect;
  return S_OK;
}
Example No. 15
STDMETHODIMP CSubPicAllocatorPresenterImpl::GetBin(LPCSTR field, LPVOID* value, int* size)
{
    CheckPointer(value, E_POINTER);
    CheckPointer(size, E_POINTER);
    return E_INVALIDARG;
}
Example No. 16
 // INSSBuffer3
 HRESULT STDMETHODCALLTYPE GetLength(DWORD *pdwLength) { CheckPointer(pdwLength, E_POINTER); *pdwLength = m_dwLength; return S_OK; }
Example No. 17
STDMETHODIMP CBaseSplitterFilter::GetDuration(LONGLONG* pDuration)
{
	CheckPointer(pDuration, E_POINTER);
	*pDuration = m_rtDuration;
	return S_OK;
}
Example No. 18
 HRESULT STDMETHODCALLTYPE GetBuffer(BYTE **ppdwBuffer) { CheckPointer(ppdwBuffer, E_POINTER); *ppdwBuffer = m_pData; return S_OK; }
Example No. 19
HRESULT CDSMSplitterFilter::CreateOutputs(IAsyncReader* pAsyncReader)
{
    CheckPointer(pAsyncReader, E_POINTER);

    HRESULT hr = E_FAIL;

    m_pFile.Free();
    m_pFile.Attach(DEBUG_NEW CDSMSplitterFile(pAsyncReader, hr, *this, *this));
    if (!m_pFile) {
        return E_OUTOFMEMORY;
    }
    if (FAILED(hr)) {
        m_pFile.Free();
        return hr;
    }

    m_rtNewStart = m_rtCurrent = 0;
    m_rtNewStop = m_rtStop = m_rtDuration = m_pFile->m_rtDuration;

    CAtlArray<BYTE> ids;

    POSITION pos = m_pFile->m_mts.GetStartPosition();
    while (pos) {
        BYTE id;
        CMediaType mt;
        m_pFile->m_mts.GetNextAssoc(pos, id, mt);
        ids.Add(id);
    }

    qsort(ids.GetData(), ids.GetCount(), sizeof(BYTE), compare_id);

    for (size_t i = 0; i < ids.GetCount(); i++) {
        BYTE id = ids[i];
        CMediaType& mt = m_pFile->m_mts[id];

        CStringW name, lang;
        name.Format(L"Output %02u", id);

        CAtlArray<CMediaType> mts;
        mts.Add(mt);

        CAutoPtr<CBaseSplitterOutputPin> pPinOut(DEBUG_NEW CBaseSplitterOutputPin(mts, name, this, this, &hr));

        name.Empty();

        pos = m_pFile->m_sim[id].GetStartPosition();
        while (pos) {
            CStringA key;
            CStringW value;
            m_pFile->m_sim[id].GetNextAssoc(pos, key, value);
            pPinOut->SetProperty(CStringW(key), value);

            if (key == "NAME") {
                name = value;
            }
            if (key == "LANG") {
                lang = ISOLang::ISO6392ToLanguage(CStringA(value));
                if (lang.IsEmpty()) {
                    lang = value;
                }
            }
        }

        if (!name.IsEmpty() || !lang.IsEmpty()) {
            if (!name.IsEmpty()) {
                if (!lang.IsEmpty()) {
                    name += L" (" + lang + L")";
                }
            } else if (!lang.IsEmpty()) {
                name = lang;
            }
            pPinOut->SetName(name);
        }

        EXECUTE_ASSERT(SUCCEEDED(AddOutputPin(id, pPinOut)));
    }

    pos = m_pFile->m_fim.GetStartPosition();
    while (pos) {
        CStringA key;
        CStringW value;
        m_pFile->m_fim.GetNextAssoc(pos, key, value);
        SetProperty(CStringW(key), value);
    }

    for (size_t i = 0; i < m_resources.GetCount(); i++) {
        const CDSMResource& r = m_resources[i];
        if (r.mime == L"application/x-truetype-font" ||
                r.mime == L"application/x-font-ttf" ||
                r.mime == L"application/vnd.ms-opentype") {
            //m_fontinst.InstallFont(r.data);
            m_fontinst.InstallFontMemory(r.data.GetData(), (UINT)r.data.GetCount());
        }
    }

    return !m_pOutputs.IsEmpty() ? S_OK : E_FAIL;
}
Example No. 20
HRESULT CDXVA2SurfaceAllocator::Alloc()
{
  DbgLog((LOG_TRACE, 10, L"CDXVA2SurfaceAllocator::Alloc()"));
  HRESULT hr = S_OK;
  IDirectXVideoDecoderService *pDXVA2Service = nullptr;

  if (!m_pDec)
    return E_FAIL;

  CheckPointer(m_pDec->m_pD3DDevMngr, E_UNEXPECTED);
  hr = m_pDec->m_pD3DDevMngr->GetVideoService (m_pDec->m_hDevice, IID_IDirectXVideoDecoderService, (void**)&pDXVA2Service);
  CheckPointer (pDXVA2Service, E_UNEXPECTED);
  CAutoLock lock(this);

  hr = __super::Alloc();

  if (SUCCEEDED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> Releasing old resources"));
    // Free the old resources.
    m_pDec->FlushFromAllocator();
    Free();

    m_nSurfaceArrayCount = m_lCount;

    // Allocate a new array of pointers.
    m_ppRTSurfaceArray = new IDirect3DSurface9*[m_lCount];
    if (m_ppRTSurfaceArray == nullptr) {
      hr = E_OUTOFMEMORY;
    } else {
      ZeroMemory(m_ppRTSurfaceArray, sizeof(IDirect3DSurface9*) * m_lCount);
    }
  }

  // Allocate the surfaces.
  if (SUCCEEDED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> Allocating surfaces"));
    hr = pDXVA2Service->CreateSurface(
      m_pDec->m_dwSurfaceWidth,
      m_pDec->m_dwSurfaceHeight,
      m_lCount - 1,
      m_pDec->m_eSurfaceFormat,
      D3DPOOL_DEFAULT,
      0,
      DXVA2_VideoDecoderRenderTarget,
      m_ppRTSurfaceArray,
      nullptr
      );
  }

  if (SUCCEEDED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> Creating samples"));
    // Important : create samples in reverse order !
    for (int i = m_lCount-1; i >= 0; i--) {
      CDXVA2Sample *pSample = new CDXVA2Sample(this, &hr);
      if (pSample == nullptr) {
        hr = E_OUTOFMEMORY;
        break;
      }
      if (FAILED(hr)) {
        break;
      }
      // Assign the Direct3D surface pointer and the index.
      pSample->SetSurface(i, m_ppRTSurfaceArray[i]);

      // Add to the sample list.
      m_lFree.Add(pSample);
    }

    hr = m_pDec->CreateDXVA2Decoder(m_lCount, m_ppRTSurfaceArray);
    if (FAILED (hr)) {
      Free();
    }
  }

  m_lAllocated = m_lCount;

  if (SUCCEEDED(hr)) {
    m_bChanged = FALSE;
  }
  SafeRelease(&pDXVA2Service);
  return hr;
}
Example No. 21
HRESULT CHdmvSub::ParseSample(IMediaSample* pSample)
{
    CheckPointer(pSample, E_POINTER);
    HRESULT hr;
    REFERENCE_TIME rtStart = INVALID_TIME, rtStop = INVALID_TIME;
    BYTE* pData = nullptr;
    int lSampleLen;

    hr = pSample->GetPointer(&pData);
    if (FAILED(hr) || pData == nullptr) {
        return hr;
    }
    lSampleLen = pSample->GetActualDataLength();

    pSample->GetTime(&rtStart, &rtStop);
    if (pData) {
        CGolombBuffer SampleBuffer(pData, lSampleLen);

        while (!SampleBuffer.IsEOF()) {
            if (m_nCurSegment == NO_SEGMENT) {
                HDMV_SEGMENT_TYPE nSegType = (HDMV_SEGMENT_TYPE)SampleBuffer.ReadByte();
                unsigned short nUnitSize = SampleBuffer.ReadShort();
                lSampleLen -= 3;

                switch (nSegType) {
                    case PALETTE:
                    case OBJECT:
                    case PRESENTATION_SEG:
                    case END_OF_DISPLAY:
                        m_nCurSegment = nSegType;
                        AllocSegment(nUnitSize);
                        break;

                    case WINDOW_DEF:
                    case INTERACTIVE_SEG:
                    case HDMV_SUB1:
                    case HDMV_SUB2:
                        // Ignored stuff...
                        SampleBuffer.SkipBytes(nUnitSize);
                        break;
                    default:
                        return VFW_E_SAMPLE_REJECTED;
                }
            }

            if (m_nCurSegment != NO_SEGMENT) {
                if (m_nSegBufferPos < m_nSegSize) {
                    int nSize = min(m_nSegSize - m_nSegBufferPos, lSampleLen);
                    SampleBuffer.ReadBuffer(m_pSegBuffer + m_nSegBufferPos, nSize);
                    m_nSegBufferPos += nSize;
                }

                if (m_nSegBufferPos >= m_nSegSize) {
                    CGolombBuffer SegmentBuffer(m_pSegBuffer, m_nSegSize);

                    switch (m_nCurSegment) {
                        case PALETTE:
                            TRACE_HDMVSUB(_T("CHdmvSub:PALETTE            rtStart=%10I64d\n"), rtStart);
                            ParsePalette(&SegmentBuffer, m_nSegSize);
                            break;
                        case OBJECT:
                            TRACE_HDMVSUB(_T("CHdmvSub:OBJECT             %s\n"), ReftimeToString(rtStart));
                            ParseObject(&SegmentBuffer, m_nSegSize);
                            break;
                        case PRESENTATION_SEG:
                            TRACE_HDMVSUB(_T("CHdmvSub:PRESENTATION_SEG   %s (size=%d)\n"), ReftimeToString(rtStart), m_nSegSize);

                            // Enqueue the current presentation segment if any
                            EnqueuePresentationSegment(rtStart);
                            // Parse the new presentation segment
                            ParsePresentationSegment(rtStart, &SegmentBuffer);

                            break;
                        case WINDOW_DEF:
                            //TRACE_HDMVSUB(_T("CHdmvSub:WINDOW_DEF         %s\n"), ReftimeToString(rtStart));
                            break;
                        case END_OF_DISPLAY:
                            //TRACE_HDMVSUB(_T("CHdmvSub:END_OF_DISPLAY     %s\n"), ReftimeToString(rtStart));
                            break;
                        default:
                            TRACE_HDMVSUB(_T("CHdmvSub:UNKNOWN Seg %d     rtStart=0x%10dd\n"), m_nCurSegment, rtStart);
                    }

                    m_nCurSegment = NO_SEGMENT;
                }
            }
        }
    }

    return hr;
}
Example No. 22
HRESULT STDMETHODCALLTYPE CGraphConnector::GetInputFilter( IBaseFilter **InputFilter )
{ 
	CheckPointer(InputFilter,E_POINTER);
	return m_pInputFilter->NonDelegatingQueryInterface( __uuidof( IBaseFilter ), (void**)InputFilter );
}
Example No. 23
HRESULT xDSVideoRenderer::FillRGBWithYV12(IMediaSample * pSample)
{
    unsigned char* pDstData = NULL;
    unsigned char* pSrcData = NULL;

    IVideoRenderTarget::VRT_LockRect RTRect;

    int lDstPitch = 0;
    int lSrcPitch = 0;
    int Bbp = 0;

    m_pRenderTarget->onStartRender();
    m_pRenderTarget->lock(RTRect);
    pDstData = RTRect.mPixel;
    CheckPointer(pSample,E_POINTER);
    pSample->GetPointer( &pSrcData);
    lDstPitch = RTRect.mPicth ;
    lSrcPitch = m_lVidWidth ;//m_lVidPitch;
    Bbp = RTRect.mBytePerPixel;
    unsigned char* pSrcLine = pSrcData;
    unsigned char* pDstLine = pDstData ;//+ (RTRect.mHeight - 1) * RTRect.mWidth * Bbp;


    if(m_pRenderTarget->flipY())
    {
        pDstLine += (RTRect.mHeight - 1) * lDstPitch;
        lDstPitch = -lDstPitch;
    }

    int _R = 0 , _G = 1 , _B = 2, _A = 3;
    if(m_pRenderTarget->flipRGBA() )
    {
        _B = 0;
        _G = 1;
        _R = 2;
        _A = 3;
    }
    if(pSrcData == NULL || pDstData == NULL)
    {
#ifdef _DEBUG
        OutputDebugString(L"CVideoRender DoSampler 的Buffer为空\n");
#endif
        m_pRenderTarget->unlock();
        return E_FAIL;
    }

    if(Bbp == 3)
    {
        for(int y = 0 ; y < RTRect.mHeight ; ++y)
        {
            for(int x = 0 ; x < RTRect.mWidth ; ++x)
            {
                pDstLine[3 * x + _R] = pSrcLine[x];//pSrcLine[3 * x + 0];
                pDstLine[3 * x + _G] = pSrcLine[x];//pSrcLine[3 * x + 1];
                pDstLine[3 * x + _B] = pSrcLine[x];//pSrcLine[3 * x + 2];
            }
            pSrcLine += lSrcPitch;
            pDstLine += lDstPitch;
        }

    }
    else if(Bbp == 4)
    {
        for(int y = 0 ; y < RTRect.mHeight ; ++y)
        {
            for(int x = 0 ; x < RTRect.mWidth ; ++x)
            {
                pDstLine[4 * x + _R] = pSrcLine[x];//pSrcLine[3 * x + 0];
                pDstLine[4 * x + _G] = pSrcLine[x];//pSrcLine[3 * x + 1];
                pDstLine[4 * x + _B] = pSrcLine[x];//pSrcLine[3 * x + 2];
                pDstLine[4 * x + _A] = m_Alpha;
            }
            pSrcLine += lSrcPitch;
            pDstLine += lDstPitch;
        }

    }
    else
    {
        m_pRenderTarget->unlock();
        m_pRenderTarget->onEndRender(false);
        return E_FAIL;
    }
    //Unlock the Video RenderTarget
    m_pRenderTarget->unlock();
    m_pRenderTarget->onEndRender(true);
    return S_OK;
}
Example No. 24
HRESULT STDMETHODCALLTYPE CGraphConnector::GetConnectorInputPin( IPin **InputPin)
{
	CheckPointer(InputPin,E_POINTER);
	return m_pInputPin->NonDelegatingQueryInterface( __uuidof( IPin ), (void**)InputPin );
}
Example No. 25
BOOL CScreenSnap::GetScreenData(BYTE* pScreenData,DWORD &dwScreenData)
{
	CDC dc;
	dc.CreateDC("DISPLAY",NULL,NULL,NULL);
	CheckPointer(dc.GetSafeHdc(),FALSE);
	
	CBitmap bm;
	int nWidth	= GetSystemMetrics(SM_CXSCREEN);
	int nHeight	= GetSystemMetrics(SM_CYSCREEN);
	if(!bm.CreateCompatibleBitmap(&dc,nWidth,nHeight))
	{
		dc.DeleteDC();
		return FALSE;
	}

	CDC tdc;
	if(!tdc.CreateCompatibleDC(&dc))
	{
		dc.DeleteDC();
		return FALSE;
	}

	CBitmap*pOld = tdc.SelectObject(&bm);
	if(pOld == NULL)
	{
		tdc.DeleteDC();
		dc.DeleteDC();
		return FALSE;
	}

	tdc.BitBlt(0,0,nWidth,nHeight,&dc,0,0,SRCCOPY);
	tdc.SelectObject(pOld);
	
	BITMAP btm;
	bm.GetBitmap(&btm);
	DWORD nSize  = btm.bmWidthBytes * btm.bmHeight;
//	LPSTR lpData = (LPSTR)GlobalAllocPtr(GPTR,nSize);
//	if(lpData == NULL)
//	{
//		tdc.DeleteDC();
//		dc.DeleteDC();
//		return FALSE;
//	}

	//--------------------------------------------------------

	BITMAPINFOHEADER bih;
	bih.biBitCount		= btm.bmBitsPixel;
	bih.biClrImportant	= 0;
	bih.biClrUsed		= 0;
	bih.biCompression	= 0;
	bih.biHeight		= btm.bmHeight;
	bih.biPlanes		= 1;
	bih.biSize			= sizeof(BITMAPINFOHEADER);
	bih.biSizeImage		= nSize;
	bih.biWidth			= btm.bmWidth;
	bih.biXPelsPerMeter	= 0;
	bih.biYPelsPerMeter	= 0;

	//--------------------------------------------------------
	//Get the DIB color table
	if(!GetDIBits(dc,bm,0,bih.biHeight,(pScreenData + sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER)),(BITMAPINFO*)&bih,DIB_RGB_COLORS))
	{
		tdc.DeleteDC();
		dc.DeleteDC();
		return FALSE;
	}
		
	//--------------------------------------------------------
	
	BITMAPFILEHEADER bfh;
	bfh.bfReserved1	= bfh.bfReserved2 = 0;
	bfh.bfType		= ((WORD)('M'<< 8)|'B');
	bfh.bfSize		= 54 + nSize;
	bfh.bfOffBits	= 54;
	
	memcpy(pScreenData,&bfh,sizeof(BITMAPFILEHEADER));
	dwScreenData = sizeof(BITMAPFILEHEADER);
	
	memcpy(pScreenData + dwScreenData,&bih,sizeof(BITMAPINFOHEADER));
	dwScreenData += sizeof(BITMAPINFOHEADER);
	
//	memcpy(pScreenData,lpData,nSize);
	dwScreenData += nSize;

//	GlobalFreePtr(lpData);

	tdc.DeleteDC();
	dc.DeleteDC();

	return TRUE;
}
Example No. 26
//
// GetMediaType
//
// Prefer 5 formats - 8, 16 (*2), 24 or 32 bits per pixel
//
// Preferred types should be ordered by quality, with zero as highest quality.
// Therefore, iPosition =
//      0    Return a 32bit mediatype
//      1    Return a 24bit mediatype
//      2    Return 16bit RGB565
//      3    Return a 16bit mediatype (rgb555)
//      4    Return 8 bit palettised format
//      >4   Invalid
//
HRESULT CVCamPin::GetMediaType(int iPosition, CMediaType *pmt)
{
    CheckPointer(pmt,E_POINTER);
    CAutoLock cAutoLock(m_pFilter->pStateLock());

    if(iPosition < 0)
        return E_INVALIDARG;

    // Have we run off the end of types?
    if(iPosition > 4)
        return VFW_S_NO_MORE_ITEMS;

    VIDEOINFO *pvi = (VIDEOINFO *) pmt->AllocFormatBuffer(sizeof(VIDEOINFO));
    if(NULL == pvi)
        return(E_OUTOFMEMORY);

    // Initialize the VideoInfo structure before configuring its members
    ZeroMemory(pvi, sizeof(VIDEOINFO));

    switch(iPosition)
    {
        case 0:
        {    
            // Return our highest quality 32bit format

            // Since we use RGB888 (the default for 32 bit), there is
            // no reason to use BI_BITFIELDS to specify the RGB
            // masks. Also, not everything supports BI_BITFIELDS
            pvi->bmiHeader.biCompression = BI_RGB;
            pvi->bmiHeader.biBitCount    = 32;
            break;
        }

        case 1:
        {   // Return our 24bit format
            pvi->bmiHeader.biCompression = BI_RGB;
            pvi->bmiHeader.biBitCount    = 24;
            break;
        }

        case 2:
        {       
            // 16 bit per pixel RGB565

            // Place the RGB masks as the first 3 doublewords in the palette area
            for(int i = 0; i < 3; i++)
                pvi->TrueColorInfo.dwBitMasks[i] = bits565[i];

            pvi->bmiHeader.biCompression = BI_BITFIELDS;
            pvi->bmiHeader.biBitCount    = 16;
            break;
        }

        case 3:
        {   // 16 bits per pixel RGB555

            // Place the RGB masks as the first 3 doublewords in the palette area
            for(int i = 0; i < 3; i++)
                pvi->TrueColorInfo.dwBitMasks[i] = bits555[i];

            pvi->bmiHeader.biCompression = BI_BITFIELDS;
            pvi->bmiHeader.biBitCount    = 16;
            break;
        }

        case 4:
        {   // 8 bit palettised

            pvi->bmiHeader.biCompression = BI_RGB;
            pvi->bmiHeader.biBitCount    = 8;
            pvi->bmiHeader.biClrUsed     = iPALETTE_COLORS;
            break;
        }
    }

    // Adjust the parameters common to all formats
    pvi->bmiHeader.biSize       = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth      = m_iImageWidth;
    pvi->bmiHeader.biHeight     = m_iImageHeight;
    pvi->bmiHeader.biPlanes     = 1;
    pvi->bmiHeader.biSizeImage  = GetBitmapSize(&pvi->bmiHeader);
    pvi->bmiHeader.biClrImportant = 0;

    SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
    SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle

    pmt->SetType(&MEDIATYPE_Video);
    pmt->SetFormatType(&FORMAT_VideoInfo);
    pmt->SetTemporalCompression(FALSE);

    // Work out the GUID for the subtype from the header info.
    const GUID SubTypeGUID = GetBitmapSubtype(&pvi->bmiHeader);
    pmt->SetSubtype(&SubTypeGUID);
    pmt->SetSampleSize(pvi->bmiHeader.biSizeImage);

    return NOERROR;

} // GetMediaType
Example No. 27
void ProcessDecaySrc()
{  
  long mat, iso, nuc, ptr, src;
  double vol, adens, lambda, tot, prev, I;

  fprintf(out, "Processing decay source...\n");
  
  /* Check total source rate */
  
  if (RDB[DATA_TOT_PHOTON_SRC_RATE] == 0.0)
    Error(0, "Total photon source rate is zero in decay source mode");

  /* Loop over materials */

  mat = (long)RDB[DATA_PTR_M0];
  while (mat > VALID_PTR)
    {
      /* Get volume */

      vol = RDB[mat + MATERIAL_VOLUME];

      /* Check total photon source rate */
	  
      if ((vol == 0.0) || ((RDB[mat + MATERIAL_PHOTON_SRC_RATE]/
			    RDB[DATA_TOT_PHOTON_SRC_RATE]) < 1E-19))
	{
	  /* Next material */
	  
	  mat = NextItem(mat);

	  /* Cycle loop */

	  continue;
	}

      /* Check that the pointer is not already defined (NOTE: this is   */
      /* mainly so that if this routine is ever called from a loop, no  */
      /* memory is allocated unnecessarily). */

      if ((long)RDB[mat + MATERIAL_PTR_DECAY_SRC] > VALID_PTR)
	Die(FUNCTION_NAME, "Pointer to decay source already exists");

      /* Avoid compiler warning */

      src = -1;

      /* Reset total */

      tot = 0.0;

      /* Loop over composition */

      iso = (long)RDB[mat + MATERIAL_PTR_ORIG_NUC_COMP];
      while (iso > VALID_PTR)
	{
	  /* Get atomic density */
	  
	  adens = RDB[iso + COMPOSITION_ADENS]*1E+24;

	  /* Get pointer to nuclide data */

	  nuc = (long)RDB[iso + COMPOSITION_PTR_NUCLIDE];
	  CheckPointer(FUNCTION_NAME, "(nuc)", DATA_ARRAY, nuc);

	  /* Get decay constant */

	  lambda = RDB[nuc + NUCLIDE_LAMBDA];

	  /* Get total intensity */

	  I = RDB[nuc + NUCLIDE_SPEC_PHOTON_I];

	  /* Check intensity */

	  if (I*lambda*adens*vol/RDB[mat + MATERIAL_PHOTON_SRC_RATE] < 1E-18)
	    {
	      /* Skip nuclide */

	      iso = NextItem(iso);

	      /* Cycle loop */

	      continue;
	    }

	  /* Create entry */

	  src = NewItem(mat + MATERIAL_PTR_DECAY_SRC, SRC_DECCAY_BLOCK_SIZE);
	      
	  /* Put nuclide pointer */

	  WDB[src + SRC_DECCAY_PTR_NUCLIDE] = (double)nuc;
	      
	  /* Put emission rate */

	  WDB[src + SRC_DECCAY_I] = I*lambda*adens*vol;

	  /* Reset weight */

	  WDB[src + SRC_DECCAY_WGT] = 1.0;

	  /* Add to total */

	  tot = tot + RDB[src + SRC_DECCAY_I];

	  /* Put pointer to line spectra */

	  ptr = (long)RDB[nuc + NUCLIDE_PTR_PHOTON_LINE_SPEC];
	  CheckPointer(FUNCTION_NAME, "(ptr)", DATA_ARRAY, ptr);
	  WDB[src + SRC_DECCAY_PTR_SPEC] = (double)ptr;
	  
	  /* Next nuclide */

	  iso = NextItem(iso);
	}
     
      /* Check if source was created */

      if (src > VALID_PTR)
	{
	  /* Close list */

	  CloseList(src);

	  /* Sort */

	  SortList(src, SRC_DECCAY_I, SORT_MODE_DESCEND);

	  /* Check total */

	  if (tot == 0.0)
	    Die(FUNCTION_NAME, "WTF?");
	  
	  /* Calculate cumulative probabilities */

	  prev = 0.0;
	  
	  src = (long)RDB[mat + MATERIAL_PTR_DECAY_SRC];
	  while (src > VALID_PTR)
	    {
	      /* Calculate cumulative probability */
	      
	      WDB[src + SRC_DECCAY_CUM_P] = 
		(prev + RDB[src + SRC_DECCAY_I])/tot;
	      
	      /* Update previous */
	      
	      prev = prev + RDB[src + SRC_DECCAY_I];

	      /* Next */
	      
	      src = NextItem(src);
	    }

	  /* Allocate memory for sampled stats */

	  ptr = NewStat("SAMPLED_PHOTON_SRC", 1, 1);
	  WDB[mat + MATERIAL_SAMPLED_PHOTON_SRC] = (double)ptr;  
	}

      /* Next material */

      mat = NextItem(mat);
    } 

  fprintf(out, "OK.\n\n");
}
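The per-material list built above stores a cumulative probability (SRC_DECCAY_CUM_P) for each emitter so that a nuclide can later be picked with a single uniform random number. Below is a hedged sketch of that sampling step outside the Serpent data structures; the array and names are purely illustrative.

/* Sketch of sampling an emitter from a cumulative probability table like
   the one ProcessDecaySrc() builds; a plain array stands in for the list. */
#include <stdio.h>

/* Return the index of the first entry whose cumulative probability exceeds
   the uniform random number xi in [0,1). */
static int SampleSource(const double *cum_p, int n, double xi)
{
  int i;

  for (i = 0; i < n; i++)
    if (xi < cum_p[i])
      return i;

  return n - 1;  /* guard against rounding at the upper end */
}

int main(void)
{
  double cum_p[3] = { 0.6, 0.9, 1.0 };  /* relative intensities 0.6, 0.3, 0.1 */

  printf("%d %d %d\n",
         SampleSource(cum_p, 3, 0.10),
         SampleSource(cum_p, 3, 0.75),
         SampleSource(cum_p, 3, 0.95));  /* prints "0 1 2" */

  return 0;
}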
Example No. 28
STDMETHODIMP CSubPicAllocatorPresenterImpl::GetBool(LPCSTR field, bool* value)
{
    CheckPointer(value, E_POINTER);
    return E_INVALIDARG;
}
Example No. 29
STDMETHODIMP CDecAvcodec::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
  DestroyDecoder();
  DbgLog((LOG_TRACE, 10, L"Initializing ffmpeg for codec %S", avcodec_get_name(codec)));

  BITMAPINFOHEADER *pBMI = nullptr;
  videoFormatTypeHandler((const BYTE *)pmt->Format(), pmt->FormatType(), &pBMI);

  m_pAVCodec = avcodec_find_decoder(codec);
  CheckPointer(m_pAVCodec, VFW_E_UNSUPPORTED_VIDEO);

  m_pAVCtx = avcodec_alloc_context3(m_pAVCodec);
  CheckPointer(m_pAVCtx, E_POINTER);

  DWORD dwDecFlags = m_pCallback->GetDecodeFlags();

  // Use parsing for mpeg1/2 at all times, or H264/HEVC when its not from LAV Splitter
  if(    codec == AV_CODEC_ID_MPEG1VIDEO
      || codec == AV_CODEC_ID_MPEG2VIDEO
      || (!(dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER) &&
         (pmt->subtype == MEDIASUBTYPE_H264
       || pmt->subtype == MEDIASUBTYPE_h264
       || pmt->subtype == MEDIASUBTYPE_X264
       || pmt->subtype == MEDIASUBTYPE_x264
       || pmt->subtype == MEDIASUBTYPE_H264_bis
       || pmt->subtype == MEDIASUBTYPE_HEVC))) {
    m_pParser = av_parser_init(codec);
  }

  LONG biRealWidth = pBMI->biWidth, biRealHeight = pBMI->biHeight;
  if (pmt->formattype == FORMAT_VideoInfo || pmt->formattype == FORMAT_MPEGVideo) {
    VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->Format();
    if (vih->rcTarget.right != 0 && vih->rcTarget.bottom != 0) {
      biRealWidth  = vih->rcTarget.right;
      biRealHeight = vih->rcTarget.bottom;
    }
  } else if (pmt->formattype == FORMAT_VideoInfo2 || pmt->formattype == FORMAT_MPEG2Video) {
    VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2 *)pmt->Format();
    if (vih2->rcTarget.right != 0 && vih2->rcTarget.bottom != 0) {
      biRealWidth  = vih2->rcTarget.right;
      biRealHeight = vih2->rcTarget.bottom;
    }
  }

  m_pAVCtx->codec_id              = codec;
  m_pAVCtx->codec_tag             = pBMI->biCompression;
  m_pAVCtx->coded_width           = pBMI->biWidth;
  m_pAVCtx->coded_height          = abs(pBMI->biHeight);
  m_pAVCtx->bits_per_coded_sample = pBMI->biBitCount;
  m_pAVCtx->err_recognition       = 0;
  m_pAVCtx->workaround_bugs       = FF_BUG_AUTODETECT;
  m_pAVCtx->refcounted_frames     = 1;

  // Setup threading
  // Thread Count. 0 = auto detect
  int thread_count = m_pSettings->GetNumThreads();
  if (thread_count == 0) {
    thread_count = av_cpu_count();
  }
  m_pAVCtx->thread_count = max(1, min(thread_count, AVCODEC_MAX_THREADS));

  if (dwDecFlags & LAV_VIDEO_DEC_FLAG_NO_MT || codec == AV_CODEC_ID_MPEG4) {
    m_pAVCtx->thread_count = 1;
  }

  m_pFrame = av_frame_alloc();
  CheckPointer(m_pFrame, E_POINTER);

  // Process Extradata
  BYTE *extra = nullptr;
  size_t extralen = 0;
  getExtraData(*pmt, nullptr, &extralen);

  BOOL bH264avc = FALSE;
  if (pmt->formattype == FORMAT_MPEG2Video && (m_pAVCtx->codec_tag == MAKEFOURCC('a','v','c','1') || m_pAVCtx->codec_tag == MAKEFOURCC('A','V','C','1') || m_pAVCtx->codec_tag == MAKEFOURCC('C','C','V','1'))) {
    // Reconstruct AVC1 extradata format
    DbgLog((LOG_TRACE, 10, L"-> Processing AVC1 extradata of %d bytes", extralen));
    MPEG2VIDEOINFO *mp2vi = (MPEG2VIDEOINFO *)pmt->Format();
    extralen += 7;
    extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
    extra[0] = 1;
    extra[1] = (BYTE)mp2vi->dwProfile;
    extra[2] = 0;
    extra[3] = (BYTE)mp2vi->dwLevel;
    extra[4] = (BYTE)(mp2vi->dwFlags ? mp2vi->dwFlags : 4) - 1;

    // only process extradata if available
    uint8_t ps_count = 0;
    if (extralen > 7) {
      // Actually copy the metadata into our new buffer
      size_t actual_len;
      getExtraData(*pmt, extra + 6, &actual_len);

      // Count the number of SPS/PPS in them and set the length
      // We'll put them all into one block and add a second block with 0 elements afterwards
      // The parsing logic does not care what type they are, it just expects 2 blocks.
      BYTE *p = extra + 6, *end = extra + 6 + actual_len;
      BOOL bSPS = FALSE, bPPS = FALSE;
      while (p + 1 < end) {
        unsigned len = (((unsigned)p[0] << 8) | p[1]) + 2;
        if (p + len > end) {
          break;
        }
        if ((p[2] & 0x1F) == 7)
          bSPS = TRUE;
        if ((p[2] & 0x1F) == 8)
          bPPS = TRUE;
        ps_count++;
        p += len;
      }
    }
    extra[5] = ps_count;
    extra[extralen - 1] = 0;

    bH264avc = TRUE;
    m_pAVCtx->extradata = extra;
    m_pAVCtx->extradata_size = (int)extralen;
  } else if (extralen > 0) {
    DbgLog((LOG_TRACE, 10, L"-> Processing extradata of %d bytes", extralen));
    if (pmt->subtype == MEDIASUBTYPE_LAV_RAWVIDEO) {
      if (extralen < sizeof(m_pAVCtx->pix_fmt)) {
        DbgLog((LOG_TRACE, 10, L"-> LAV RAW Video extradata is missing.."));
      } else {
        extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
        getExtraData(*pmt, extra, nullptr);
        m_pAVCtx->pix_fmt = *(AVPixelFormat *)extra;
        extralen -= sizeof(AVPixelFormat);
        memmove(extra, extra+sizeof(AVPixelFormat), extralen);
      }
    } else if (codec == AV_CODEC_ID_VP9) {
      // read custom vpcC headers
      if (extralen >= 16) {
        extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
        getExtraData(*pmt, extra, nullptr);

        if (AV_RB32(extra) == MKBETAG('v', 'p', 'c', 'C') && AV_RB8(extra + 4) == 1) {
          m_pAVCtx->profile = AV_RB8(extra + 8);
          m_pAVCtx->color_primaries = (AVColorPrimaries)AV_RB8(extra + 11);
          m_pAVCtx->color_trc = (AVColorTransferCharacteristic)AV_RB8(extra + 12);
          m_pAVCtx->colorspace = (AVColorSpace)AV_RB8(extra + 13);

          int bitdepth = AV_RB8(extra + 10) >> 4;
          if (m_pAVCtx->profile == 2 && bitdepth == 10) {
            m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P10;
          }
          else if (m_pAVCtx->profile == 2 && bitdepth == 12) {
            m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P12;
          }
        }

        av_freep(&extra);
        extralen = 0;
      }
STDMETHODIMP CAudioAnalyzer::get_CaptureConfiguration(int *pVal)
{
    CheckPointer(pVal, E_POINTER);
    *pVal = m_config;
    return S_OK;
}