Example #1
0
// IAMStreamConfig::GetFormat - returns a copy of the pin's current media type.
// The caller owns *ppmt and must release it with DeleteMediaType.
HRESULT STDMETHODCALLTYPE UVCamStream::GetFormat(AM_MEDIA_TYPE **ppmt)
{
	CheckPointer(ppmt,E_POINTER);
	CAutoLock cAutoLock(m_pFilter->pStateLock());
	*ppmt = CreateMediaType(&m_mt);
	// CreateMediaType allocates with CoTaskMemAlloc and may fail;
	// previously the NULL result was returned to the caller as S_OK.
	if (*ppmt == NULL)
		return E_OUTOFMEMORY;
	return S_OK;
}
// IAMStreamSelect::Info - describes the input stream at index lIndex.
// Every out parameter is optional (callers pass NULL for ones they don't need).
// On success the caller owns *ppmt (free with DeleteMediaType) and *ppszName
// (free with CoTaskMemFree).
STDMETHODIMP CStreamSwitcherFilter::Info(long lIndex, AM_MEDIA_TYPE** ppmt, DWORD* pdwFlags, LCID* plcid, DWORD* pdwGroup, WCHAR** ppszName, IUnknown** ppObject, IUnknown** ppUnk)
{
	CAutoLock cAutoLock(&m_csPins);

	// lIndex indexes the *connected* input pins only.
	CBasePin* pPin = GetConnectedInputPin(lIndex);
	if(!pPin) return E_INVALIDARG;

	// NOTE(review): the OUTPUT pin's current type is reported for every
	// stream, not the input pin's own type - presumably because all inputs
	// are switched onto one output format; confirm against the filter design.
	if(ppmt)
		*ppmt = CreateMediaType(&m_pOutput->CurrentMediaType());

	// The currently routed input is flagged as exclusively selected.
	if(pdwFlags)
		*pdwFlags = (m_pInput == pPin) ? AMSTREAMSELECTINFO_EXCLUSIVE : 0;

	// No locale information is tracked.
	if(plcid)
		*plcid = 0;

	// All streams belong to a single group.
	if(pdwGroup)
		*pdwGroup = 0;

	// Copy the pin name; on allocation failure *ppszName stays NULL and the
	// copy is silently skipped.
	if(ppszName && (*ppszName = (WCHAR*)CoTaskMemAlloc((wcslen(pPin->Name())+1)*sizeof(WCHAR))))
		wcscpy(*ppszName, pPin->Name());

	// No associated object / punk is exposed for these streams.
	if(ppObject)
		*ppObject = NULL;

	if(ppUnk)
		*ppUnk = NULL;

	return S_OK;
}
Example #3
0
STDMETHODIMP
CMediaSample::SetMediaType(AM_MEDIA_TYPE *pMediaType)
{
	/* Stores a copy of pMediaType as this sample's dynamic format change.
	   NULL resets the sample to "type unchanged". */

	/* Drop whatever media type we currently hold */
	if (m_pMediaType != NULL) {
		DeleteMediaType(m_pMediaType);
		m_pMediaType = NULL;
	}

	if (pMediaType != NULL) {
		ASSERT(pMediaType);
		ValidateReadPtr(pMediaType,sizeof(AM_MEDIA_TYPE));

		/* Keep an independent copy of the caller's type */
		m_pMediaType = CreateMediaType(pMediaType);
		if (m_pMediaType != NULL) {
			m_dwFlags |= Sample_TypeChanged;
			return NOERROR;
		}
	}

	/* Either an explicit reset (NULL) or the copy above failed */
	m_dwFlags &= ~Sample_TypeChanged;
	return (pMediaType == NULL) ? NOERROR : E_OUTOFMEMORY;
}
// IAMStreamConfig::GetFormat - returns the current format: the last type set
// via SetFormat, or the default in m_mt (which tracks the last GetStreamCaps /
// GetMediaType index used). Caller frees *ppmt with DeleteMediaType.
HRESULT STDMETHODCALLTYPE CPushPinDesktop::GetFormat(AM_MEDIA_TYPE **ppmt)
{
    // ppmt was previously dereferenced without validation.
    CheckPointer(ppmt, E_POINTER);
    CAutoLock cAutoLock(m_pFilter->pStateLock());

    *ppmt = CreateMediaType(&m_mt); // CoTaskMemAlloc-backed deep copy
    if (*ppmt == NULL)
        return E_OUTOFMEMORY;       // allocation failure was previously ignored
    return S_OK;
}
Example #5
0
// Queues a packet for delivery on this pin's worker thread.
// Blocks (simple sleep-poll back-pressure) while the queue exceeds its memory
// or size limits, then hands the packet to the parser. Ownership of pPacket
// transfers here; it is deleted if delivery has already failed.
HRESULT CLAVOutputPin::QueuePacket(Packet *pPacket)
{
  if(!ThreadExists()) return S_FALSE;

  CLAVSplitter *pSplitter = static_cast<CLAVSplitter*>(m_pFilter);

  // While everything is good AND no pin is drying AND the queue is full .. sleep
  // The queue has a "soft" limit of m_dwQueueHigh and a hard limit of 2x that.
  // That means, even if one pin is drying, we'll never exceed twice the soft limit.
  while(S_OK == m_hrDeliver 
    && (m_queue.DataSize() > m_dwQueueMaxMem
    || m_queue.Size() > 2*m_dwQueueHigh
    || (m_queue.Size() > m_dwQueueHigh && !pSplitter->IsAnyPinDrying())))
    Sleep(10);

  // Delivery stopped/failed while we were waiting - drop the packet.
  if(S_OK != m_hrDeliver) {
    SAFE_DELETE(pPacket);
    return m_hrDeliver;
  }

  {
    // Attach a pending dynamic media-type change to this packet, then clear it.
    CAutoLock lock(&m_csMT);
    if(m_newMT && pPacket) {
      DbgLog((LOG_TRACE, 10, L"::QueuePacket() - Found new Media Type"));
      pPacket->pmt = CreateMediaType(m_newMT);
      SetStreamMediaType(m_newMT);
      SAFE_DELETE(m_newMT);
    }
  }

  // The parser takes ownership and eventually queues the (possibly split) packets.
  m_Parser.Parse(m_StreamMT.subtype, pPacket);

  return m_hrDeliver;
}
Example #6
0
// IAMStreamConfig::GetStreamCaps - describes capability set iIndex.
// *pmt receives a copy of the pin's media type patched to RGB24 at
// 80*iIndex x 60*iIndex; pSCC receives the VIDEO_STREAM_CONFIG_CAPS covering
// the 80x60 .. 640x480 range. Caller frees *pmt with DeleteMediaType.
HRESULT STDMETHODCALLTYPE CVCamStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC)
{
    // Previously neither pointer nor the allocation below was validated.
    if (pmt == NULL || pSCC == NULL)
        return E_POINTER;
    if (iIndex < 0)
        return E_INVALIDARG; // a negative index would yield negative dimensions

    *pmt = CreateMediaType(&m_mt);
    if (*pmt == NULL)
        return E_OUTOFMEMORY;
    DECLARE_PTR(VIDEOINFOHEADER, pvi, (*pmt)->pbFormat);

//    if (iIndex == 0) iIndex = 4;
    if (iIndex == 0) iIndex = 8; // index 0 doubles as the 640x480 default

    pvi->bmiHeader.biCompression = BI_RGB;
    pvi->bmiHeader.biBitCount    = 24;
    pvi->bmiHeader.biSize       = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth      = 80 * iIndex;
    pvi->bmiHeader.biHeight     = 60 * iIndex;
    pvi->bmiHeader.biPlanes     = 1;
    pvi->bmiHeader.biSizeImage  = GetBitmapSize(&pvi->bmiHeader);
    pvi->bmiHeader.biClrImportant = 0;

    SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
    SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle

    (*pmt)->majortype = MEDIATYPE_Video;
    (*pmt)->subtype = MEDIASUBTYPE_RGB24;
    (*pmt)->formattype = FORMAT_VideoInfo;
    (*pmt)->bTemporalCompression = FALSE;
    (*pmt)->bFixedSizeSamples= FALSE;
    (*pmt)->lSampleSize = pvi->bmiHeader.biSizeImage;
    (*pmt)->cbFormat = sizeof(VIDEOINFOHEADER);

    DECLARE_PTR(VIDEO_STREAM_CONFIG_CAPS, pvscc, pSCC);

    pvscc->guid = FORMAT_VideoInfo;
    pvscc->VideoStandard = AnalogVideo_None;
    pvscc->InputSize.cx = 640;
    pvscc->InputSize.cy = 480;
    pvscc->MinCroppingSize.cx = 80;
    pvscc->MinCroppingSize.cy = 60;
    pvscc->MaxCroppingSize.cx = 640;
    pvscc->MaxCroppingSize.cy = 480;
    pvscc->CropGranularityX = 80;
    pvscc->CropGranularityY = 60;
    pvscc->CropAlignX = 0;
    pvscc->CropAlignY = 0;

    pvscc->MinOutputSize.cx = 80;
    pvscc->MinOutputSize.cy = 60;
    pvscc->MaxOutputSize.cx = 640;
    pvscc->MaxOutputSize.cy = 480;
    pvscc->OutputGranularityX = 0;
    pvscc->OutputGranularityY = 0;
    pvscc->StretchTapsX = 0;
    pvscc->StretchTapsY = 0;
    pvscc->ShrinkTapsX = 0;
    pvscc->ShrinkTapsY = 0;
    pvscc->MinFrameInterval = 200000;   // 100ns units -> 50 fps
    pvscc->MaxFrameInterval = 50000000; // -> 0.2 fps
    pvscc->MinBitsPerSecond = (80 * 60 * 3 * 8) / 5;  // smallest mode at 0.2 fps
    pvscc->MaxBitsPerSecond = 640 * 480 * 3 * 8 * 50; // largest mode at 50 fps

    return S_OK;
}
Example #7
0
// Format negotiation.
// Validates pwfx (16-bit int PCM or 32-bit float depending on build), asks the
// downstream sink chain to accept it, and optionally applies it at this stage.
// nApplyChangesDepth: 0 = probe only, INFINITE = apply at every stage,
// N > 0 = apply N stages deep. On success *pChOrder receives the channel order.
HRESULT CTimeStretchFilter::NegotiateFormat(const WAVEFORMATEXTENSIBLE* pwfx, int nApplyChangesDepth, ChannelOrder* pChOrder)
{
  if (!pwfx)
    return VFW_E_TYPE_NOT_ACCEPTED;

#ifdef INTEGER_SAMPLES
  // only accept 16bit int
  if (pwfx->Format.wBitsPerSample != 16 || pwfx->SubFormat != KSDATAFORMAT_SUBTYPE_PCM)
    return VFW_E_TYPE_NOT_ACCEPTED;
#else 
  // only accept 32bit float
  if (pwfx->Format.wBitsPerSample != 32 || pwfx->SubFormat != KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)
    return VFW_E_TYPE_NOT_ACCEPTED;
#endif

  // Unchanged format - nothing to negotiate.
  if (FormatsEqual(pwfx, m_pInputFormat))
  {
    *pChOrder = m_chOrder;
    return S_OK;
  }

  bool bApplyChanges = (nApplyChangesDepth != 0);
  if (nApplyChangesDepth != INFINITE && nApplyChangesDepth > 0)
    nApplyChangesDepth--;

  // The downstream chain must accept the format before we adopt it.
  HRESULT hr = m_pNextSink->NegotiateFormat(pwfx, nApplyChangesDepth, pChOrder);
  if (FAILED(hr))
    return hr;

  // (Removed: a dead "hr = VFW_E_CANNOT_CONNECT" assignment and an
  //  unreachable second "if (!pwfx)" check - pwfx was validated above and
  //  is never reassigned.)

  if (bApplyChanges)
  {
    LogWaveFormat(pwfx, "TS   - applying ");

    AM_MEDIA_TYPE tmp;
    HRESULT result = CreateAudioMediaType((WAVEFORMATEX*)pwfx, &tmp, true);
    if (SUCCEEDED(result))
    {
      if (m_pMediaType)
        DeleteMediaType(m_pMediaType);
      m_pMediaType = CreateMediaType(&tmp);
      // CreateMediaType made a deep copy, so release tmp's format block
      // (it was previously leaked on every applied format change).
      FreeMediaType(tmp);
    }

    SetInputFormat(pwfx);
    SetOutputFormat(pwfx);
    SetFormat(pwfx);
  }
  else
    LogWaveFormat(pwfx, "TS   -          ");

  m_chOrder = *pChOrder;

  return S_OK;
}
// IAMStreamConfig::GetStreamCaps - reports the capability at iIndex together
// with the matching media type. Most VIDEO_STREAM_CONFIG_CAPS fields are
// deprecated in the docs yet still read by some applications, so all are filled.
HRESULT STDMETHODCALLTYPE CPushPinDesktop::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC)
{
    CAutoLock cAutoLock(m_pFilter->pStateLock());

    // (Re)build m_mt for the requested index; some media types are shared
    // between indices, so m_mt is deliberately reused.
    HRESULT hr = GetMediaType(iIndex, &m_mt);
    if (FAILED(hr))
    {
        return hr;
    }

    // CoTaskMemAlloc'd deep copy - the caller frees it with DeleteMediaType.
    *pmt = CreateMediaType(&m_mt);
    if (*pmt == NULL) return E_OUTOFMEMORY;

    DECLARE_PTR(VIDEO_STREAM_CONFIG_CAPS, pvscc, pSCC);

    pvscc->VideoStandard = AnalogVideo_None;
    pvscc->InputSize.cx = getCaptureDesiredFinalWidth();
    pvscc->InputSize.cy = getCaptureDesiredFinalHeight();

    // We capture exactly one size, so cropping min == max == the capture size.
    pvscc->MinCroppingSize.cx = getCaptureDesiredFinalWidth();
    pvscc->MinCroppingSize.cy = getCaptureDesiredFinalHeight();
    pvscc->MaxCroppingSize.cx = getCaptureDesiredFinalWidth();
    pvscc->MaxCroppingSize.cy = getCaptureDesiredFinalHeight();

    pvscc->CropGranularityX = 1;
    pvscc->CropGranularityY = 1;
    pvscc->CropAlignX = 1;
    pvscc->CropAlignY = 1;

    pvscc->MinOutputSize.cx = 1;
    pvscc->MinOutputSize.cy = 1;
    pvscc->MaxOutputSize.cx = getCaptureDesiredFinalWidth();
    pvscc->MaxOutputSize.cy = getCaptureDesiredFinalHeight();
    pvscc->OutputGranularityX = 1;
    pvscc->OutputGranularityY = 1;

    pvscc->StretchTapsX = 1; // nominal single-tap stretch/shrink
    pvscc->StretchTapsY = 1;
    pvscc->ShrinkTapsX = 1;
    pvscc->ShrinkTapsY = 1;

    // The larger value is MinFrameInterval (longest frame duration), not the max.
    pvscc->MinFrameInterval = m_rtFrameLength;
    pvscc->MaxFrameInterval = 500000000; // 0.02 fps [could go lower, really]

    pvscc->MinBitsPerSecond = (LONG) 1*1*8*GetFps(); // 1x1 in 8-bit mode
    pvscc->MaxBitsPerSecond = (LONG) getCaptureDesiredFinalWidth()*getCaptureDesiredFinalHeight()*32*GetFps() + 44; // + 44 header size? + the palette?

    return hr;
}
Example #9
0
// IAMStreamConfig::GetFormat - when connected, returns a copy of the agreed
// media type; otherwise returns the preferred (or first) supported mode.
// Caller frees *ppmt with DeleteMediaType.
HRESULT STDMETHODCALLTYPE XnVideoStream::GetFormat(AM_MEDIA_TYPE **ppmt)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(ppmt);

	if (IsConnected())
	{
		*ppmt = CreateMediaType(&m_mt);
	}
	else
	{
		int iIndex = (m_nPreferredMode >= 0) ? m_nPreferredMode : 0;
		CMediaType mediaType;
		VIDEO_STREAM_CONFIG_CAPS vscc;
		// The result of GetStreamCapability was previously ignored, so a
		// failed lookup would silently return an empty media type.
		HRESULT hr = GetStreamCapability(iIndex, mediaType, vscc);
		if (FAILED(hr)) XN_METHOD_RETURN(hr);
		*ppmt = CreateMediaType(&mediaType);
	}

	// CreateMediaType allocates via CoTaskMemAlloc and can fail.
	if (*ppmt == NULL) XN_METHOD_RETURN(E_OUTOFMEMORY);

	XN_METHOD_RETURN(S_OK);
}
Example #10
0
// Returns the next queued video packet for this clip, with timestamps rebased
// from clip-local to playlist time, or NULL if nothing is available.
// The caller takes ownership of the returned packet.
Packet* CClip::ReturnNextVideoPacket(REFERENCE_TIME playlistOffset)
{
  CAutoLock vectorVLock(&m_sectionVectorVideo);
  if (!firstPacketReturned)
  {
//    CAutoLock lock (&m_sectionRead);
    // First call for this clip: set the flag and re-enter exactly once.
    firstPacketReturned=true;
    return ReturnNextVideoPacket(playlistOffset);
  }
  Packet* ret=NULL;
  if (sparseVideo && m_videoPmt)
  {
    // Sparse streams may need generated filler frames instead of queued data.
    ret = GenerateSparseVideo(playlistOffset);
  }
  else if (m_vecClipVideoPackets.size()>0 && m_videoPmt)
  {
    // Pop the oldest buffered packet.
    ivecVideoBuffers it = m_vecClipVideoPackets.begin();
    ret=*it;
    it=m_vecClipVideoPackets.erase(it);
  }
  if (ret)
  {
    if (ret->rtStart!=Packet::INVALID_TIME)
    {
      if (firstVideo)
      {
        // First timestamped packet: publish reset/new-clip/seek state.
        // NOTE(review): bitwise '|' on what look like bool flags - works for
        // plain bools, but '||' was probably intended; confirm the types.
        ret->bDiscontinuity = clipInterrupted | bSeekTarget | clipReset;
        ret->nNewSegment = NS_STREAM_RESET;
        if (bSeekTarget) ret->nNewSegment |= NS_SEEK_TARGET; 
        if (!clipReset) ret->nNewSegment |= NS_NEW_CLIP;
        if (clipInterrupted) ret->nNewSegment |= NS_INTERRUPTED;
        firstVideo = false;
        bSeekTarget = false;
        // Attach the clip's video media type to the first packet.
        ret->pmt = CreateMediaType(m_videoPmt);
      }

      // Track the furthest playback position seen so far.
      if (ret->rtStart > videoPlaybackPosition) 
      {
        videoPlaybackPosition = ret->rtStart;
        //LogDebug("Videoplayback position (%d,%d) %I64d", nPlaylist, nClip, videoPlaybackPosition);
      }
  
      // Rebase timestamps from clip-local time to playlist time.
      ret->rtPlaylistTime = ret->rtStart - m_playlistOffset;
      ret->rtClipStartTime = ret->rtStart -  earliestPacketAccepted;
      ret->rtStart += clipPlaylistOffset - earliestPacketAccepted;
      ret->rtStop += clipPlaylistOffset  - earliestPacketAccepted;
    }
  }
//  LogDebug("Clip: vid: return Packet rtStart: %I64d offset: %I64d seekRequired %d",ret->rtStart, ret->rtOffset,ret->bSeekRequired);
  return ret;
}
Example #11
0
bool Packet::CopyProperties(const Packet *src)
{
  StreamId = src->StreamId;
  bDiscontinuity = src->bDiscontinuity;
  bSyncPoint = src->bSyncPoint;
  bPosition = src->bPosition;
  rtStart = src->rtStart;
  rtStop = src->rtStop;
  rtPTS = src->rtPTS;
  rtDTS = src->rtDTS;
  if (src->pmt)
    pmt = CreateMediaType(src->pmt);
  dwFlags = src->dwFlags;

  return true;
}
// -------------------------------------------------------------------------
// IAMStreamConfig interface
// -------------------------------------------------------------------------
// GetFormat
// If connected, return the media type of the connection, otherwise return the
// 'preferred' format.  As DS states that the formats should be listed in descending
// order of quality, our preferred format is the best one which is the first item
// in the list.
//
STDMETHODIMP CDecklinkAudioSourcePin::GetFormat(AM_MEDIA_TYPE** ppamt)
{
	CheckPointer(ppamt, E_POINTER);

	// CreateMediaType itself performs the CoTaskMemAlloc for the structure
	// and its format block. The previous code allocated a separate
	// AM_MEDIA_TYPE first and then overwrote *ppamt with CreateMediaType's
	// result, leaking that first allocation on every call.
	*ppamt = CreateMediaType((const AM_MEDIA_TYPE*)&m_MediaType);

	return (*ppamt != NULL) ? S_OK : E_OUTOFMEMORY;
}
Example #13
0
// Generates a fake AC3 packet starting at rtStart, used to keep the audio
// stream flowing when the clip has no (more) real audio data.
// Returns NULL once the clip's end is passed or fake audio is unavailable.
// The caller takes ownership of the returned packet.
Packet* CClip::GenerateFakeAudio(REFERENCE_TIME rtStart)
{
  // Would this packet extend past the clip's end? Then stop producing audio.
  if (rtStart + FAKE_AUDIO_DURATION - 1 > playlistFirstPacketTime + clipDuration) 
    superceeded |= SUPERCEEDED_AUDIO_RETURN;
  
  if (superceeded&SUPERCEEDED_AUDIO_RETURN) 
    return NULL;
  
  if (!FakeAudioAvailable()) 
    return NULL;

  // One pre-canned AC3 frame (ac3_sample) of silence.
  Packet* packet = new Packet();
  packet->nClipNumber = nClip;
    
  packet->SetCount(AC3_FRAME_LENGTH);
  packet->SetData(ac3_sample, AC3_FRAME_LENGTH);
  packet->rtStart = rtStart;
  packet->rtStop = packet->rtStart + 1;

  if (firstAudio)
  {
    // First fake packet: attach a 6-channel 48 kHz Dolby AC3 media type.
    CMediaType pmt;
    pmt.InitMediaType();
    pmt.SetType(&MEDIATYPE_Audio);
    pmt.SetSubtype(&MEDIASUBTYPE_DOLBY_AC3);
    pmt.SetSampleSize(1);
    pmt.SetTemporalCompression(FALSE);
    pmt.SetVariableSize();
    pmt.SetFormatType(&FORMAT_WaveFormatEx);
    pmt.SetFormat(AC3AudioFormat, sizeof(AC3AudioFormat));
    WAVEFORMATEXTENSIBLE* wfe = (WAVEFORMATEXTENSIBLE*)pmt.pbFormat;
    wfe->Format.nChannels = 6;
    wfe->Format.nSamplesPerSec = 48000;
    wfe->Format.wFormatTag = WAVE_FORMAT_DOLBY_AC3;

    // Deep copy - pmt is a stack object and goes away at scope exit.
    packet->pmt = CreateMediaType(&pmt);
  }
  
  // Advance the fake-audio clock.
  audioPlaybackPosition += FAKE_AUDIO_DURATION;
  lastAudioPosition += FAKE_AUDIO_DURATION;

  return packet;
}
Example #14
0
// IAMStreamConfig::GetStreamCaps - fills pSCC with the caps of supported mode
// iIndex and returns the matching media type in *pmt (caller frees it with
// DeleteMediaType).
HRESULT STDMETHODCALLTYPE XnVideoStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pmt);
	XN_METHOD_CHECK_POINTER(pSCC);

	xnDumpFileWriteString(m_Dump, "\tCalling %s for %d\n", __FUNCTION__, iIndex);

	// Fills the caps structure; also validates the index.
	CMediaType mediaType;
	VIDEO_STREAM_CONFIG_CAPS* pvscc = (VIDEO_STREAM_CONFIG_CAPS*)pSCC;
	HRESULT hr = GetStreamCapability(iIndex, mediaType, *pvscc);
	if (FAILED(hr)) XN_METHOD_RETURN(hr);

	xnDumpFileWriteString(m_Dump, "\tReturning %dx%d@%d using %s\n", m_aSupportedModes[iIndex].OutputMode.nXRes, m_aSupportedModes[iIndex].OutputMode.nYRes, m_aSupportedModes[iIndex].OutputMode.nFPS, xnPixelFormatToString(m_aSupportedModes[iIndex].Format));

	// CreateMediaType allocates via CoTaskMemAlloc; failure was previously ignored.
	*pmt = CreateMediaType(&mediaType);
	if (*pmt == NULL) XN_METHOD_RETURN(E_OUTOFMEMORY);
	XN_METHOD_RETURN(S_OK);
}
Example #15
0
/**
 * \brief IMediaSample::GetMediaType (retrieves media type, if it changed from previous sample)
 *
 * \param[in] This pointer to CMediaSample object
 * \param[out] ppMediaType address of variable that receives pointer to AM_MEDIA_TYPE.
 *
 * \return S_OK success
 * \return S_FALSE Media type was not changed from previous sample
 * \return E_OUTOFMEMORY Insufficient memory
 *
 * \remarks
 * If media type is not changed from previous sample, ppMediaType is null
 * If method returns S_OK caller should free memory allocated for structure
 * including pbFormat block
 */
static HRESULT STDCALL CMediaSample_GetMediaType(IMediaSample* This,
						 AM_MEDIA_TYPE** ppMediaType)
{
    AM_MEDIA_TYPE* t;
    Debug printf("CMediaSample_GetMediaType(%p) called\n", This);
    if(!ppMediaType)
	return E_INVALIDARG;
    if(!((CMediaSample*)This)->type_valid)
    {
	/* media type did not change from the previous sample */
	*ppMediaType=0;
	return 1; /* S_FALSE */
    }

    t = &((CMediaSample*)This)->media_type;
    *ppMediaType=CreateMediaType(t);
    /* The doc comment above promises E_OUTOFMEMORY, but the allocation
       result was previously never checked. */
    if(!*ppMediaType)
	return E_OUTOFMEMORY;
    return 0; /* S_OK */
}
Example #16
0
/**
 * \brief IEnumMediaTypes::Next (retrieves a specified number of media types)
 *
 * \param[in]  This pointer to CEnumMediaTypes object
 * \param[in]  cMediaTypes number of media types to retrieve
 * \param[out] ppMediaTypes array of AM_MEDIA_TYPE structure pointers of size cMediaTypes
 * \param[out] pcFetched address of variable that receives number of returned media types
 *
 * \return S_OK - success
 * \return S_FALSE - did not return as many structures as requested
 * \return E_INVALIDARG Invalid argument
 * \return E_OUTOFMEMORY Insufficient memory
 *
 * \remarks
 * This enumerator exposes exactly one media type and keeps no position, so
 * at most one type is ever returned regardless of cMediaTypes.
 */
static HRESULT STDCALL CEnumMediaTypes_Next(IEnumMediaTypes * This,
					    /* [in] */ ULONG cMediaTypes,
					    /* [size_is][out] */ AM_MEDIA_TYPE **ppMediaTypes,
					    /* [out] */ ULONG *pcFetched)
{
    AM_MEDIA_TYPE* type = &((CEnumMediaTypes*)This)->type;
    Debug printf("CEnumMediaTypes::Next(%p) called\n", This);
    if (!ppMediaTypes)
	return E_INVALIDARG;
    if (!pcFetched && (cMediaTypes!=1))
	return E_INVALIDARG;
    if (cMediaTypes <= 0)
	return 0;

    ppMediaTypes[0] = CreateMediaType(type);
    /* Previously the allocation result was not checked and *pcFetched was
       set to 1 even when the copy failed. */
    if (!ppMediaTypes[0])
    {
	if (pcFetched)
	    *pcFetched=0;
	return E_OUTOFMEMORY;
    }
    if (pcFetched)
	*pcFetched=1;

    if (cMediaTypes == 1)
	return 0; /* S_OK */
    return 1; /* S_FALSE - fewer types than requested */
}
Example #17
0
/* IEnumMediaTypes::Next - copies up to cMediaTypes media types starting at the
 * enumerator's current position into ppMediaTypes (caller frees each with
 * DeleteMediaType). Returns S_FALSE when fewer than requested were available,
 * VFW_E_ENUM_OUT_OF_SYNC when the pin's media types changed underneath us.
 * NOTE: per the API contract pcFetched may only be NULL when cMediaTypes is 1;
 * the guarded store near the end relies on callers honouring that. */
static HRESULT WINAPI IEnumMediaTypesImpl_Next(IEnumMediaTypes * iface, ULONG cMediaTypes, AM_MEDIA_TYPE ** ppMediaTypes, ULONG * pcFetched)
{
    ULONG cFetched;
    IEnumMediaTypesImpl *This = impl_from_IEnumMediaTypes(iface);

    TRACE("(%p)->(%u, %p, %p)\n", iface, cMediaTypes, ppMediaTypes, pcFetched);

    /* Clamp the request to the number of remaining types. */
    cFetched = min(This->enumMediaDetails.cMediaTypes, This->uIndex + cMediaTypes) - This->uIndex;

    if (This->currentVersion != This->mediaVersionFunction(This->basePin))
        return VFW_E_ENUM_OUT_OF_SYNC;

    TRACE("Next uIndex: %u, cFetched: %u\n", This->uIndex, cFetched);

    if (cFetched > 0)
    {
        ULONG i;
        for (i = 0; i < cFetched; i++)
            if (!(ppMediaTypes[i] = CreateMediaType(&This->enumMediaDetails.pMediaTypes[This->uIndex + i])))
            {
                /* Allocation failed: roll back the copies made so far. */
                while (i--)
                    DeleteMediaType(ppMediaTypes[i]);
                *pcFetched = 0;
                return E_OUTOFMEMORY;
            }
    }

    if ((cMediaTypes != 1) || pcFetched)
        *pcFetched = cFetched;

    This->uIndex += cFetched;

    if (cFetched != cMediaTypes)
        return S_FALSE;
    return S_OK;
}
Example #18
0
STDMETHODIMP
CMediaSample::GetMediaType(AM_MEDIA_TYPE **ppMediaType)
{
	/* Returns a copy of the dynamic-format-change media type, or S_FALSE
	   (with *ppMediaType set to NULL) when the type has not changed.
	   On S_OK the caller frees the copy with DeleteMediaType. */

	ValidateReadWritePtr(ppMediaType,sizeof(AM_MEDIA_TYPE *));
	ASSERT(ppMediaType);

	if ((m_dwFlags & Sample_TypeChanged) == 0) {
		/* No pending change - by invariant no type is stored either */
		ASSERT(m_pMediaType == NULL);
		*ppMediaType = NULL;
		return S_FALSE;
	}

	ASSERT(m_pMediaType);

	/* Hand out an independent copy of the stored type */
	*ppMediaType = CreateMediaType(m_pMediaType);
	return (*ppMediaType != NULL) ? NOERROR : E_OUTOFMEMORY;
}
// IAMStreamConfig::GetStreamCaps (audio) - exactly one capability entry is
// exposed; it mirrors the pin's current WAVEFORMATEX. Caller frees
// *ppMediaType with DeleteMediaType.
HRESULT STDMETHODCALLTYPE CVCamStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **ppMediaType, BYTE *pSCC)
{
	if(iIndex < 0)
		return E_INVALIDARG;
	if(iIndex > 0)
		return S_FALSE;            // only a single format is published
	// ppMediaType was previously dereferenced without a check (only pSCC was
	// validated).
	if(pSCC == NULL || ppMediaType == NULL)
		return E_POINTER;

	*ppMediaType = CreateMediaType(&m_mt);
	if (*ppMediaType == NULL) return E_OUTOFMEMORY;

	DECLARE_PTR(WAVEFORMATEX, pAudioFormat, (*ppMediaType)->pbFormat);

	AM_MEDIA_TYPE * pm = *ppMediaType;

	// Populates pAudioFormat / pm with the pin's audio parameters.
	setupPwfex(pAudioFormat, pm);

	AUDIO_STREAM_CONFIG_CAPS* pASCC = (AUDIO_STREAM_CONFIG_CAPS*) pSCC;
	ZeroMemory(pSCC, sizeof(AUDIO_STREAM_CONFIG_CAPS));

	// Set up audio capabilities [one type only, for now] - min == max since a
	// single fixed format is supported.
	pASCC->guid = MEDIATYPE_Audio;
	pASCC->MaximumChannels = pAudioFormat->nChannels;
	pASCC->MinimumChannels = pAudioFormat->nChannels;
	pASCC->ChannelsGranularity = 1; // doesn't matter
	pASCC->MaximumSampleFrequency = pAudioFormat->nSamplesPerSec;
	pASCC->MinimumSampleFrequency = pAudioFormat->nSamplesPerSec;
	pASCC->SampleFrequencyGranularity = 11025; // doesn't matter
	pASCC->MaximumBitsPerSample = pAudioFormat->wBitsPerSample;
	pASCC->MinimumBitsPerSample = pAudioFormat->wBitsPerSample;
	pASCC->BitsPerSampleGranularity = 16; // doesn't matter

	return S_OK;
}
Example #20
0
// Delivers a demuxed packet to the output pin owning its track.
// Rebases timestamps to stream time, attaches any pending media-type change
// and flags the first packet per track as a discontinuity. Returns S_FALSE
// when the pin is gone; S_OK while at least one output pin is still active.
HRESULT CBaseSplitterFilter::DeliverPacket(CAutoPtr<Packet> p)
{
    HRESULT hr = S_FALSE;

    CBaseSplitterOutputPin* pPin = GetOutputPin(p->TrackNumber);
    if (!pPin || !pPin->IsConnected() || !m_pActivePins.Find(pPin)) {
        return S_FALSE;
    }

    if (p->rtStart != Packet::INVALID_TIME) {
        m_rtCurrent = p->rtStart;

        // Rebase from media time to stream time (relative to the seek start).
        p->rtStart -= m_rtStart;
        p->rtStop -= m_rtStart;

        ASSERT(p->rtStart <= p->rtStop);
    }

    {
        CAutoLock cAutoLock(&m_csmtnew);

        // Attach (and consume) a pending dynamic media-type change, if any.
        CMediaType mt;
        if (m_mtnew.Lookup(p->TrackNumber, mt)) {
            p->pmt = CreateMediaType(&mt);
            m_mtnew.RemoveKey(p->TrackNumber);
        }
    }

    // The very first packet delivered on each track carries a discontinuity.
    if (!m_bDiscontinuitySent.Find(p->TrackNumber)) {
        p->bDiscontinuity = TRUE;
    }

    // p is handed off below - copy out what is needed afterwards.
    DWORD TrackNumber = p->TrackNumber;
    BOOL bDiscontinuity = p->bDiscontinuity;

#if defined(_DEBUG) && 0
    TRACE(_T("[%d]: d%d s%d p%d, b=%d, [%20I64d - %20I64d]\n"),
          p->TrackNumber,
          p->bDiscontinuity, p->bSyncPoint, p->rtStart != Packet::INVALID_TIME && p->rtStart < 0,
          p->GetCount(), p->rtStart, p->rtStop);
#endif

    hr = pPin->QueuePacket(p);

    if (S_OK != hr) {
        // This pin failed: deactivate it, but keep running while others live.
        if (POSITION pos = m_pActivePins.Find(pPin)) {
            m_pActivePins.RemoveAt(pos);
        }

        if (!m_pActivePins.IsEmpty()) { // only die when all pins are down
            hr = S_OK;
        }

        return hr;
    }

    // Record that the discontinuity was actually delivered.
    if (bDiscontinuity) {
        m_bDiscontinuitySent.AddTail(TrackNumber);
    }

    return hr;
}
Example #21
0
// IAMStreamConfig::GetFormat - returns a copy of the pin's current media type.
// Caller frees *ppmt with DeleteMediaType.
HRESULT STDMETHODCALLTYPE CVCamPin::GetFormat(AM_MEDIA_TYPE **ppmt)
{
    // ppmt and the allocation result were previously unchecked.
    if (ppmt == NULL)
        return E_POINTER;
    *ppmt = CreateMediaType(&m_mt);
    if (*ppmt == NULL)
        return E_OUTOFMEMORY;
    return S_OK;
}
Example #22
0
// IAMStreamSelect::Info - describes stream lIndex, recursing into any
// upstream IAMStreamSelect the input pin exposes. Audio streams of upstream
// selectors are enumerated first per pin, then the pin itself.
// On success the caller owns *ppmt (DeleteMediaType), *ppszName
// (CoTaskMemFree) and holds a reference on *ppObject.
// pdwGroup value is set to:
//  - 0 if the track isn't controlled by any underlying IAMStreamSelect interface
//  - 1 if the track is controlled by an underlying IAMStreamSelect interface and is not selected at that level
//  - 2 if the track is controlled by an underlying IAMStreamSelect interface and is selected at that level
STDMETHODIMP CStreamSwitcherFilter::Info(long lIndex, AM_MEDIA_TYPE** ppmt, DWORD* pdwFlags, LCID* plcid, DWORD* pdwGroup, WCHAR** ppszName, IUnknown** ppObject, IUnknown** ppUnk)
{
    CAutoLock cAutoLock(&m_csPins);

    IUnknown* pObject = nullptr;
    bool bFound = false;
    POSITION pos = m_pInputs.GetHeadPosition();
    while (pos && !bFound) {
        CStreamSwitcherInputPin* pInputPin = m_pInputs.GetNext(pos);

        if (pInputPin->IsConnected()) {
            if (CComPtr<IAMStreamSelect> pSSF = pInputPin->GetStreamSelectionFilter()) {
                // Pin is fed by another stream selector: enumerate its audio
                // streams and count them against lIndex.
                DWORD cStreams = 0;
                HRESULT hr = pSSF->Count(&cStreams);
                if (SUCCEEDED(hr)) {
                    for (int i = 0; i < (int)cStreams; i++) {
                        AM_MEDIA_TYPE* pmt = nullptr;
                        DWORD dwFlags;
                        LPWSTR pszName = nullptr;
                        hr = pSSF->Info(i, &pmt, &dwFlags, plcid, NULL, &pszName, nullptr, nullptr);
                        if (SUCCEEDED(hr) && pmt && pmt->majortype == MEDIATYPE_Audio) {
                            if (lIndex == 0) {
                                bFound = true;
                                pObject = pSSF;

                                // Transfer ownership of pmt/pszName to the
                                // caller when requested; otherwise free them.
                                if (ppmt) {
                                    *ppmt = pmt;
                                } else {
                                    DeleteMediaType(pmt);
                                }

                                if (pdwFlags) {
                                    *pdwFlags = (m_pInput == pInputPin) ? dwFlags : 0;
                                }

                                if (pdwGroup) {
                                    *pdwGroup = (dwFlags & (AMSTREAMSELECTINFO_ENABLED | AMSTREAMSELECTINFO_EXCLUSIVE)) ? 2 : 1;
                                }

                                if (ppszName) {
                                    *ppszName = pszName;
                                } else {
                                    CoTaskMemFree(pszName);
                                }

                                // break skips the cleanup below - ownership
                                // was already transferred or released above.
                                break;
                            } else {
                                lIndex--;
                            }
                        }
                        // Non-matching (or failed) entry: release its data.
                        // DeleteMediaType/CoTaskMemFree both accept NULL.
                        DeleteMediaType(pmt);
                        CoTaskMemFree(pszName);
                    }
                }
            } else if (lIndex == 0) {
                // Plain input pin (no upstream selector) matched the index.
                bFound = true;

                // NOTE(review): as in the simpler overload above, the OUTPUT
                // pin's current type is reported - presumably because all
                // inputs share the output format.
                if (ppmt) {
                    *ppmt = CreateMediaType(&m_pOutput->CurrentMediaType());
                }

                if (pdwFlags) {
                    *pdwFlags = (m_pInput == pInputPin) ? AMSTREAMSELECTINFO_EXCLUSIVE : 0;
                }

                if (plcid) {
                    *plcid = 0;
                }

                // Not controlled by an underlying IAMStreamSelect.
                if (pdwGroup) {
                    *pdwGroup = 0;
                }

                // Copy the pin name; on allocation failure *ppszName is NULL.
                if (ppszName) {
                    *ppszName = (WCHAR*)CoTaskMemAlloc((wcslen(pInputPin->Name()) + 1) * sizeof(WCHAR));
                    if (*ppszName) {
                        wcscpy_s(*ppszName, wcslen(pInputPin->Name()) + 1, pInputPin->Name());
                    }
                }
            } else {
                lIndex--;
            }
        }
    }

    if (!bFound) {
        return E_INVALIDARG;
    }

    // Hand out a referenced pointer to the controlling selector, if any.
    if (ppObject) {
        *ppObject = pObject;
        if (pObject) {
            pObject->AddRef();
        }
    }

    if (ppUnk) {
        *ppUnk = nullptr;
    }

    return S_OK;
}
Example #23
0
// IAMStreamConfig::GetStreamCaps - single effective capability: RGB24 at the
// sender's size (g_Width x g_Height) or 320x240 before a sender exists.
// pSCC receives caps spanning up to 1920x1080. Caller frees *pmt with
// DeleteMediaType.
HRESULT STDMETHODCALLTYPE CVCamStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC)
{
	unsigned int width, height;

	// Previously neither pointer nor the allocation was validated.
	if (pmt == NULL || pSCC == NULL)
		return E_POINTER;

	*pmt = CreateMediaType(&m_mt);
	if (*pmt == NULL)
		return E_OUTOFMEMORY;
	DECLARE_PTR(VIDEOINFOHEADER, pvi, (*pmt)->pbFormat);

	if (iIndex == 0) iIndex = 1;

	if(g_Width == 0 || g_Height == 0) {
		// No sender yet - advertise a harmless default.
		width  = 320;
		height = 240;
	}
	else {
		// as per sending app
		width	=  g_Width;
		height	=  g_Height;
	}

	pvi->bmiHeader.biCompression	= BI_RGB;
	pvi->bmiHeader.biBitCount		= 24;
	pvi->bmiHeader.biSize			= sizeof(BITMAPINFOHEADER);
	pvi->bmiHeader.biWidth			= (LONG)width;
	pvi->bmiHeader.biHeight			= (LONG)height;
	pvi->bmiHeader.biPlanes			= 1;
	pvi->bmiHeader.biSizeImage		= GetBitmapSize(&pvi->bmiHeader);
	pvi->bmiHeader.biClrImportant	= 0;

	SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
	SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle

	(*pmt)->majortype				= MEDIATYPE_Video;
	(*pmt)->subtype					= MEDIASUBTYPE_RGB24;
	(*pmt)->formattype				= FORMAT_VideoInfo;
	(*pmt)->bTemporalCompression	= false;
	(*pmt)->bFixedSizeSamples		= false;
	(*pmt)->lSampleSize				= pvi->bmiHeader.biSizeImage;
	(*pmt)->cbFormat				= sizeof(VIDEOINFOHEADER);

	DECLARE_PTR(VIDEO_STREAM_CONFIG_CAPS, pvscc, pSCC);

	pvscc->guid = FORMAT_VideoInfo;
	pvscc->VideoStandard = AnalogVideo_None;
	// Native size of the incoming video signal.
	// For a capture filter, the size is the largest signal the filter
	// can digitize with every pixel remaining unique. (Deprecated field.)
	pvscc->InputSize.cx			= 1920;
	pvscc->InputSize.cy			= 1080;
	pvscc->MinCroppingSize.cx	= 0; // don't restrict cropping
	pvscc->MinCroppingSize.cy	= 0;
	pvscc->MaxCroppingSize.cx	= 1920;
	pvscc->MaxCroppingSize.cy	= 1080;
	pvscc->CropGranularityX		= 1;
	pvscc->CropGranularityY		= 1;
	pvscc->CropAlignX = 0;
	pvscc->CropAlignY = 0;

	pvscc->MinOutputSize.cx		= 80;
	pvscc->MinOutputSize.cy		= 60;
	pvscc->MaxOutputSize.cx		= 1920; // 1080p
	pvscc->MaxOutputSize.cy		= 1080;
	pvscc->OutputGranularityX	= 1;
	pvscc->OutputGranularityY	= 1;
	pvscc->StretchTapsX			= 0;
	pvscc->StretchTapsY			= 0;
	pvscc->ShrinkTapsX			= 0;
	pvscc->ShrinkTapsY			= 0;
	pvscc->MinFrameInterval = 166667;   // 60 fps (100ns units)
	pvscc->MaxFrameInterval = 50000000; // 0.2 fps
	pvscc->MinBitsPerSecond = (80 * 60 * 3 * 8) / 5;
	pvscc->MaxBitsPerSecond = 1920 * 1080 * 3 * 8 * 30; // 30 fps: 60 would overflow a 32-bit LONG

	return S_OK;
}
Example #24
0
// IMediaSample2::SetProperties - applies an AM_SAMPLE2_PROPERTIES blob to this
// sample. All validation is performed before any member is assigned so a
// failure leaves the sample unmodified. Only the fields actually present in
// the (possibly truncated) structure are applied; the buffer pointer and size
// can be checked but never changed.
HRESULT CMediaSample::SetProperties(
									DWORD cbProperties,
									const BYTE * pbProperties
									)
{

	/*  Generic properties */
	AM_MEDIA_TYPE *pMediaType = NULL;

	if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, cbData, cbProperties)) {
		CheckPointer(pbProperties, E_POINTER);
		AM_SAMPLE2_PROPERTIES *pProps =
			(AM_SAMPLE2_PROPERTIES *)pbProperties;

		/*  Don't use more data than is actually there */
		if (pProps->cbData < cbProperties) {
			cbProperties = pProps->cbData;
		}
		/*  We only handle IMediaSample2 */
		if (cbProperties > sizeof(*pProps) ||
			pProps->cbData > sizeof(*pProps)) {
				return E_INVALIDARG;
		}
		/*  Do checks first, the assignments (for backout) */
		if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, dwSampleFlags, cbProperties)) {
			/*  Check the flags */
			if (pProps->dwSampleFlags &
				(~Sample_ValidFlags | Sample_MediaTimeValid)) {
					return E_INVALIDARG;
			}
			/*  Check a flag isn't being set for a property
			not being provided
			*/
			if ((pProps->dwSampleFlags & AM_SAMPLE_TIMEVALID) &&
				!(m_dwFlags & AM_SAMPLE_TIMEVALID) &&
				!CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, tStop, cbProperties)) {
					return E_INVALIDARG;
			}
		}
		/*  NB - can't SET the pointer or size */
		if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, pbBuffer, cbProperties)) {

			/*  Check pbBuffer - must be absent (0) or match ours */
			if (pProps->pbBuffer != 0 && pProps->pbBuffer != m_pBuffer) {
				return E_INVALIDARG;
			}
		}
		if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, cbBuffer, cbProperties)) {

			/*  Check cbBuffer - must be absent (0) or match ours */
			if (pProps->cbBuffer != 0 && pProps->cbBuffer != m_cbBuffer) {
				return E_INVALIDARG;
			}
		}
		if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, cbBuffer, cbProperties) &&
			CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, lActual, cbProperties)) {

				/*  Check lActual - cannot exceed the buffer size */
				if (pProps->cbBuffer < pProps->lActual) {
					return E_INVALIDARG;
				}
		}

		if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, pMediaType, cbProperties)) {

			/*  Check pMediaType and copy it up front so an allocation
			failure can still back out without touching the sample */
			if (pProps->dwSampleFlags & AM_SAMPLE_TYPECHANGED) {
				CheckPointer(pProps->pMediaType, E_POINTER);
				pMediaType = CreateMediaType(pProps->pMediaType);
				if (pMediaType == NULL) {
					return E_OUTOFMEMORY;
				}
			}
		}

		/*  Now do the assignments */
		if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, dwStreamId, cbProperties)) {
			m_dwStreamId = pProps->dwStreamId;
		}
		if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, dwSampleFlags, cbProperties)) {
			/*  Set the flags, preserving our private media-time-valid bit */
			m_dwFlags = pProps->dwSampleFlags |
				(m_dwFlags & Sample_MediaTimeValid);
			m_dwTypeSpecificFlags = pProps->dwTypeSpecificFlags;
		} else {
			if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, dwTypeSpecificFlags, cbProperties)) {
				m_dwTypeSpecificFlags = pProps->dwTypeSpecificFlags;
			}
		}

		if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, lActual, cbProperties)) {
			/*  Set lActual */
			m_lActual = pProps->lActual;
		}

		if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, tStop, cbProperties)) {

			/*  Set the times */
			m_End   = pProps->tStop;
		}
		if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, tStart, cbProperties)) {

			/*  Set the times */
			m_Start = pProps->tStart;
		}

		if (CONTAINS_FIELD(AM_SAMPLE2_PROPERTIES, pMediaType, cbProperties)) {
			/*  Set pMediaType - replace any previously stored type with the
			copy made above */
			if (pProps->dwSampleFlags & AM_SAMPLE_TYPECHANGED) {
				if (m_pMediaType != NULL) {
					DeleteMediaType(m_pMediaType);
				}
				m_pMediaType = pMediaType;
			}
		}

		/*  Fix up the type changed flag to correctly reflect the current state
		If, for instance the input contained no type change but the
		output does then if we don't do this we'd lose the
		output media type.
		*/
		if (m_pMediaType) {
			m_dwFlags |= Sample_TypeChanged;
		} else {
			m_dwFlags &= ~Sample_TypeChanged;
		}
	}

	return S_OK;
}
Example #25
0
// Replaces the clip's cached video media type with a deep copy of |pmt|.
// Does not take ownership of |pmt|; the previously cached type is freed.
void CClip::SetVideoPMT(AM_MEDIA_TYPE *pmt)
{
  if (m_videoPmt)
  {
    DeleteMediaType(m_videoPmt);
    // Null the pointer immediately so a failed copy below cannot leave a
    // dangling pointer to the just-deleted type.
    m_videoPmt = NULL;
  }
  // CreateMediaType deep-copies the type and returns NULL on allocation
  // failure; guard the NULL input case instead of dereferencing it.
  if (pmt)
    m_videoPmt = CreateMediaType(pmt);
}
// Delivers a media sample downstream.  On a discontinuity the sample may be
// missing its sequence header; in that case the decoder configuration is
// recovered from the first indexed chunk and prepended to the payload in a
// freshly allocated sample before delivery.
HRESULT CAVIVideoPin::Deliver(IMediaSample *pSample)
{
    CComPtr<IMediaSample> pSampleDeliver = pSample;
    if (pSample->IsDiscontinuity() == S_OK)
    {
        if (m_seqHParser.isInited())
        {
            BYTE* pBuffer = NULL;
            pSample->GetPointer(&pBuffer);
            long len = pSample->GetActualDataLength();
            if (!m_seqHParser.ParseFrame(pBuffer, len))
            {
                // The sample carries no sequence header -- recover the decoder
                // configuration from the first chunk in the index.
                const INDEXENTRY& entry = (*m_pIndex)[0];
                BYTE* pTempData = new BYTE[entry.dwChunkLength];
                CAVIScanner* scanner = m_pSplitter->GetAVIScanner();
                scanner->ReadData(entry.chunkOffset, entry.dwChunkLength, pTempData);
                BYTE* pDecoderConfig = NULL;
                int decoderConfigLen = 0;
                if (m_seqHParser.ParseFrame(pTempData, entry.dwChunkLength))
                {
                    // Concatenate the sequence header and (if present) the
                    // picture parameter set into one config blob.
                    decoderConfigLen = m_seqHParser.VisualObjSeq().obj_len + m_seqHParser.Pic_Parameter_Set().obj_len;
                    pDecoderConfig = new BYTE[decoderConfigLen];
                    memcpy(pDecoderConfig, m_seqHParser.VisualObjSeq().obj, m_seqHParser.VisualObjSeq().obj_len);
                    if (m_seqHParser.Pic_Parameter_Set().obj_len)
                        memcpy(pDecoderConfig + m_seqHParser.VisualObjSeq().obj_len, m_seqHParser.Pic_Parameter_Set().obj, m_seqHParser.Pic_Parameter_Set().obj_len);

                    // Allocate a bigger sample: decoder config followed by the
                    // original payload.
                    long newsampleLen = decoderConfigLen + len;
                    CComPtr<IMediaSample> pNewSample;
                    HRESULT hr = GetNewSample(&pNewSample, newsampleLen);
                    ASSERT(hr == S_OK);
                    if (SUCCEEDED(hr))
                    {
                        CComPtr<IMediaSample2> pSmp = NULL;
                        pSample->QueryInterface(&pSmp);
                        AM_SAMPLE2_PROPERTIES props_in = {0};
                        hr = pSmp->GetProperties(sizeof(AM_SAMPLE2_PROPERTIES), (BYTE*)&props_in);

                        CComPtr<IMediaSample2> pSmp2 = NULL;
                        pNewSample->QueryInterface(&pSmp2);

                        AM_SAMPLE2_PROPERTIES props_out = {0};
                        hr = pSmp2->GetProperties(sizeof(AM_SAMPLE2_PROPERTIES), (BYTE*)&props_out);

                        // Mirror the source sample's timing/flags onto the new
                        // sample; only the payload length changes.
                        props_out.dwTypeSpecificFlags = props_in.dwTypeSpecificFlags;
                        props_out.dwSampleFlags = props_in.dwSampleFlags;
                        props_out.lActual = decoderConfigLen + len;
                        props_out.tStart = props_in.tStart;
                        props_out.tStop = props_in.tStop;
                        props_out.dwStreamId = props_in.dwStreamId;
                        if (props_in.pMediaType)
                            props_out.pMediaType = CreateMediaType(props_in.pMediaType);

                        memcpy(props_out.pbBuffer, pDecoderConfig, decoderConfigLen);
                        memcpy(props_out.pbBuffer + decoderConfigLen, pBuffer, len);
                        hr = pSmp2->SetProperties(sizeof(AM_SAMPLE2_PROPERTIES), (BYTE*)&props_out);

                        // BUGFIX: SetProperties stores its own copy of the
                        // media type (CMediaSample::SetProperties calls
                        // CreateMediaType on it), so release our copy here to
                        // avoid leaking it.
                        if (props_out.pMediaType)
                            DeleteMediaType(props_out.pMediaType);

                        pSampleDeliver = pNewSample;
                    }

                    // BUGFIX: buffers allocated with new[] must be released
                    // with delete[]; plain delete is undefined behavior.
                    delete [] pDecoderConfig;
                }
                delete [] pTempData;
            }
        }
    }
    return CAVIOutputPin::Deliver(pSampleDeliver);
}
Example #27
0
// IAMStreamConfig::GetStreamCaps -- describes capability |iIndex| of the
// attached device.  On success, *pmt receives a caller-owned copy of the
// media type (free with DeleteMediaType) and pSCC is filled with a
// VIDEO_STREAM_CONFIG_CAPS describing the fixed resolution / frame-rate
// range of that capability.
HRESULT STDMETHODCALLTYPE CKCamStream::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC)
{
	if (!m_device)
		return E_FAIL;

    // Guard the out-parameters before writing through them.
    CheckPointer(pmt, E_POINTER);
    CheckPointer(pSCC, E_POINTER);

    *pmt = CreateMediaType(&m_mt);
    // CreateMediaType returns NULL on allocation failure.
    if (*pmt == NULL)
        return E_OUTOFMEMORY;
    DECLARE_PTR(VIDEOINFOHEADER, pvi, (*pmt)->pbFormat);

	DbgLog((LOG_TRACE, 1, "GetStreamCaps (iPosition = %d)", iIndex));

	auto f_devres = m_device->video_resolution(iIndex);

    // Describe the bitmap for this capability's pixel format and size.
    pvi->bmiHeader.biCompression = CompressionFromPixelFormat(f_devres.m_pixel_format);
    pvi->bmiHeader.biBitCount    = f_devres.m_bits_per_pixel;
    pvi->bmiHeader.biSize        = sizeof(BITMAPINFOHEADER);
    pvi->bmiHeader.biWidth       = f_devres.m_width;
    pvi->bmiHeader.biHeight      = f_devres.m_height;
    pvi->bmiHeader.biPlanes      = 1;
    pvi->bmiHeader.biSizeImage   = GetBitmapSize(&pvi->bmiHeader);
    pvi->bmiHeader.biClrImportant = 0;

    SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
    SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle

    (*pmt)->majortype				= MEDIATYPE_Video;
    (*pmt)->subtype					= MediaSubTypeFromPixelFormat(f_devres.m_pixel_format);
    (*pmt)->formattype				= FORMAT_VideoInfo;
    (*pmt)->bTemporalCompression	= FALSE;
    (*pmt)->bFixedSizeSamples		= FALSE;
    (*pmt)->lSampleSize				= pvi->bmiHeader.biSizeImage;
    (*pmt)->cbFormat				= sizeof(VIDEOINFOHEADER);

    DECLARE_PTR(VIDEO_STREAM_CONFIG_CAPS, pvscc, pSCC);

    // The device exposes a single fixed size per capability, so cropping and
    // output sizes are all pinned to the native resolution.
    pvscc->guid					= FORMAT_VideoInfo;
    pvscc->VideoStandard		= AnalogVideo_None;
    pvscc->InputSize.cx			= f_devres.m_width;
    pvscc->InputSize.cy			= f_devres.m_height;
    pvscc->MinCroppingSize.cx	= f_devres.m_width;
    pvscc->MinCroppingSize.cy	= f_devres.m_height;
    pvscc->MaxCroppingSize.cx	= f_devres.m_width;
    pvscc->MaxCroppingSize.cy	= f_devres.m_height;
    pvscc->CropGranularityX		= 0;
    pvscc->CropGranularityY		= 0;
    pvscc->CropAlignX			= 0;
    pvscc->CropAlignY			= 0;

    pvscc->MinOutputSize.cx		= f_devres.m_width;
    pvscc->MinOutputSize.cy		= f_devres.m_height;
    pvscc->MaxOutputSize.cx		= f_devres.m_width;
    pvscc->MaxOutputSize.cy		= f_devres.m_height;
    pvscc->OutputGranularityX	= 0;
    pvscc->OutputGranularityY	= 0;
    pvscc->StretchTapsX			= 0;
    pvscc->StretchTapsY			= 0;
    pvscc->ShrinkTapsX			= 0;
    pvscc->ShrinkTapsY			= 0;

    // Frame-rate range: device's native rate down to 10 fps.
    // (MinFrameInterval corresponds to the highest rate, MaxFrameInterval to
    // the lowest; the bit-rate bounds mirror the same range.)
	int bitsPerFrame = f_devres.m_width * f_devres.m_height * f_devres.m_bits_per_pixel;
    pvscc->MinFrameInterval = FrameIntervalFromRate(f_devres.m_framerate);
    pvscc->MaxFrameInterval = FrameIntervalFromRate(10);
    pvscc->MinBitsPerSecond = bitsPerFrame * 10;
    pvscc->MaxBitsPerSecond = bitsPerFrame * f_devres.m_framerate;

    return S_OK;
}
Example #28
0
// IAMStreamConfig::GetFormat -- returns a copy of the pin's current media
// type.  The caller owns the returned AM_MEDIA_TYPE and must release it with
// DeleteMediaType().
HRESULT STDMETHODCALLTYPE CSimpleVirtualCamFilterStream::GetFormat(AM_MEDIA_TYPE **ppmt)
{
	// Guard the out-parameter, matching the other GetFormat implementations
	// in this file.
	CheckPointer(ppmt, E_POINTER);
	*ppmt = CreateMediaType(&m_mt); // deep copy; NULL on allocation failure
	if (*ppmt == NULL)
		return E_OUTOFMEMORY;
	return S_OK;
}