HRESULT recChannel_t::get_AM_MEDIA_TYPE(char* newFormat, AM_MEDIA_TYPE** mediaFormat)
{
    __CONTEXT("recChannel_t::get_AM_MEDIA_TYPE");

    ql_t<AM_MEDIA_TYPE *> auxFormats = camInfo->getFormatList();
    for (int i = 0; i < auxFormats.len(); i++) {
        AM_MEDIA_TYPE* format = auxFormats.nth(i);

        char subtypeName[100];
        memset(subtypeName, 0, sizeof(subtypeName));
        GetGUIDString(subtypeName, &format->subtype);

        VIDEOINFOHEADER* pVih = (VIDEOINFOHEADER*)format->pbFormat;

        // Match either an entry without a VIDEOINFOHEADER payload against the
        // current source format, or an entry whose dimensions and subtype match
        // the requested format. Guard the dereference: pVih may be NULL.
        if ((pVih == NULL && strcmp(newFormat, sourceFormat) == 0) ||
            (pVih != NULL &&
             pVih->bmiHeader.biHeight == capInfo.heigth &&
             pVih->bmiHeader.biWidth == capInfo.width &&
             strcmp(subtypeName, newFormat) == 0)) {
            *mediaFormat = format;
            return S_OK;
        }
    }
    return E_FAIL;
}
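// Usage sketch (illustrative only, not from the source): how a caller might look
// up the media type for a requested subtype and hand it to IAMStreamConfig.
// "channel" and "pOutputPin" are hypothetical names; only get_AM_MEDIA_TYPE()
// and IAMStreamConfig::SetFormat() come from the code above.
//
// AM_MEDIA_TYPE* mt = NULL;
// if (SUCCEEDED(channel->get_AM_MEDIA_TYPE("MEDIASUBTYPE_RGB24", &mt)) && mt) {
//     IAMStreamConfig* pCfg = NULL;
//     if (SUCCEEDED(pOutputPin->QueryInterface(IID_IAMStreamConfig, (void**)&pCfg))) {
//         pCfg->SetFormat(mt);   // apply the matched capture format
//         pCfg->Release();
//     }
// }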
void CDVSColorPPage::UpdateControlData(bool fSave)
{
    const int nCountFmts = _countof(VSFilterDefaultFormats);

    if (fSave) {
        // Persist the user-ordered list of preferred color formats.
        if ((UINT)m_preflist.GetCount() == (UINT)nCountFmts) {
            BYTE* pData = DNew BYTE[nCountFmts];
            for (ptrdiff_t i = 0; i < m_preflist.GetCount(); i++) {
                pData[i] = (BYTE)m_preflist.GetItemData(i);
            }
            theApp.WriteProfileBinary(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_COLORFORMATS), pData, nCountFmts);
            delete [] pData;
        } else {
            ASSERT(0);
        }
        theApp.WriteProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_FORCERGB), !!m_forcergb.GetCheck());
    } else {
        m_preflist.ResetContent();
        m_dynchglist.ResetContent();

        BYTE* pData = NULL;
        UINT nSize = 0;
        // Fall back to the default ordering if the stored data is missing or stale.
        if (!theApp.GetProfileBinary(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_COLORFORMATS), &pData, &nSize)
                || !pData || nSize != (UINT)nCountFmts) {
            if (pData) {
                delete [] pData;
                pData = NULL;
            }
            nSize = nCountFmts;
            pData = DNew BYTE[nCountFmts];
            for (UINT i = 0; i < nSize; i++) {
                pData[i] = (BYTE)i;
            }
        }

        if (pData) {
            for (UINT i = 0; i < nSize; i++) {
                CString guid = GetGUIDString(*VSFilterDefaultFormats[i].subtype);
                if (!guid.Left(13).CompareNoCase(_T("MEDIASUBTYPE_"))) {
                    guid = guid.Mid(13); // strip the "MEDIASUBTYPE_" prefix for display
                }
                m_dynchglist.AddString(guid);
                m_dynchglist.SetItemData(i, pData[i]);
                m_preflist.AddString(guid);
                m_preflist.SetItemData(i, pData[i]);
            }

            int iPosition = -1;
            m_pDirectVobSub->get_ColorFormat(&iPosition);
            m_dynchglist.SetCurSel(iPosition);

            delete [] pData;
        }

        m_forcergb.SetCheck(theApp.GetProfileInt(ResStr(IDS_R_GENERAL), ResStr(IDS_RG_FORCERGB), 0) ? BST_CHECKED : BST_UNCHECKED);
    }
}
void CMediaTypeEx::Dump(CAtlList<CString>& sl)
{
    CString str;
    sl.RemoveAll();

    ULONG fmtsize = 0;

    CString major = CStringFromGUID(majortype);
    CString sub = CStringFromGUID(subtype);
    CString format = CStringFromGUID(formattype);

    sl.AddTail(ToString());
    sl.AddTail(_T(""));

    sl.AddTail(_T("AM_MEDIA_TYPE: "));
    str.Format(_T("majortype: %s %s"), GetGUIDString(majortype), major); sl.AddTail(str);
    str.Format(_T("subtype: %s %s"), GetGUIDString(subtype), sub); sl.AddTail(str);
    str.Format(_T("formattype: %s %s"), GetGUIDString(formattype), format); sl.AddTail(str);
    str.Format(_T("bFixedSizeSamples: %d"), bFixedSizeSamples); sl.AddTail(str);
    str.Format(_T("bTemporalCompression: %d"), bTemporalCompression); sl.AddTail(str);
    str.Format(_T("lSampleSize: %u"), lSampleSize); sl.AddTail(str);
    str.Format(_T("cbFormat: %u"), cbFormat); sl.AddTail(str);
    sl.AddTail(_T(""));

    if (formattype == FORMAT_VideoInfo || formattype == FORMAT_VideoInfo2
            || formattype == FORMAT_MPEGVideo || formattype == FORMAT_MPEG2_VIDEO) {
        fmtsize = formattype == FORMAT_VideoInfo   ? sizeof(VIDEOINFOHEADER)
                : formattype == FORMAT_VideoInfo2  ? sizeof(VIDEOINFOHEADER2)
                : formattype == FORMAT_MPEGVideo   ? sizeof(MPEG1VIDEOINFO) - 1
                : formattype == FORMAT_MPEG2_VIDEO ? sizeof(MPEG2VIDEOINFO) - 4
                : 0;

        // All four video format blocks begin with a VIDEOINFOHEADER.
        VIDEOINFOHEADER& vih = *(VIDEOINFOHEADER*)pbFormat;
        BITMAPINFOHEADER* bih = &vih.bmiHeader;

        sl.AddTail(_T("VIDEOINFOHEADER:"));
        str.Format(_T("rcSource: (%d,%d)-(%d,%d)"), vih.rcSource.left, vih.rcSource.top, vih.rcSource.right, vih.rcSource.bottom); sl.AddTail(str);
        str.Format(_T("rcTarget: (%d,%d)-(%d,%d)"), vih.rcTarget.left, vih.rcTarget.top, vih.rcTarget.right, vih.rcTarget.bottom); sl.AddTail(str);
        str.Format(_T("dwBitRate: %u"), vih.dwBitRate); sl.AddTail(str);
        str.Format(_T("dwBitErrorRate: %u"), vih.dwBitErrorRate); sl.AddTail(str);
        str.Format(_T("AvgTimePerFrame: %I64d"), vih.AvgTimePerFrame); sl.AddTail(str);
        sl.AddTail(_T(""));

        if (formattype == FORMAT_VideoInfo2 || formattype == FORMAT_MPEG2_VIDEO) {
            VIDEOINFOHEADER2& vih2 = *(VIDEOINFOHEADER2*)pbFormat;
            bih = &vih2.bmiHeader;

            sl.AddTail(_T("VIDEOINFOHEADER2:"));
            str.Format(_T("dwInterlaceFlags: 0x%08x"), vih2.dwInterlaceFlags); sl.AddTail(str);
            str.Format(_T("dwCopyProtectFlags: 0x%08x"), vih2.dwCopyProtectFlags); sl.AddTail(str);
            str.Format(_T("dwPictAspectRatioX: %u"), vih2.dwPictAspectRatioX); sl.AddTail(str);
            str.Format(_T("dwPictAspectRatioY: %u"), vih2.dwPictAspectRatioY); sl.AddTail(str);
            str.Format(_T("dwControlFlags: 0x%08x"), vih2.dwControlFlags); sl.AddTail(str);
            str.Format(_T("dwReserved2: 0x%08x"), vih2.dwReserved2); sl.AddTail(str);
            sl.AddTail(_T(""));
        }

        if (formattype == FORMAT_MPEGVideo) {
            MPEG1VIDEOINFO& mvih = *(MPEG1VIDEOINFO*)pbFormat;

            sl.AddTail(_T("MPEG1VIDEOINFO:"));
            str.Format(_T("dwStartTimeCode: %u"), mvih.dwStartTimeCode); sl.AddTail(str);
            str.Format(_T("cbSequenceHeader: %u"), mvih.cbSequenceHeader); sl.AddTail(str);
            sl.AddTail(_T(""));
        } else if (formattype == FORMAT_MPEG2_VIDEO) {
            MPEG2VIDEOINFO& mvih = *(MPEG2VIDEOINFO*)pbFormat;

            sl.AddTail(_T("MPEG2VIDEOINFO:"));
            str.Format(_T("dwStartTimeCode: %u"), mvih.dwStartTimeCode); sl.AddTail(str);
            str.Format(_T("cbSequenceHeader: %u"), mvih.cbSequenceHeader); sl.AddTail(str);
            str.Format(_T("dwProfile: 0x%08x"), mvih.dwProfile); sl.AddTail(str);
            str.Format(_T("dwLevel: 0x%08x"), mvih.dwLevel); sl.AddTail(str);
            str.Format(_T("dwFlags: 0x%08x"), mvih.dwFlags); sl.AddTail(str);
            sl.AddTail(_T(""));
        }

        sl.AddTail(_T("BITMAPINFOHEADER:"));
        str.Format(_T("biSize: %u"), bih->biSize); sl.AddTail(str);
        str.Format(_T("biWidth: %d"), bih->biWidth); sl.AddTail(str);
        str.Format(_T("biHeight: %d"), bih->biHeight); sl.AddTail(str);
        str.Format(_T("biPlanes: %u"), bih->biPlanes); sl.AddTail(str);
        str.Format(_T("biBitCount: %u"), bih->biBitCount); sl.AddTail(str);
        if (bih->biCompression < 256) {
            str.Format(_T("biCompression: %u"), bih->biCompression);
        } else {
            // Values above 255 are FOURCC codes; print the four characters.
            str.Format(_T("biCompression: %4.4hs"), &bih->biCompression);
        }
        sl.AddTail(str);
        str.Format(_T("biSizeImage: %u"), bih->biSizeImage); sl.AddTail(str);
        str.Format(_T("biXPelsPerMeter: %d"), bih->biXPelsPerMeter); sl.AddTail(str);
        str.Format(_T("biYPelsPerMeter: %d"), bih->biYPelsPerMeter); sl.AddTail(str);
        str.Format(_T("biClrUsed: %u"), bih->biClrUsed); sl.AddTail(str);
        str.Format(_T("biClrImportant: %u"), bih->biClrImportant); sl.AddTail(str);
        sl.AddTail(_T(""));
    } else if (formattype == FORMAT_WaveFormatEx || formattype == FORMAT_WaveFormatExFFMPEG) {
        WAVEFORMATEX* pWfe = NULL;

        if (formattype == FORMAT_WaveFormatExFFMPEG) {
            fmtsize = sizeof(WAVEFORMATEXFFMPEG);

            WAVEFORMATEXFFMPEG* wfeff = (WAVEFORMATEXFFMPEG*)pbFormat;
            pWfe = &wfeff->wfex;

            sl.AddTail(_T("WAVEFORMATEXFFMPEG:"));
            str.Format(_T("nCodecId: 0x%04x"), wfeff->nCodecId); sl.AddTail(str);
            sl.AddTail(_T(""));
        } else {
            fmtsize = sizeof(WAVEFORMATEX);
            pWfe = (WAVEFORMATEX*)pbFormat;
        }

        WAVEFORMATEX& wfe = *pWfe;

        sl.AddTail(_T("WAVEFORMATEX:"));
        str.Format(_T("wFormatTag: 0x%04x"), wfe.wFormatTag); sl.AddTail(str);
        str.Format(_T("nChannels: %u"), wfe.nChannels); sl.AddTail(str);
        str.Format(_T("nSamplesPerSec: %u"), wfe.nSamplesPerSec); sl.AddTail(str);
        str.Format(_T("nAvgBytesPerSec: %u"), wfe.nAvgBytesPerSec); sl.AddTail(str);
        str.Format(_T("nBlockAlign: %u"), wfe.nBlockAlign); sl.AddTail(str);
        str.Format(_T("wBitsPerSample: %u"), wfe.wBitsPerSample); sl.AddTail(str);
        str.Format(_T("cbSize: %u (extra bytes)"), wfe.cbSize); sl.AddTail(str);
        sl.AddTail(_T(""));

        if (wfe.wFormatTag != WAVE_FORMAT_PCM && wfe.cbSize > 0 && formattype == FORMAT_WaveFormatEx) {
            if (wfe.wFormatTag == WAVE_FORMAT_EXTENSIBLE && wfe.cbSize == sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX)) {
                fmtsize = sizeof(WAVEFORMATEXTENSIBLE);

                WAVEFORMATEXTENSIBLE& wfex = *(WAVEFORMATEXTENSIBLE*)pbFormat;

                sl.AddTail(_T("WAVEFORMATEXTENSIBLE:"));
                if (wfex.Format.wBitsPerSample != 0) {
                    str.Format(_T("wValidBitsPerSample: %u"), wfex.Samples.wValidBitsPerSample);
                } else {
                    str.Format(_T("wSamplesPerBlock: %u"), wfex.Samples.wSamplesPerBlock);
                }
                sl.AddTail(str);
                str.Format(_T("dwChannelMask: 0x%08x"), wfex.dwChannelMask); sl.AddTail(str);
                str.Format(_T("SubFormat: %s"), CStringFromGUID(wfex.SubFormat)); sl.AddTail(str);
                sl.AddTail(_T(""));
            } else if (wfe.wFormatTag == WAVE_FORMAT_DOLBY_AC3 && wfe.cbSize == sizeof(DOLBYAC3WAVEFORMAT) - sizeof(WAVEFORMATEX)) {
                fmtsize = sizeof(DOLBYAC3WAVEFORMAT);

                DOLBYAC3WAVEFORMAT& ac3wf = *(DOLBYAC3WAVEFORMAT*)pbFormat;

                sl.AddTail(_T("DOLBYAC3WAVEFORMAT:"));
                str.Format(_T("bBigEndian: %u"), ac3wf.bBigEndian); sl.AddTail(str);
                str.Format(_T("bsid: %u"), ac3wf.bsid); sl.AddTail(str);
                str.Format(_T("lfeon: %u"), ac3wf.lfeon); sl.AddTail(str);
                str.Format(_T("copyrightb: %u"), ac3wf.copyrightb); sl.AddTail(str);
                str.Format(_T("nAuxBitsCode: %u"), ac3wf.nAuxBitsCode); sl.AddTail(str);
                sl.AddTail(_T(""));
            }
        }
    } else if (formattype == FORMAT_VorbisFormat) {
        fmtsize = sizeof(VORBISFORMAT);

        VORBISFORMAT& vf = *(VORBISFORMAT*)pbFormat;

        sl.AddTail(_T("VORBISFORMAT:"));
        str.Format(_T("nChannels: %u"), vf.nChannels); sl.AddTail(str);
        str.Format(_T("nSamplesPerSec: %u"), vf.nSamplesPerSec); sl.AddTail(str);
        str.Format(_T("nMinBitsPerSec: %u"), vf.nMinBitsPerSec); sl.AddTail(str);
        str.Format(_T("nAvgBitsPerSec: %u"), vf.nAvgBitsPerSec); sl.AddTail(str);
        str.Format(_T("nMaxBitsPerSec: %u"), vf.nMaxBitsPerSec); sl.AddTail(str);
        str.Format(_T("fQuality: %.3f"), vf.fQuality); sl.AddTail(str);
        sl.AddTail(_T(""));
    } else if (formattype == FORMAT_VorbisFormat2) {
        fmtsize = sizeof(VORBISFORMAT2);

        VORBISFORMAT2& vf = *(VORBISFORMAT2*)pbFormat;

        sl.AddTail(_T("VORBISFORMAT2:"));
        str.Format(_T("Channels: %u"), vf.Channels); sl.AddTail(str);
        str.Format(_T("SamplesPerSec: %u"), vf.SamplesPerSec); sl.AddTail(str);
        str.Format(_T("BitsPerSample: %u"), vf.BitsPerSample); sl.AddTail(str);
        str.Format(_T("HeaderSize: {%u, %u, %u}"), vf.HeaderSize[0], vf.HeaderSize[1], vf.HeaderSize[2]); sl.AddTail(str);
        sl.AddTail(_T(""));
    } else if (formattype == FORMAT_SubtitleInfo) {
        fmtsize = sizeof(SUBTITLEINFO);

        SUBTITLEINFO& si = *(SUBTITLEINFO*)pbFormat;

        sl.AddTail(_T("SUBTITLEINFO:"));
        str.Format(_T("dwOffset: %u"), si.dwOffset); sl.AddTail(str);
        str.Format(_T("IsoLang: %s"), CString(CStringA(si.IsoLang, _countof(si.IsoLang) - 1))); sl.AddTail(str);
        str.Format(_T("TrackName: %s"), CString(si.TrackName, _countof(si.TrackName) - 1)); sl.AddTail(str);
        sl.AddTail(_T(""));
    }

    if (fmtsize < cbFormat) { // extra and unknown data
        ULONG extrasize = cbFormat - fmtsize;
        str.Format(_T("Extradata: %u"), extrasize); sl.AddTail(str);

        // Hex/ASCII dump of the trailing bytes, 16 per row.
        for (ULONG i = 0, j = (extrasize + 15) & ~15; i < j; i += 16) {
            str.Format(_T("%04x:"), i);
            ULONG line_end = min(i + 16, extrasize);

            for (ULONG k = i; k < line_end; k++) {
                str.AppendFormat(_T(" %02x"), pbFormat[fmtsize + k]);
            }
            // Pad short rows so the ASCII column stays aligned.
            for (ULONG k = line_end, l = i + 16; k < l; k++) {
                str += _T("   ");
            }
            str += ' ';

            CStringA ch;
            for (size_t k = i; k < line_end; k++) {
                unsigned char c = (unsigned char)pbFormat[fmtsize + k];
                ch.AppendFormat("%c", c >= 0x20 ? c : '.');
            }
            str += ch;

            sl.AddTail(str);
        }
        sl.AddTail(_T(""));
    }
}
void MainDialog::OnSelchangeListOutputStreams()
{
    HRESULT hr;
    TCHAR sz[64];
    UINT nSel;
    DWORD dwFlags = 0;

    // Get the currently selected output stream.
    m_ListOutputStreams.GetCurrentSelection(&nSel);

    //
    // Display relevant information about the selected output stream
    //
    if (!m_pDMO) {
        return;
    }

    // Read output stream information flags
    hr = m_pDMO->GetOutputStreamInfo(nSel, &dwFlags);
    if (FAILED(hr)) {
        MessageBeep(0);
        return;
    }

    // Set stream info checkboxes (normalize the masked bits to 0/1)
    m_CheckOutWholeSamples.SetCheck(!!(dwFlags & DMO_OUTPUT_STREAMF_WHOLE_SAMPLES));
    m_CheckOutOneSample.SetCheck(!!(dwFlags & DMO_OUTPUT_STREAMF_SINGLE_SAMPLE_PER_BUFFER));
    m_CheckOutFixedSize.SetCheck(!!(dwFlags & DMO_OUTPUT_STREAMF_FIXED_SAMPLE_SIZE));
    m_CheckOutDiscardable.SetCheck(!!(dwFlags & DMO_OUTPUT_STREAMF_DISCARDABLE));
    m_CheckOutOptional.SetCheck(!!(dwFlags & DMO_OUTPUT_STREAMF_OPTIONAL));

    // Read preferred output type information (type index 0 is the most preferred)
    DMO_MEDIA_TYPE dmt = {0};
    hr = m_pDMO->GetOutputType(nSel, 0, &dmt);

    if (SUCCEEDED(hr)) {
        hr = GetFormatString(sz, NUMELMS(sz), &dmt);
    }
    if (SUCCEEDED(hr)) {
        SetDlgItemText(IDC_STATIC_OUT_FORMAT, sz);
    }
    if (SUCCEEDED(hr)) {
        hr = GetGUIDString(sz, NUMELMS(sz), &dmt.majortype);
    }
    if (SUCCEEDED(hr)) {
        SetDlgItemText(IDC_STATIC_OUT_TYPE, sz);
    }
    if (SUCCEEDED(hr)) {
        hr = GetGUIDString(sz, NUMELMS(sz), &dmt.subtype);
    }
    if (SUCCEEDED(hr)) {
        SetDlgItemText(IDC_STATIC_OUT_SUBYTPE, sz);
    }

    MoFreeMediaType(&dmt);

    // Does this DMO support quality control?
    IDMOQualityControl* pQC = 0;
    hr = m_pDMO->QueryInterface(IID_IDMOQualityControl, (void**)&pQC);
    m_CheckOutQC.SetCheck(SUCCEEDED(hr) ? TRUE : FALSE);
    if (SUCCEEDED(hr)) {
        pQC->Release();
    }
}
void MainDialog::OnSelchangeListInputStreams()
{
    HRESULT hr;
    TCHAR sz[64];
    UINT nSel;
    DWORD dwFlags = 0;

    // Get the currently selected input stream.
    m_ListInputStreams.GetCurrentSelection(&nSel);

    //
    // Display relevant information about the selected input stream
    //
    if (!m_pDMO) {
        return;
    }

    // Read input stream information flags
    hr = m_pDMO->GetInputStreamInfo(nSel, &dwFlags);
    if (FAILED(hr)) {
        MessageBeep(0);
        return;
    }

    // Set stream info checkboxes (normalize the masked bits to 0/1)
    m_CheckInWholeSamples.SetCheck(!!(dwFlags & DMO_INPUT_STREAMF_WHOLE_SAMPLES));
    m_CheckInOneSample.SetCheck(!!(dwFlags & DMO_INPUT_STREAMF_SINGLE_SAMPLE_PER_BUFFER));
    m_CheckInFixedSize.SetCheck(!!(dwFlags & DMO_INPUT_STREAMF_FIXED_SAMPLE_SIZE));
    m_CheckInHoldsBuffers.SetCheck(!!(dwFlags & DMO_INPUT_STREAMF_HOLDS_BUFFERS));

    // Read preferred input type information. The media types/subtypes
    // are arranged in order of preference, starting from index zero.
    DMO_MEDIA_TYPE dmt = {0};
    hr = m_pDMO->GetInputType(nSel, 0, &dmt);

    if (SUCCEEDED(hr)) {
        hr = GetFormatString(sz, NUMELMS(sz), &dmt);
    }
    if (SUCCEEDED(hr)) {
        SetDlgItemText(IDC_STATIC_IN_FORMAT, sz);
    }
    if (SUCCEEDED(hr)) {
        hr = GetGUIDString(sz, NUMELMS(sz), &dmt.majortype);
    }
    if (SUCCEEDED(hr)) {
        SetDlgItemText(IDC_STATIC_IN_TYPE, sz);
    }
    if (SUCCEEDED(hr)) {
        hr = GetGUIDString(sz, NUMELMS(sz), &dmt.subtype);
    }
    if (SUCCEEDED(hr)) {
        SetDlgItemText(IDC_STATIC_IN_SUBTYPE, sz);
    }

    MoFreeMediaType(&dmt);

    // Does this DMO support quality control?
    IDMOQualityControl* pQC = 0;
    hr = m_pDMO->QueryInterface(IID_IDMOQualityControl, (void**)&pQC);
    m_CheckInQC.SetCheck(SUCCEEDED(hr) ? TRUE : FALSE);
    if (SUCCEEDED(hr)) {
        pQC->Release();
    }
}
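// Sketch (an assumption-labeled extension, not part of the dialog above): the
// handlers only query type index 0, but a DMO exposes its full preference list
// through IMediaObject::GetInputType, which fails with DMO_E_NO_MORE_ITEMS once
// the type index runs past the last entry. "pDMO" and the way the string is
// consumed are hypothetical; GetGUIDString/NUMELMS are the helpers used above.
//
// void DumpPreferredInputTypes(IMediaObject* pDMO, DWORD dwStream)
// {
//     for (DWORD i = 0; ; i++) {
//         DMO_MEDIA_TYPE dmt = {0};
//         HRESULT hr = pDMO->GetInputType(dwStream, i, &dmt);
//         if (FAILED(hr)) {           // DMO_E_NO_MORE_ITEMS ends the enumeration
//             break;
//         }
//         TCHAR sz[64];
//         if (SUCCEEDED(GetGUIDString(sz, NUMELMS(sz), &dmt.subtype))) {
//             // log or display sz here
//         }
//         MoFreeMediaType(&dmt);
//     }
// }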
int recChannel_t::source_format(char* newFormat)
{
    __CONTEXT("recChannel_t::source_format");

    int hr = 0;
    bool formatFound = false;
    IAMStreamConfig* pConfig = NULL;

    pControl->StopWhenReady();

    ql_t<AM_MEDIA_TYPE *> auxFormats = camInfo->getFormatList();
    for (int i = 0; i < auxFormats.len(); i++) {
        AM_MEDIA_TYPE format = *(auxFormats.nth(i));

        char subtypeName[100];
        memset(subtypeName, 0, sizeof(subtypeName));
        GetGUIDString(subtypeName, &format.subtype);

        VIDEOINFOHEADER* pVih = (VIDEOINFOHEADER*)format.pbFormat;

        // Accept the entry if it matches the requested format (guarding the
        // pVih dereference), or unconditionally for shared-display sources.
        if ((pVih == NULL && strcmp(newFormat, sourceFormat) == 0) ||
            (pVih != NULL &&
             pVih->bmiHeader.biHeight == capInfo.heigth &&
             pVih->bmiHeader.biWidth == capInfo.width &&
             strcmp(subtypeName, newFormat) == 0) ||
            camInfo->getKind() == SHARED) {

            if (strcmp(sourceFormat, newFormat)) {
                memset(sourceFormat, 0, 100);
                strcpy(sourceFormat, newFormat);
            }

            if (!hr && (camInfo->getKind() == CAM || camInfo->getKind() == SHARED)) {
                camInfo->output->Disconnect();
                hr = camInfo->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
                //pVih->AvgTimePerFrame = 666666;
                //pVih->AvgTimePerFrame = 333333/(frameRate);
                if (SUCCEEDED(hr) && pConfig) {
                    hr = pConfig->SetFormat(&format);
                    actualFormat = format;
                    pConfig->Release();
                }
            }

            formatFound = true;
            break;
        }
    }

    if (!formatFound) {
        // No match: reapply the last known good format to the capture pin.
        if (camInfo->getKind() == CAM || camInfo->getKind() == SHARED) {
            camInfo->output->Disconnect();
            hr = camInfo->output->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
            //pVih->AvgTimePerFrame = 666666;
            if (SUCCEEDED(hr) && pConfig) {
                hr = pConfig->SetFormat(&actualFormat);
                pConfig->Release();
            }
        }
    }

    NOTIFY("recChannel_t"
           "\r\n=========================================\r\n"
           "Channel %d : Source Description...\r\n"
           "- sourceName: %s\r\n"
           "- capture Size: %dx%d\r\n"
           "- supported Formats: %s\r\n"
           "- Window Info: (%d,%d,%d,%d)\r\n"
           "- Title: %s\r\n"
           "=========================================\r\n",
           getId(),
           camInfo->getCamName(),
           capInfo.width,
           capInfo.heigth,
           camInfo->getSupportedFormats(),
           windowInfo.top,
           windowInfo.left,
           windowInfo.width,
           windowInfo.heigth,
           title);

    remap();
    if (mapping) {
        map();
    }
    return 0;
}
int recChannel_t::select_source(camInfo_t* source)
{
    __CONTEXT("recChannel_t::select_source");

    if (camInfo) {
        if (camInfo->getKind() == TEST) {
            looper->EndThread();
        }
#ifdef _WINDOWS
        if (camInfo->getKind() == MEDIA) {
            EndThread();
        }
#endif
        camInfo->setFree(true);
    }

    int hr = 0;
    pControl->StopWhenReady();
    //pControl->Stop();

    bool sharedDisplaySource = false;
    RECT sharedDisplayRect;
    char auxName[100];

    if (source != NULL) {
        if (!(source->getKind() == CAM || source->getKind() == SHARED)) {
            capInfo.heigth = 0;
            capInfo.width = 0;
        } else {
            if (source->getKind() == CAM) {
                if (!capInfo.heigth) {
                    capInfo.heigth = DEFAULT_CAPTURE_HEIGTH;
                }
                if (!capInfo.width) {
                    capInfo.width = DEFAULT_CAPTURE_WIDTH;
                }
            }
            if (source->getKind() == SHARED) {
                sharedDisplay_t* sharedDisplay = static_cast<sharedDisplay_t *>(source);
                sharedDisplayRect = sharedDisplay->getSharedRect();
                capInfo.heigth = sharedDisplayRect.bottom - sharedDisplayRect.top;
                capInfo.width = sharedDisplayRect.right - sharedDisplayRect.left;
                mapping = true; // always map shared-display channels
                sharedDisplaySource = true;
                strcpy(auxName, source->getCamName());
            }
        }
    }

    refresh_channel(all);

    if (sharedDisplaySource && pSource == NULL) {
        // Create a shared-display source for this channel and pass it the rect.
        camInfo_t* newCamInfo = createSharedDisplay(this, auxName);
        sharedDisplay_t* sharedDisplay = static_cast<sharedDisplay_t *>(newCamInfo);
        sharedDisplay->setSharedRect(sharedDisplayRect);
    }

#ifdef _WINDOWS
    if (source && source->getKind() == MEDIA) {
        if (fControl) {
            if (source != camInfo) {
                fControl->m_slide.SetPos(0);
            }
        }
        RunThread();
    }
#endif

    camInfo = source;
    pOutput = camInfo->output;
    pOutput->Disconnect();
    sourceId = camInfo->getID();

    if (!strlen(sourceFormat)) {
        memset(sourceFormat, 0, 100);

        // Copy the first entry of the supported-formats string (characters from
        // index 1 up to the next ';') as the default source format.
        char supportedFormats[100];
        strcpy(supportedFormats, camInfo->getSupportedFormats());
        for (int j = 1; supportedFormats[j] != ';'; j++) {
            sourceFormat[j - 1] = supportedFormats[j];
        }

        ql_t<AM_MEDIA_TYPE *> auxFormats = camInfo->getFormatList();
        actualFormat = *(auxFormats.nth(0));

        // Prefer RGB24 if the source offers it.
        for (int k = 0; k < auxFormats.len(); k++) {
            AM_MEDIA_TYPE format = *(auxFormats.nth(k));
            char subtypeName[100];
            memset(subtypeName, 0, sizeof(subtypeName));
            GetGUIDString(subtypeName, &format.subtype);
            if (strcmp(subtypeName, "MEDIASUBTYPE_RGB24") == 0) {
                actualFormat = format;
                strcpy(sourceFormat, "MEDIASUBTYPE_RGB24");
            }
        }
    }

    pSource = camInfo->pSource;
    hr = pGraph->AddFilter(pSource, L"Capture Video Source");
    errorCheck(hr);
    hr = grab_geometry(capInfo);
    camInfo->setFree(false);

    // leave critical section
    return hr;
}