// Read the (cached) full TOC of the inserted CD into `toc`.
// Returns 0 on success, -1 on failure (non-CD media, device not open,
// or the IOCTL itself failed).
s32 IOCtlSrc::ReadTOC(char *toc, int msize)
{
    DWORD size = 0;

    // GetMediaType() >= 0 means non-CD media here; a CD-style TOC
    // only applies to CD media, so bail out for anything else.
    if (GetMediaType() >= 0)
        return -1;

    if (!tocCached) {
        memset(&tocrq, 0, sizeof(CDROM_READ_TOC_EX));
        tocrq.Format = CDROM_READ_TOC_EX_FORMAT_FULL_TOC;
        tocrq.Msf = 1;
        tocrq.SessionTrack = 1;

        if (!OpenOK)
            return -1;

        // Ask the drive for the full TOC; result lands in tocCacheData (max 2 KB).
        int code = DeviceIoControl(device, IOCTL_CDROM_READ_TOC_EX, &tocrq, sizeof(tocrq), tocCacheData, 2048, &size, NULL);

        if (code == 0)
            return -1;

        tocCached = true;
    }

    // Copy at most 2048 bytes, clamped to the caller's buffer size.
    memcpy(toc, tocCacheData, min(2048, msize));

    return 0;
}
////////////////////////////////////////////////////////////////////////// // CVCamStream is the one and only output pin of CVCam which handles // all the stuff. ////////////////////////////////////////////////////////////////////////// CVCamStream::CVCamStream(HRESULT *phr, CVCam *pParent, LPCWSTR pPinName) : CSourceStream(NAME("Virtual Cam"), phr, pParent, pPinName), m_pParent(pParent) // m_pParent is the CSource object that owns the pin { // Set the default media type as 320x240x24@15 GetMediaType(4, &m_mt); const char* filePathTxt = "C:\\Users\\Brendan\\Desktop\\out2.h264"; wchar_t wtext[40]; mbstowcs(wtext, filePathTxt, strlen(filePathTxt) + 1); LPCWSTR filePath = wtext; bufferFile = CreateFile( wtext, GENERIC_READ, FILE_SHARE_WRITE | FILE_SHARE_READ, NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL ); DWORD error = GetLastError(); //bytesRead = 0; }
// Return the layer-break sector address of the inserted DVD.
// Returns -1 for CD media, the cached/computed break address on success,
// or 0 when the disc is single layer or the physical-descriptor query fails.
s32 IOCtlSrc::GetLayerBreakAddress()
{
    DWORD size;

    // GetMediaType() < 0 means CD media here; layer breaks only exist on DVDs.
    if (GetMediaType() < 0)
        return -1;

    if (layerBreakCached)
        return layerBreak;

    // Request the DVD physical format descriptor for layer 0.
    dvdrs.BlockByteOffset.QuadPart = 0;
    dvdrs.Format = DvdPhysicalDescriptor;
    dvdrs.SessionId = sessID;
    dvdrs.LayerNumber = 0;
    if (DeviceIoControl(device, IOCTL_DVD_READ_STRUCTURE, &dvdrs, sizeof(dvdrs), &dld, sizeof(dld), &size, nullptr)) {
        if (dld.ld.NumberOfLayers == 0) {
            // Single layer
            layerBreak = 0;
        } else if (dld.ld.TrackPath == 0) {
            // PTP: break is the length of layer 0
            // (_byteswap_ulong converts the byte-swapped on-disc sector fields).
            layerBreak = _byteswap_ulong(dld.ld.EndDataSector) - _byteswap_ulong(dld.ld.StartingDataSector);
        } else {
            // OTP
            layerBreak = _byteswap_ulong(dld.ld.EndLayerZeroSector) - _byteswap_ulong(dld.ld.StartingDataSector);
        }

        layerBreakCached = true;

        return layerBreak;
    }

    //if not a cd, and fails, assume single layer
    return 0;
}
// Build an enumerator over every media type this pin can offer.
// NOTE(review): returns a raw new'd CLEnumMediaTypes; the commented-out code
// suggests this used to go through CComObject/AddRef — confirm the caller
// takes ownership and releases it.
IEnumMediaTypes * BasePinImpl::EnumMediaTypes()
{
	CLEnumMediaTypes* p = new CLEnumMediaTypes;
	// hr = CComObject<CLEnumMediaTypes>::CreateInstance(&p);
	// if (FAILED(hr)) return hr;
	// p->AddRef();

	// Collect media types by index until GetMediaType reports no more
	// (a non-zero return terminates the loop).
	int n = 0;
	MediaType* mediaType;
	//mediaType.Create();
	while (GetMediaType(n, &mediaType) == 0)
	{
		// ASSERT(mediaType.m_pmt != NULL);
		p->m_list.Add(mediaType);
		//mediaType.Create();
		n++;
	}
	//p->m_list = &m_pins;

	// Position the enumerator cursor at the start of the collected list.
	p->m_pos = p->m_list.GetData();
	return p;
}
// return a list of all attachment body parts belong to this body part // Since we are using smart pointers, and it's not possible to cast // from <this> to a smart_ptr, we need to give this function a pointer // to itself. Really ugly but should work fine. int MimeBody::GetAttachmentList(shared_ptr<MimeBody> pThis, BodyList& rList) const { int nCount = 0; int nMediaType = GetMediaType(); if (MEDIA_MULTIPART != nMediaType) { if (IsAttachment()) { rList.push_back(pThis); nCount++; } } else { list<shared_ptr<MimeBody> >::const_iterator it; for (it=m_listBodies.begin(); it!=m_listBodies.end(); it++) { shared_ptr<MimeBody> pBP = *it; ASSERT(pBP != NULL); nCount += pBP->GetAttachmentList(pBP, rList); } } return nCount; }
// Build the capture graph for the named device:
//   capture source -> sample grabber -> null renderer (via the preview pin).
// Returns 0 on success or a negative, step-specific error code.
int VideoCapture::Connect(const wchar_t* deviceName)
{
    if (GetStatus() != OK)
        return -1;

    // Attach the capture-graph builder to our filter graph.
    hr = m_pCapture->SetFiltergraph(m_pGraph);
    if (FAILED(hr))
        return -2;

    IBaseFilter* pSrcFilter = NULL;
    hr = FindCaptureDevice(deviceName, &pSrcFilter);
    if (FAILED(hr))
        return -3;

    // Add Capture filter to our graph.
    hr = m_pGraph->AddFilter(pSrcFilter, L"Video Capture");
    if (FAILED(hr))
    {
        pSrcFilter->Release();
        return -3;
    }

    //Add Sample Grabber filter
    IBaseFilter* pGrabberFilter = NULL;
    hr = AddSampleGrabber(&pGrabberFilter);
    if (FAILED(hr))
    {
        pSrcFilter->Release();
        return -4;
    }

    IBaseFilter* pNullFilter = NULL;
    hr = AddNullRenderer(&pNullFilter);
    if (FAILED(hr))
    {
        pGrabberFilter->Release();
        pSrcFilter->Release();
        return -5;
    }

    // Render the preview pin on the video capture filter
    hr = m_pCapture->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, pSrcFilter, pGrabberFilter, pNullFilter);
    if (FAILED(hr))
    {
        pNullFilter->Release();
        pGrabberFilter->Release();
        pSrcFilter->Release();
        return -6;
    }

    // Drop our local references; the graph keeps the filters alive
    // (standard COM ref-counting through AddFilter/RenderStream).
    pNullFilter->Release();
    pGrabberFilter->Release();
    pSrcFilter->Release();

    m_Status = CONNECTED;

    // Query and cache the negotiated media type.
    hr = GetMediaType();
    if (FAILED(hr))
        return -7;

    return 0;
}
// Synthesize a CD-style full TOC for a plain ISO image (which has no real
// TOC on disc): two sessions are faked with a single data track, and the
// lead-out is placed at sector_count.
// NOTE: only the TOC-building half of this function is visible in this
// chunk; the copy-out/return logic continues past it.
s32 PlainIso::ReadTOC(char *toc,int msize)
{
	DWORD size=0;

	// GetMediaType() >= 0 means non-CD media here; a CD TOC does not apply.
	if(GetMediaType()>=0)
		return -1;

	if(!tocCached)
	{
		CDROM_TOC_FULL_TOC_DATA *ftd=(CDROM_TOC_FULL_TOC_DATA*)tocCacheData;

		// Build basic TOC: 6 descriptors plus the 2-byte length field itself.
		int length = 6 * sizeof(ftd->Descriptors[0]) + 2;
		ftd->Length[0] = length>>8;   // big-endian length field
		ftd->Length[1] = length;
		ftd->FirstCompleteSession=1;
		ftd->LastCompleteSession=2;

		// Lead-out position in MSF, derived from the image's sector count.
		int dm,ds,df;
		LBA_TO_MSF(dm,ds,df,sector_count);

		// Full-TOC point entries (per the CD spec): 0xA0 = first track,
		// 0xA1 = last track, 0xA2 = lead-out, 0xB0/0xC0 = multisession
		// info, 0x01 = start of track 1.
		MKDESCRIPTOR(0,1,0x00,1,0xA0,0,0,0,1,0,0);
		MKDESCRIPTOR(1,1,0x00,1,0xA1,0,0,0,1,0,0);
		MKDESCRIPTOR(2,1,0x00,1,0xA2,0,0,0,dm,ds,df);
		MKDESCRIPTOR(3,1,0x00,5,0xB0,0,0,0,0,0,0);
		MKDESCRIPTOR(4,1,0x00,5,0xC0,0,0,0,0,0,0);
		MKDESCRIPTOR(5,1,0x04,1,0x01,0,0,0,0,2,0);

		tocCached = true;
	}
// return a list of all attachment body parts belong to this body part int CMimeBody::GetAttachmentList(CBodyList& rList) const { int nCount = 0; int nMediaType = GetMediaType(); if (MEDIA_MULTIPART != nMediaType) { string strName = GetName(); if (strName.size() > 0) { rList.push_back((CMimeBody*)this); nCount++; } } else { list<CMimeBody*>::const_iterator it; for (it=m_listBodies.begin(); it!=m_listBodies.end(); it++) { CMimeBody* pBP = *it; ASSERT(pBP != NULL); nCount += pBP->GetAttachmentList(rList); } } return nCount; }
// Output pin of the screen-capture source filter. The constructor only
// initializes members to their defaults (default-size capture rect at the
// origin, default FPS/bit depth) and caches the default media type in m_mt.
CScreenCaptureSourcePin::CScreenCaptureSourcePin(HRESULT *phr, CSource *pFilter, HANDLE hEventContinue, HANDLE hEventStop)
:CSourceStream(NAME("Fishjam Screen Capture Pin"), phr, pFilter, L"Out")
, m_hEventContinue(hEventContinue)
, m_hEventStop(hEventStop)
//, m_FrameWritten(0)
//, m_bZeroMemory(FALSE)
, m_nFrameNumber(0)
//, m_iImageWidth(320)
//, m_iImageHeight(240)
//, m_nAvgTimePerFrame(FPS_25)
//, m_rtFrameLength(FPS_25)
//, m_nCurrentBitDepth(32)
,m_dwAdviseToken(0)
{
	ATLTRACE(TEXT("CScreenCaptureSourcePin::CScreenCaptureSourcePin, this=0x%x, CurThreadId=%d\n"), this, GetCurrentThreadId());
	CAutoLock cAutoLock(&m_cSharedState);
	//m_pBlockElapse = new FTL::CFBlockElapse(TEXT(__FILE__), __LINE__, TEXT(__FUNCDNAME__), FTL::_ReturnAddress());
	//CAutoLock cAutoLock(&m_cSharedState);

	m_bMouseOverlay = TRUE;

	// Default capture size; the max dimensions track the virtual screen.
	m_nWidth = DEFAULT_WIDTH;
	m_nHeight = DEFAULT_HEIGTH;
	m_nMaxWidth = ::GetSystemMetrics(SM_CXVIRTUALSCREEN);
	m_nMaxHeight = ::GetSystemMetrics(SM_CYVIRTUALSCREEN);

	// Capture rectangle: default size anchored at the origin.
	m_rcCapture.left = m_rcCapture.top = 0;
	m_rcCapture.right = m_rcCapture.left + m_nWidth;
	m_rcCapture.bottom = m_rcCapture.top + m_nHeight;

	m_nBitCount = DEFAULT_BIT_COUNT;
	// Frame duration (DirectShow reference-time units, UNITS per second).
	m_nAvgTimePerFrame = UNITS / DEFAULT_FPS;

	//ZeroMemory(&m_bmpInfo, sizeof(m_bmpInfo));
	//m_bmpInfo.bmiHeader.biSize = sizeof(m_bmpInfo.bmiHeader);

	// Stream/clock bookkeeping; -1 presumably means "not scheduled" —
	// confirm against where m_rtStartAt/m_rtStopAt are consumed.
	m_rtStart = 0;
	m_rtStreamOffset = 0;
	m_rtStartAt = -1;
	m_rtStopAt = -1;
	//m_iImageWidth = GetSystemMetrics(SM_CXSCREEN);
	//m_iImageHeight = GetSystemMetrics(SM_CYSCREEN);
	m_dwStartCookie = 0;
	m_dwStopCookie = 0;
	m_bShouldFlush = FALSE;
	m_bStartNotified = FALSE;
	m_bStopNotified = FALSE;
	m_pProperties = NULL;
	//m_pClock = NULL;
	m_hSemaphore = NULL;
	m_rfMaxRecordTime = 0;
	m_mt.majortype = GUID_NULL;
	m_pScreenCaptureImpl = NULL;
	m_rtClockStart = 0;
	m_rtClockStop = 0;
	m_bFirstFrame = TRUE;

	// Cache the default (index 0) media type.
	GetMediaType(0, &m_mt);

	// Manual-reset, initially non-signaled event (per its name, set once
	// the filter is running).
	m_hEventAfterFilterRun = ::CreateEvent(NULL, TRUE, FALSE, NULL);
}
// returns the "range" of fps, etc. for this index HRESULT STDMETHODCALLTYPE CPushPinDesktop::GetStreamCaps(int iIndex, AM_MEDIA_TYPE **pmt, BYTE *pSCC) { CAutoLock cAutoLock(m_pFilter->pStateLock()); HRESULT hr = GetMediaType(iIndex, &m_mt); // ensure setup/re-use m_mt ... // some are indeed shared, apparently. if(FAILED(hr)) { return hr; } *pmt = CreateMediaType(&m_mt); // a windows lib method, also does a copy for us if (*pmt == NULL) return E_OUTOFMEMORY; DECLARE_PTR(VIDEO_STREAM_CONFIG_CAPS, pvscc, pSCC); /* most of these are listed as deprecated by msdn... yet some still used, apparently. odd. */ pvscc->VideoStandard = AnalogVideo_None; pvscc->InputSize.cx = getCaptureDesiredFinalWidth(); pvscc->InputSize.cy = getCaptureDesiredFinalHeight(); // most of these values are fakes.. pvscc->MinCroppingSize.cx = getCaptureDesiredFinalWidth(); pvscc->MinCroppingSize.cy = getCaptureDesiredFinalHeight(); pvscc->MaxCroppingSize.cx = getCaptureDesiredFinalWidth(); pvscc->MaxCroppingSize.cy = getCaptureDesiredFinalHeight(); pvscc->CropGranularityX = 1; pvscc->CropGranularityY = 1; pvscc->CropAlignX = 1; pvscc->CropAlignY = 1; pvscc->MinOutputSize.cx = 1; pvscc->MinOutputSize.cy = 1; pvscc->MaxOutputSize.cx = getCaptureDesiredFinalWidth(); pvscc->MaxOutputSize.cy = getCaptureDesiredFinalHeight(); pvscc->OutputGranularityX = 1; pvscc->OutputGranularityY = 1; pvscc->StretchTapsX = 1; // We do 1 tap. I guess... pvscc->StretchTapsY = 1; pvscc->ShrinkTapsX = 1; pvscc->ShrinkTapsY = 1; pvscc->MinFrameInterval = m_rtFrameLength; // the larger default is actually the MinFrameInterval, not the max pvscc->MaxFrameInterval = 500000000; // 0.02 fps :) [though it could go lower, really...] pvscc->MinBitsPerSecond = (LONG) 1*1*8*GetFps(); // if in 8 bit mode 1x1. I guess. pvscc->MaxBitsPerSecond = (LONG) getCaptureDesiredFinalWidth()*getCaptureDesiredFinalHeight()*32*GetFps() + 44; // + 44 header size? + the palette? return hr; }
////////////////////////////////////////////////////////////////////////// // UVCamStream is the one and only output pin of UVCam which handles // all the stuff. ////////////////////////////////////////////////////////////////////////// UVCamStream::UVCamStream(HRESULT *phr, UVCam *pParent, LPCWSTR pPinName) : CSourceStream(NAME(QUOTED_CAM_NAME),phr, pParent, pPinName), m_pParent(pParent), m_iDefaultRepeatTime(50) { bus.init(); lastId = 0; lastChange = -1000; haveId = false; ct = 0; // Set the default media type as 320x240x24@15 GetMediaType(4, &m_mt); }
// Verify that the proposed media type matches the single type this pin offers.
HRESULT CBonSrcPin::CheckMediaType(const CMediaType *pMediaType)
{
	CheckPointer(pMediaType, E_POINTER);

	CAutoLock AutoLock(&m_pFilter->m_cStateLock);

	// Compare against our one and only media type (index 0).
	CMediaType OurType;
	GetMediaType(0, &OurType);
	if (*pMediaType == OurType)
		return S_OK;
	return E_FAIL;
}
// Dispatch a queued file-system task to the appropriate media handler
// (photo / audio / video) based on its detected media type. Unsupported
// files are deleted or skipped depending on REMOVE_UNSUPPORTED_FILE.
// Returns 0 on success (or when the file is ignorable), -1 on error.
int ProcessFile(ZFS_TASK *ptask, PZFS pzfs)
{
    if(ptask == NULL)
    {
        ZWarning(DBG_MISC, "task is empty");
        return -1;
    }

    char fullPath[MAX_PATH_SIZE] = {0};
    int isDir;

    /* Check to see if the file is exists to avoid in some cases
     * that the file which removed may add to task queue twice */
    // BUG FIX: use snprintf instead of sprintf — path + name can exceed
    // MAX_PATH_SIZE and sprintf would overflow the stack buffer.
    snprintf(fullPath, sizeof(fullPath), "%s/%s", ptask->path, ptask->name);
    if (ZUtilIsFilePresent (fullPath, &isDir) == 0)
    {
        ZWarning (DBG_MISC, "Warning: the file doesn't exists, ignore it->%s", fullPath);
        return 0;
    }
    ZInfo4 (DBG_MISC, "Handling file -> %s", fullPath);

    // Classify the file by name.
    int mediaType = GetMediaType(ptask->name);
    ZInfo4(DBG_MISC, "MediaType[%d]", mediaType);
    // if(mediaType == Z_UNKNOWN_FILE) {
    //     return -1;
    // }

    if((mediaType > Z_PHOTO_START && mediaType < Z_PHOTO_END) && pzfs->photoConfig)
    {
        ProcessPhotoFile(fullPath);
    }
    else if((mediaType > Z_AUDIO_START && mediaType < Z_AUDIO_END) && pzfs->audioConfig )
    {
        ProcessAudioFile(ptask);
    }
    else if((mediaType > Z_VIDEO_START && mediaType < Z_VIDEO_END) && pzfs->videoConfig)
    {
        ProcessVideoFile(fullPath);
    }
    else
    {
        //meet a unsupported file
        if(REMOVE_UNSUPPORTED_FILE)
        {
            ZInfo4 (DBG_MISC, "Delete unsupport file.");
            if (removeFile (pzfs, ptask->path, ptask->name) != 0)
            {
                ZError (DBG_MISC, "Remove invalid file failed->%s/%s", ptask->path, ptask->name);
                return -1;
            }
            else
            {
                deletedPlusOne ();  /* Deleted plus one */
                saveSummary (pzfs); /* Sync to DB */
            }
        }
        else
        {
            ZInfo4 (DBG_MISC, "Skip unsupported file.");
        }
    }
    return 0;
}
//--------------------------------------------------------------------------- // Code //--------------------------------------------------------------------------- XnVideoStream::XnVideoStream(HRESULT *phr, XnVideoSource *pParent, xn::ImageGenerator& imageGen, LPCWSTR pPinName) : CSourceStream(NAME("Video Stream"), phr, pParent, pPinName), m_imageGen(imageGen), m_bFlipVertically(FALSE), m_nPreferredMode(-1), m_Dump(pParent->m_Dump) { ASSERT(phr); xnFPSInit(&m_FPS, 90); XnUInt32 nSupportedModes = m_imageGen.GetSupportedMapOutputModesCount(); XnMapOutputMode* aOutputModes = new XnMapOutputMode[nSupportedModes]; XnStatus nRetVal = m_imageGen.GetSupportedMapOutputModes(aOutputModes, nSupportedModes); if (nRetVal != XN_STATUS_OK) { *phr = E_UNEXPECTED; delete[] aOutputModes; return; } nRetVal = m_aSupportedModes.Reserve(nSupportedModes); if (nRetVal != XN_STATUS_OK) { *phr = E_UNEXPECTED; delete[] aOutputModes; return; } XnBool bRGB = m_imageGen.IsPixelFormatSupported(XN_PIXEL_FORMAT_RGB24); XnBool bMJPEG = m_imageGen.IsPixelFormatSupported(XN_PIXEL_FORMAT_MJPEG); Mode mode; for (XnUInt32 i = 0; i < nSupportedModes; ++i) { mode.OutputMode = aOutputModes[i]; if (bRGB) { mode.Format = XN_PIXEL_FORMAT_RGB24; m_aSupportedModes.AddLast(mode); } if (bMJPEG) { mode.Format = XN_PIXEL_FORMAT_MJPEG; m_aSupportedModes.AddLast(mode); } } CMediaType mediaType; GetMediaType(0, &mediaType); SetMediaType(&mediaType); }
// Locate the first well-formedness error in this resource's text.
// XHTML/DTBook documents use the XHTML-aware checker; everything else goes
// through the generic XML well-formedness check.
XhtmlDoc::WellFormedError XMLResource::WellFormedErrorLocation() const
{
    QReadLocker locker(&GetLock());
    const QString mtype = GetMediaType();

    if (mtype == "application/xhtml+xml" || mtype == "application/x-dtbook+xml") {
        return XhtmlDoc::WellFormedErrorForSource(GetText());
    }
    return CleanSource::WellFormedXMLCheck(GetText(), mtype);
}
// // GetMediaType/3 // // By default we support only one type // iPosition indexes are 0-n HRESULT CSourceStream::GetMediaType(int iPosition, CMediaType *pMediaType) { CAutoLock lock(m_pFilter->pStateLock()); if (iPosition<0) { return E_INVALIDARG; } if (iPosition>0) { return VFW_S_NO_MORE_ITEMS; } return GetMediaType(pMediaType); }
// // CheckMediaType // // Do we support this type? Provides the default support for 1 type. HRESULT CSourceStream::CheckMediaType(const CMediaType *pMediaType) { CAutoLock lock(m_pFilter->pStateLock()); CMediaType mt; GetMediaType(&mt); if (mt == *pMediaType) { return NOERROR; } return E_FAIL; }
// Accept a proposed media type when its major type and subtype both match
// our single supported type (format block is deliberately not compared).
HRESULT CSubtitleStream::CheckMediaType(const CMediaType* pmt)
{
	CAutoLock lock(m_pFilter->pStateLock());

	CMediaType mtOurs;
	GetMediaType(&mtOurs);

	const bool bSameMajor = (mtOurs.majortype == pmt->majortype);
	const bool bSameSub = (mtOurs.subtype == pmt->subtype);
	return (bSameMajor && bSameSub) ? NOERROR : E_FAIL;
}
bool XMLResource::FileIsWellFormed() const { // TODO: expand this with a dialog to fix the problem QReadLocker locker(&GetLock()); QString mtype = GetMediaType(); if ((mtype == "application/xhtml+xml") || (mtype == "application/x-dtbook+xml")) { XhtmlDoc::WellFormedError error = XhtmlDoc::WellFormedErrorForSource(GetText()); bool well_formed = error.line == -1; return well_formed; } bool well_formed = CleanSource::IsWellFormedXML(GetText(),mtype); return well_formed; }
//////////////////////////////////////////////////////////////////////////
// CVCamStream is the one and only output pin of CVCam which handles
// all the stuff.
//////////////////////////////////////////////////////////////////////////
CVCamStream::CVCamStream(HRESULT *phr, CVCam *pParent, LPCWSTR pPinName) :
	CSourceStream(NAME("SpoutCam"), phr, pParent, pPinName), m_pParent(pParent)
{
	// Spout receiver mode/state flags.
	bMemoryMode     = false; // Default mode is texture, true means memoryshare
	bDX9mode        = false; // Not currently used
	bInvert         = true;  // Not currently used
	bInitialized    = false; // SpoutCam receiver
	bGLinitialized  = false; // OpenGL
	bBGRA           = false; // #ifdef GL_EXT_bgra = GL_BGRA_EXT and GL_BGR_EXT
	bDisconnected   = false; // Has to connect before can disconnect or it will never connect
	glContext       = NULL;  // Context is established within this application

	// Filter and sender image sizes.
	g_Width         = 640;   // if there is no Sender, getmediatype will use defaults
	g_Height        = 480;
	g_SenderWidth   = 640;   // give it an initial size - this will be changed if a sender is running at start
	g_SenderHeight  = 480;
	g_senderBuffer  = NULL;  // local rgb buffer the same size as the sender (can be a different size to the filter)
	g_senderTexture = 0;
	g_SenderName[0] = 0;

	//
	// On startup get the active Sender name if any.
	//
	if(receiver.GetActiveSender(g_SenderName)) {
		// Set the global width and height
		receiver.GetImageSize(g_SenderName, g_SenderWidth, g_SenderHeight, bMemoryMode);
		g_Width  = g_SenderWidth;
		g_Height = g_SenderHeight;
	}

	// Cannot use receiver.GetMemoryShareMode() here because
	// it requires an OpenGL context, so look at the registry directly.
	DWORD dwMemory = 0;
	if(receiver.spout.interop.spoutdx.ReadDwordFromRegistry(&dwMemory, "Software\\Leading Edge\\Spout", "MemoryShare")) {
		if(dwMemory == 1) {
			bMemoryMode = true;
		}
	}
	bDX9mode = receiver.GetDX9(); // Currently not used, might use this flag later

	// Set mediatype to shared width and height or if it did not connect set defaults
	GetMediaType(4, &m_mt);

	// Frame statistics and static window-handle bookkeeping.
	NumDroppedFrames = 0;
	NumFrames = 0;
	hwndButton = NULL; // ensure NULL of static variable for the OpenGL window handle
}
// Called when graph is first started HRESULT CVCamStream::OnThreadCreate() { assert(currentlyRunning == 0); // sanity... currentlyRunning = TRUE; GetMediaType(0, &m_mt); // give it a default type... HRESULT hr = LoopbackCaptureSetup(); if (FAILED(hr)) { printf("IAudioCaptureClient::setup failed"); return hr; } return NOERROR; }
// clear all attachments from this subtype. void MimeBody::ClearAttachments() { if (GetMediaType() ==MEDIA_MULTIPART) { list<shared_ptr<MimeBody> >::iterator it = m_listBodies.begin(); while (it != m_listBodies.end()) { shared_ptr<MimeBody> pBody = (*it); if (pBody->IsAttachment()) it = m_listBodies.erase(it); else it++; } } }
// // CheckMediaType // // Do we support this type? Provides the default support for 1 type. HRESULT CDynamicSourceStream::CheckMediaType(const CMediaType *pMediaType) { CheckPointer(pMediaType,E_POINTER); CAutoLock lock(m_pFilter->pStateLock()); CMediaType mt; GetMediaType(&mt); if(mt == *pMediaType) { return NOERROR; } return E_FAIL; }
// Output pin of the Kinect virtual webcam. The constructor loads the saved
// settings, connects to the first enabled device (kinect v2, then v1, then
// a "null" fallback), caches that device's preferred media type, centers
// the camera focus — then disconnects again until playback starts.
CKCamStream::CKCamStream(HRESULT *phr, CKCam *pParent, LPCWSTR pPinName)
	: CSourceStream(NAME("KinectWebCam"), phr, pParent, pPinName),
	m_num_frames(0),
	m_num_dropped(0),
	m_pParent(pParent)
{
	// try to load the settings
	settings::load();

	// try to initialize a device; array order encodes preference, and the
	// "null" entry is an always-enabled fallback.
	DeviceEnumeration f_devices[] = { {"kinect_v2", settings::KinectV2Enabled},
									  {"kinect",    settings::KinectV1Enabled},
									  {"null",      true} };
	for (auto f_dev = std::begin(f_devices); m_device == nullptr && f_dev != std::end(f_devices); ++f_dev)
	{
		if (f_dev->m_enabled)
		{
			m_device = device::device_factory(f_dev->m_type);
			// discard the device again if we cannot connect to it
			if (!m_device->connect_to_first()) m_device = nullptr;
		}
	}

	// store the default media type
	if (m_device)
	{
		GetMediaType(m_device->video_resolution_preferred() + 1, &m_mt); // GetMediaType is 1 based
	}

	// initialize the camera focus in the center of the camera
	if (m_device)
	{
		auto f_native_res = m_device->video_resolution(m_device->video_resolution_native());
		m_focus.m_x = f_native_res.m_width / 2;
		m_focus.m_y = f_native_res.m_height / 2;
	}

	// disconnect from the device until playback is started
	if (m_device)
	{
		m_device->disconnect();
	}
}
//---------------------------------------------------------------------------- //! @brief 指定されたメディアタイプが利用できるかどうか確認する //! @param pmt : 出力出来るかどうか確認をするメディアタイプ //! @return エラーコード //! @note 現在、DirectX VAを常にはじくようにしている //---------------------------------------------------------------------------- HRESULT CDemuxOutputPin::CheckMediaType( const CMediaType *pmt ) { CAutoLock cAutoLock(m_Lock); if( m_Stream->IsDXVASubtype( pmt ) ) return VFW_E_TYPE_NOT_ACCEPTED; CMediaType mt; int i = 0; while( GetMediaType( i, &mt ) == S_OK ) { if( mt.majortype == pmt->majortype && mt.subtype == pmt->subtype && mt.formattype == pmt->formattype ) return S_OK; ++i; } return VFW_E_TYPE_NOT_ACCEPTED; }
//---------------------------------------------------------------------------- //! @brief メディアタイプを設定する //! @param pmt : メディアタイプ //! @return エラーコード //---------------------------------------------------------------------------- HRESULT CWMOutput::SetMediaType( const CMediaType *pmt ) { CMediaType mt; if( GetMediaType(0, &mt ) == S_OK ) { if( mt.majortype == pmt->majortype && mt.subtype == pmt->subtype && mt.bFixedSizeSamples == pmt->bFixedSizeSamples && mt.bTemporalCompression == pmt->bTemporalCompression && // mt.lSampleSize == pmt->lSampleSize && mt.formattype == pmt->formattype ) { return S_OK; } } return VFW_E_INVALIDMEDIATYPE; }
// clear all attachments from this subtype. void MimeBody::RemoveAttachment(shared_ptr<MimeBody> pAttachment) { if (GetMediaType() ==MEDIA_MULTIPART) { list<shared_ptr<MimeBody> >::iterator it = m_listBodies.begin(); while (it != m_listBodies.end()) { shared_ptr<MimeBody> pBody = (*it); if (pBody->IsAttachment() && pBody == pAttachment) { it = m_listBodies.erase(it); return; } else it++; } } }
// Set the drive's spindle speed to a PS2-appropriate value, or restore the
// drive's maximum speed when restore_defaults is true.
void IOCtlSrc::SetSpindleSpeed(bool restore_defaults)
{
	DWORD dontcare;
	USHORT speed = 0;

	// GetMediaType() < 0 means CD media here; otherwise treat as DVD.
	if (GetMediaType() < 0)
		speed = 4800;  // CD-ROM to ~32x (PS2 has 24x (3600 KB/s))
	else
		speed = 11080; // DVD-ROM to ~8x (PS2 has 4x (5540 KB/s))

	if (!restore_defaults) {
		CDROM_SET_SPEED s;
		s.RequestType = CdromSetSpeed;
		s.RotationControl = CdromDefaultRotation;
		s.ReadSpeed = speed;
		s.WriteSpeed = speed;

		if (DeviceIoControl(device,
				IOCTL_CDROM_SET_SPEED, // operation to perform
				&s, sizeof(s),         // input buffer
				NULL, 0,               // no output buffer
				&dontcare,             // #bytes returned
				(LPOVERLAPPED)NULL))   // synchronous I/O
		{
			printf(" * CDVD: setSpindleSpeed success (%uKB/s)\n", speed);
		} else {
			printf(" * CDVD: setSpindleSpeed failed! \n");
		}
	} else {
		CDROM_SET_SPEED s;
		s.RequestType = CdromSetSpeed;
		s.RotationControl = CdromDefaultRotation;
		s.ReadSpeed = 0xffff;  // maximum ?
		s.WriteSpeed = 0xffff;

		// Best-effort: the result is deliberately ignored when restoring
		// default speed.
		DeviceIoControl(device,
			IOCTL_CDROM_SET_SPEED, // operation to perform
			&s, sizeof(s),         // input buffer
			NULL, 0,               // no output buffer
			&dontcare,             // #bytes returned
			(LPOVERLAPPED)NULL);   // synchronous I/O
	}
}
////////////////////////////////////////////////////////////////////////// // CVCamStream is the one and only output pin of CVCam which handles // all the stuff. ////////////////////////////////////////////////////////////////////////// CVCamStream::CVCamStream(HRESULT *phr, CVCam *pParent, LPCWSTR pPinName) : CSourceStream(NAME("Spout Cam"), phr, pParent, pPinName), m_pParent(pParent) { bMemoryMode = false; // Default mode is texture, true means memoryshare bInitialized = false; bGLinitialized = false; bConnected = false; bDisconnected = false; // has to connect before can disconnect or it will never connect glContext = 0; ShareHandle = NULL; // local copy of texture share handle g_Width = 320; // if there is no Sender, getmediatype will use defaults g_Height = 240; senderWidth = 0; senderHeight = 0; g_fbo = 0; g_fbo_texture = 0; SharedMemoryName[0] = 0; // // On startup get the active Sender name if any. // // The purpose of an ActiveSender' is to set the user requested name from a dialog // which is independent of the program, so an external means of finding the selection is required // If there is no active Sender, find out if any are registered, and if so, use the first one in the list // Currently if no Sender exists, the camera will go static and has to be closed and opened // again to detect a Sender. Resizing is needed before any more can be done. // // if(receiver.GetImageSize(SharedMemoryName, senderWidth, senderHeight, bMemoryMode)) { // printf(" *** First size = %dx%d - ,bMemoryMode = %d\n", senderWidth, senderHeight, bMemoryMode); if(bMemoryMode) receiver.SetMemoryShareMode(true); // memory mode from now on // Set the global width and height g_Width = senderWidth; g_Height = senderHeight; } // Set mediatype to shared width and height or if it did not connect set defaults GetMediaType(&m_mt); NumDroppedFrames = 0; NumFrames = 0; }
// What format is the capture card capturing right now? // The caller must free it with DeleteMediaType(*ppmt) // HRESULT CPushStream::GetFormat(AM_MEDIA_TYPE **ppmt) { dprintf("IAMStreamConfig::GetFormat"); if (ppmt == NULL) return E_POINTER; *ppmt = (AM_MEDIA_TYPE *)CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE)); if (*ppmt == NULL) return E_OUTOFMEMORY; ZeroMemory(*ppmt, sizeof(AM_MEDIA_TYPE)); HRESULT hr = GetMediaType(0,(CMediaType *)*ppmt); if (hr != NOERROR) { CoTaskMemFree(*ppmt); *ppmt = NULL; return hr; } return NOERROR; }