DeviceRef DeviceManager::addDevice( const string &key )
{
	// Duplicate keys make it impossible to select one Device over the other,
	// so scan the registered Devices and warn if the key is already present.
	auto existing = mDevices.begin();
	for( ; existing != mDevices.end(); ++existing ) {
		if( (*existing)->getKey() == key )
			break;
	}
	if( existing != mDevices.end() )
		CI_LOG_W( "multiple Devices with same key: " << key );

	// Register the new Device regardless and hand back a reference to it.
	mDevices.push_back( DeviceRef( new Device( key ) ) );
	return mDevices.back();
}
void VaoImplCore::bindImpl( Context *context )
{
	// If this VAO is being bound on a context other than the one it was
	// allocated against, migrate it to the new context first.
	const bool contextMismatch = context && ( context != mCtx );
	if( contextMismatch ) {
		CI_LOG_W( "VAO bound against different context from allocation. Reassigning context." );
		reassignImpl( context );
	}

	glBindVertexArray( mId );

	if( ! context )
		return;

	// Keep the context's buffer-binding cache in sync: binding the VAO carries
	// its element array buffer binding with it, and we snapshot the context's
	// current GL_ARRAY_BUFFER binding into our layout.
	context->reflectBufferBinding( GL_ELEMENT_ARRAY_BUFFER, mLayout.mElementArrayBufferBinding );
	mLayout.mCachedArrayBufferBinding = context->getBufferBinding( GL_ARRAY_BUFFER );
}
void Batch::initVao( const AttributeMapping &attributeMapping )
{
	auto ctx = gl::context();

	// Build the VAO while preserving the caller's GL_ARRAY_BUFFER binding and
	// currently-bound VAO: push both, let the mesh set up its attributes
	// against our GLSL program, then pop back to the prior state.
	ctx->pushBufferBinding( GL_ARRAY_BUFFER );
	mVao = Vao::create();
	ctx->pushVao( mVao );
	mVboMesh->buildVao( mGlsl, attributeMapping );
	ctx->popVao();
	ctx->popBufferBinding( GL_ARRAY_BUFFER );

	// Drivers commonly penalize draws when attribute location 0 is unused.
	const bool attribZeroEnabled = mVao->getLayout().isVertexAttribArrayEnabled( 0 );
	if( ! attribZeroEnabled )
		CI_LOG_W("VertexAttribArray at location 0 not enabled, this has performance implications.");

	mAttribMapping = attributeMapping;
}
size_t SourceFileMediaFoundation::processNextReadSample()
{
	// Reads the next audio sample from the source reader and de-interleaves it
	// into mReadBuffer as float. Returns the number of frames read, or 0 on a
	// media-type change, end of stream, or when the reader yields no sample.
	::IMFSample *mediaSample;
	DWORD streamFlags = 0;
	LONGLONG timeStamp;
	HRESULT hr = mSourceReader->ReadSample( MF_SOURCE_READER_FIRST_AUDIO_STREAM, 0, NULL, &streamFlags, &timeStamp, &mediaSample );
	CI_ASSERT( hr == S_OK );

	if( streamFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED ) {
		CI_LOG_W( "type change unhandled" );
		return 0;
	}
	if( streamFlags & MF_SOURCE_READERF_ENDOFSTREAM ) {
		// end of file
		return 0;
	}
	if( ! mediaSample ) {
		// out of samples — the reader returned no sample, so there is nothing
		// to release. (BUGFIX: previous code called mediaSample->Release() here,
		// dereferencing a null pointer.)
		return 0;
	}

	auto samplePtr = ci::msw::makeComUnique( mediaSample );

	DWORD bufferCount;
	hr = samplePtr->GetBufferCount( &bufferCount );
	CI_ASSERT( hr == S_OK );
	CI_ASSERT( bufferCount == 1 ); // just looking out for a file type with more than one buffer.. haven't seen one yet.

	// get the buffer
	::IMFMediaBuffer *mediaBuffer;
	BYTE *audioData = NULL;
	DWORD audioDataLength;

	hr = samplePtr->ConvertToContiguousBuffer( &mediaBuffer );
	CI_ASSERT( hr == S_OK ); // previously unchecked; a failure would leave mediaBuffer invalid
	hr = mediaBuffer->Lock( &audioData, NULL, &audioDataLength );
	CI_ASSERT( hr == S_OK ); // previously unchecked; audioData would be unusable on failure

	size_t numChannels = mNumChannels;
	size_t numFramesRead = audioDataLength / ( mBytesPerSample * numChannels );
	mReadBuffer.setNumFrames( numFramesRead );

	// Convert / de-interleave into mReadBuffer according to the source sample type.
	if( mSampleType == SampleType::FLOAT_32 ) {
		float *sourceFloatSamples = (float *)audioData;
		if( numChannels == 1 )
			memcpy( mReadBuffer.getData(), sourceFloatSamples, numFramesRead * sizeof( float ) );
		else
			dsp::deinterleave( sourceFloatSamples, mReadBuffer.getData(), mReadBuffer.getNumFrames(), numChannels, numFramesRead );
	}
	else if( mSampleType == SampleType::INT_16 ) {
		int16_t *sourceInt16Samples = (int16_t *)audioData;
		dsp::deinterleave( sourceInt16Samples, mReadBuffer.getData(), mReadBuffer.getNumFrames(), numChannels, numFramesRead );
	}
	else if( mSampleType == SampleType::INT_24 ) {
		const char *sourceInt24Samples = (const char *)audioData;
		if( numChannels == 1 )
			dsp::convertInt24ToFloat( sourceInt24Samples, mReadBuffer.getData(), numFramesRead );
		else {
			// Multi-channel 24-bit needs a scratch buffer: widen to float first,
			// then de-interleave into mReadBuffer.
			if( mBitConverterBuffer.getNumFrames() != numFramesRead )
				mBitConverterBuffer.setNumFrames( numFramesRead );
			dsp::convertInt24ToFloat( sourceInt24Samples, mBitConverterBuffer.getData(), numFramesRead * numChannels );
			dsp::deinterleave( mBitConverterBuffer.getData(), mReadBuffer.getData(), mReadBuffer.getNumFrames(), numChannels, numFramesRead );
		}
	}
	else
		CI_ASSERT_NOT_REACHABLE();

	hr = mediaBuffer->Unlock();
	CI_ASSERT( hr == S_OK );
	mediaBuffer->Release();

	return numFramesRead;
}
// Creates (or recreates) the Direct3D 9Ex device used for video presentation and
// registers it with the device manager. Returns S_OK on success, or
// MF_E_NOT_INITIALIZED when D3D9 / the device manager have not been set up.
// NOTE(review): CHECK_HR presumably jumps to the 'done' label on failure — the
// macro is defined elsewhere; confirm. On the success path pDevice is AddRef'd
// into m_pDevice before the unconditional SAFE_RELEASE(pDevice) at 'done', so
// the refcount stays balanced either way.
HRESULT D3DPresentEngine::CreateD3DDevice()
{
    HRESULT hr = S_OK;
    HWND hwnd = NULL;
    HMONITOR hMonitor = NULL;
    UINT uAdapterID = D3DADAPTER_DEFAULT;
    DWORD vp = 0;

    D3DCAPS9 ddCaps;
    ZeroMemory(&ddCaps, sizeof(ddCaps));

    IDirect3DDevice9Ex* pDevice = NULL;

    // Hold the lock because we might be discarding an existing device.
    AutoLock lock(m_ObjectLock);

    if (!m_pD3D9 || !m_pDeviceManager)
    {
        return MF_E_NOT_INITIALIZED;
    }

    hwnd = GetDesktopWindow();

    // Note: The presenter creates additional swap chains to present the
    // video frames. Therefore, it does not use the device's implicit
    // swap chain, so the size of the back buffer here is 1 x 1.
    D3DPRESENT_PARAMETERS pp;
    ZeroMemory(&pp, sizeof(pp));

    pp.BackBufferWidth = 1;
    pp.BackBufferHeight = 1;
    pp.Windowed = TRUE;
    pp.SwapEffect = D3DSWAPEFFECT_COPY;
    pp.BackBufferFormat = D3DFMT_UNKNOWN;
    pp.hDeviceWindow = hwnd;
    pp.Flags = D3DPRESENTFLAG_VIDEO;
    pp.PresentationInterval = D3DPRESENT_INTERVAL_DEFAULT;

    // Find the monitor for this window.
    if (m_hwnd)
    {
        hMonitor = MonitorFromWindow(m_hwnd, MONITOR_DEFAULTTONEAREST);

        // Find the corresponding adapter.
        CHECK_HR(hr = FindAdapter(m_pD3D9, hMonitor, &uAdapterID));
    }

    // Get the device caps for this adapter.
    CHECK_HR(hr = m_pD3D9->GetDeviceCaps(uAdapterID, D3DDEVTYPE_HAL, &ddCaps));

    // Prefer hardware vertex processing when the adapter supports HW T&L.
    if(ddCaps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
    {
        vp = D3DCREATE_HARDWARE_VERTEXPROCESSING;
    }
    else
    {
        CI_LOG_W("Software Cap, No bueno :P");
        //printf("Software cap, no bueno\n");
        vp = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
    }

    // Create the device.
    CHECK_HR(hr = m_pD3D9->CreateDeviceEx(
        uAdapterID,
        D3DDEVTYPE_HAL,
        pp.hDeviceWindow,
        vp | D3DCREATE_NOWINDOWCHANGES | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE ,
        &pp,
        NULL,
        &pDevice
        ));

    // Get the adapter display mode.
    CHECK_HR(hr = m_pD3D9->GetAdapterDisplayMode(uAdapterID, &m_DisplayMode));

    // Reset the D3DDeviceManager with the new device
    CHECK_HR(hr = m_pDeviceManager->ResetDevice(pDevice, m_DeviceResetToken));

    // Swap in the new device: release the previous one, then take our own
    // reference on the new device before the cleanup below drops the local ref.
    SAFE_RELEASE(m_pDevice);

    m_pDevice = pDevice;
    m_pDevice->AddRef();

done:
    SAFE_RELEASE(pDevice);
    return hr;
}