bool directx_camera_server::start_com_and_graphbuilder() { HRESULT hr; //------------------------------------------------------------------- // Create COM and DirectX objects needed to access a video stream. // Initialize COM. This must have a matching uninitialize somewhere before // the object is destroyed. #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before " "CoInitialize\n"); #endif _com = comutils::ComInit::init(); // Create the filter graph manager #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before " "CoCreateInstance FilterGraph\n"); #endif CoCreateInstance(CLSID_FilterGraph, nullptr, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, AttachPtr(_pGraph)); checkForConstructionError(_pGraph, "graph manager"); _pGraph->QueryInterface(IID_IMediaControl, AttachPtr(_pMediaControl)); // Create the Capture Graph Builder. #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before " "CoCreateInstance CaptureGraphBuilder2\n"); #endif CoCreateInstance(CLSID_CaptureGraphBuilder2, nullptr, CLSCTX_INPROC, IID_ICaptureGraphBuilder2, AttachPtr(_pBuilder)); checkForConstructionError(_pBuilder, "graph builder"); // Associate the graph with the builder. #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before " "SetFilterGraph\n"); #endif _pBuilder->SetFiltergraph(_pGraph.get()); return true; }
bool directx_camera_server::open_moniker_and_finish_setup( comutils::Ptr<IMoniker> pMoniker, FilterOperation const &sourceConfig, unsigned width, unsigned height) { if (!pMoniker) { fprintf(stderr, "directx_camera_server::open_moniker_and_finish_setup(): " "Null device moniker passed: no device found?\n"); return false; } auto prop = PropertyBagHelper{*pMoniker}; printf("directx_camera_server: Using capture device '%s' at path '%s'\n", getDeviceHumanDesc(prop).c_str(), getDevicePath(prop).c_str()); // Bind the chosen moniker to a filter object. auto pSrc = comutils::Ptr<IBaseFilter>{}; pMoniker->BindToObject(nullptr, nullptr, IID_IBaseFilter, AttachPtr(pSrc)); //------------------------------------------------------------------- // Construct the sample grabber that will be used to snatch images from // the video stream as they go by. Set its media type and callback. // Create and configure the Sample Grabber. _pSampleGrabberWrapper.reset(new SampleGrabberWrapper); // Get the exchange object for receiving data from the sample grabber. sampleExchange_ = _pSampleGrabberWrapper->getExchange(); //------------------------------------------------------------------- // Ask for the video resolution that has been passed in. // This code is based on // intuiting that we need to use the SetFormat call on the IAMStreamConfig // interface; this interface is described in the help pages. // If the width and height are specified as 0, then they are not set // in the header, letting them use whatever is the default. /// @todo factor this out into its own header. 
if ((width != 0) && (height != 0)) { auto pStreamConfig = comutils::Ptr<IAMStreamConfig>{}; _pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, pSrc.get(), IID_IAMStreamConfig, AttachPtr(pStreamConfig)); checkForConstructionError(pStreamConfig, "StreamConfig interface"); AM_MEDIA_TYPE mt = {0}; mt.majortype = MEDIATYPE_Video; // Ask for video media producers mt.subtype = MEDIASUBTYPE_RGB24; // Ask for 8 bit RGB VIDEOINFOHEADER vih = {0}; mt.pbFormat = reinterpret_cast<BYTE *>(&vih); auto pVideoHeader = &vih; pVideoHeader->bmiHeader.biBitCount = 24; pVideoHeader->bmiHeader.biWidth = width; pVideoHeader->bmiHeader.biHeight = height; pVideoHeader->bmiHeader.biPlanes = 1; pVideoHeader->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); pVideoHeader->bmiHeader.biSizeImage = dibsize(pVideoHeader->bmiHeader); // Set the format type and size. mt.formattype = FORMAT_VideoInfo; mt.cbFormat = sizeof(VIDEOINFOHEADER); // Set the sample size. mt.bFixedSizeSamples = TRUE; mt.lSampleSize = dibsize(pVideoHeader->bmiHeader); // Make the call to actually set the video type to what we want. if (pStreamConfig->SetFormat(&mt) != S_OK) { fprintf(stderr, "directx_camera_server::open_and_find_parameters():" " Can't set resolution to %dx%d using uncompressed " "24-bit video\n", pVideoHeader->bmiHeader.biWidth, pVideoHeader->bmiHeader.biHeight); return false; } } //------------------------------------------------------------------- // Create a null renderer that will be used to discard the video frames // on the output pin of the sample grabber #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Before " "createNullRenderFilter\n"); #endif auto pNullRender = createNullRenderFilter(); auto sampleGrabberFilter = _pSampleGrabberWrapper->getFilter(); //------------------------------------------------------------------- // Build the filter graph. First add the filters and then connect them. // pSrc is the capture filter for the video device we found above. 
auto hr = _pGraph->AddFilter(pSrc.get(), L"Video Capture"); BOOST_ASSERT_MSG(SUCCEEDED(hr), "Adding Video Capture filter to graph"); // Add the sample grabber filter hr = _pGraph->AddFilter(sampleGrabberFilter.get(), L"SampleGrabber"); BOOST_ASSERT_MSG(SUCCEEDED(hr), "Adding SampleGrabber filter to graph"); // Add the null renderer filter hr = _pGraph->AddFilter(pNullRender.get(), L"NullRenderer"); BOOST_ASSERT_MSG(SUCCEEDED(hr), "Adding NullRenderer filter to graph"); // Connect the output of the video reader to the sample grabber input ConnectTwoFilters(*_pGraph, *pSrc, *sampleGrabberFilter); // Connect the output of the sample grabber to the null renderer input ConnectTwoFilters(*_pGraph, *sampleGrabberFilter, *pNullRender); // If we were given a config action for the source, do it now. if (sourceConfig) { sourceConfig(*pSrc); } //------------------------------------------------------------------- // XXX See if this is a video tuner card by querying for that interface. // Set it to read the video channel if it is one. auto pTuner = comutils::Ptr<IAMTVTuner>{}; hr = _pBuilder->FindInterface(nullptr, nullptr, pSrc.get(), IID_IAMTVTuner, AttachPtr(pTuner)); if (pTuner) { #ifdef DEBUG printf("directx_camera_server::open_and_find_parameters(): Found a TV " "Tuner!\n"); #endif // XXX Put code here. // Set the first input pin to use the cable as input hr = pTuner->put_InputType(0, TunerInputCable); if (FAILED(hr)) { fprintf(stderr, "directx_camera_server::open_and_find_parameters():" " Can't set input to cable\n"); } // Set the channel on the video to be baseband (is this channel zero?) hr = pTuner->put_Channel(0, -1, -1); if (FAILED(hr)) { fprintf(stderr, "directx_camera_server::open_and_find_parameters():" " Can't set channel\n"); } } //------------------------------------------------------------------- // Find _num_rows and _num_columns in the video stream. 
AM_MEDIA_TYPE mt = {0}; _pSampleGrabberWrapper->getConnectedMediaType(mt); VIDEOINFOHEADER *pVih; if (mt.formattype == FORMAT_VideoInfo || mt.formattype == FORMAT_VideoInfo2) { pVih = reinterpret_cast<VIDEOINFOHEADER *>(mt.pbFormat); } else { fprintf(stderr, "directx_camera_server::open_and_find_parameters(): " "Can't get video header type\n"); fprintf(stderr, " (Expected %x or %x, got %x)\n", FORMAT_VideoInfo, FORMAT_VideoInfo2, mt.formattype); fprintf(stderr, " (GetConnectedMediaType is not valid for DirectX " "headers later than version 7)\n"); fprintf(stderr, " (We need to re-implement reading video in some " "other interface)\n"); return false; } // Number of rows and columns. This is different if we are using a target // rectangle (rcTarget) than if we are not. if (IsRectEmpty(&pVih->rcTarget)) { _num_columns = pVih->bmiHeader.biWidth; _num_rows = pVih->bmiHeader.biHeight; } else { _num_columns = pVih->rcTarget.right; _num_rows = pVih->bmiHeader.biHeight; printf("XXX directx_camera_server::open_and_find_parameters(): " "Warning: may not work correctly with target rectangle\n"); } #ifdef DEBUG printf("Got %dx%d video\n", _num_columns, _num_rows); #endif // Make sure that the image is not compressed and that we have 8 bits // per pixel. 
if (pVih->bmiHeader.biCompression != BI_RGB) { fprintf(stderr, "directx_camera_server::open_and_find_parameters(): " "Compression not RGB\n"); switch (pVih->bmiHeader.biCompression) { case BI_RLE8: fprintf(stderr, " (It is BI_RLE8)\n"); break; case BI_RLE4: fprintf(stderr, " (It is BI_RLE4)\n"); case BI_BITFIELDS: fprintf(stderr, " (It is BI_BITFIELDS)\n"); break; default: fprintf(stderr, " (Unknown compression type)\n"); } return false; } int BytesPerPixel = pVih->bmiHeader.biBitCount / 8; if (BytesPerPixel != 3) { fprintf(stderr, "directx_camera_server::open_and_find_parameters(): " "Not 3 bytes per pixel (%d)\n", pVih->bmiHeader.biBitCount); return false; } // A negative height indicates that the images are stored non-inverted in Y // Not sure what to do with images that have negative height -- need to // read the book some more to find out. if (_num_rows < 0) { fprintf(stderr, "directx_camera_server::open_and_find_parameters(): " "Num Rows is negative (internal error)\n"); return false; } // Find the stride to take when moving from one row of video to the // next. This is rounded up to the nearest DWORD. _stride = (_num_columns * BytesPerPixel + 3) & ~3; return true; }
/// Enumerate the system's video capture devices and return the first moniker
/// accepted by the predicate f (invoked as f(IMoniker&)). Returns an empty
/// Ptr when the enumerator cannot be created or no device satisfies the
/// predicate. (The template header declaring F precedes this block.)
inline comutils::Ptr<IMoniker> find_first_capture_device_where(F &&f) {
    auto ret = comutils::Ptr<IMoniker>{};

    // Create the system device enumerator.
#ifdef DEBUG
    printf("find_first_capture_device_where(): Before "
           "CoCreateInstance SystemDeviceEnum\n");
#endif
    auto pDevEnum = comutils::Ptr<ICreateDevEnum>{};
    CoCreateInstance(CLSID_SystemDeviceEnum, nullptr, CLSCTX_INPROC,
                     IID_ICreateDevEnum, AttachPtr(pDevEnum));
    if (didConstructionFail(pDevEnum, "device enumerator")) {
        return ret;
    }

    // Create an enumerator for video capture devices.
    // https://msdn.microsoft.com/en-us/library/windows/desktop/dd407292(v=vs.85).aspx
#ifdef DEBUG
    printf("find_first_capture_device_where(): Before "
           "CreateClassEnumerator\n");
#endif
    auto pClassEnum = comutils::Ptr<IEnumMoniker>{};
    pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                    AttachPtr(pClassEnum), 0);
    if (didConstructionFail(pClassEnum, "video enumerator (no cameras?)")) {
        return ret;
    }

#ifdef DEBUG
    printf("find_first_capture_device_where(): Before Loop "
           "over enumerators\n");
#endif
    // see
    // https://msdn.microsoft.com/en-us/library/windows/desktop/dd377566(v=vs.85).aspx
    // for how to choose a camera
#ifdef VERBOSE_ENUM
    printf("\ndirectx_camera_server find_first_capture_device_where(): "
           "Beginning enumeration of video capture devices.\n\n");
#endif
    auto pMoniker = comutils::Ptr<IMoniker>{};
    // Walk all devices. Under VERBOSE_ENUM we keep enumerating after a match
    // so the complete device list is printed; otherwise we return as soon as
    // the predicate accepts a device.
    while (pClassEnum->Next(1, AttachPtr(pMoniker), nullptr) == S_OK) {
#ifdef VERBOSE_ENUM
        printf("- '%s' at path:\n '%s'\n\n",
               getDeviceHumanDesc(*pMoniker).c_str(),
               getDevicePath(*pMoniker).c_str());
#endif // VERBOSE_ENUM

        // `!ret` ensures only the first accepted device is kept even when we
        // continue looping for verbose output.
        if (!ret && f(*pMoniker)) {
            ret = pMoniker;
#ifdef VERBOSE_ENUM
            printf("^^ Accepted that device! (Would have exited "
                   "enumeration here if VERBOSE_ENUM were not defined)\n\n");
#else  // !VERBOSE_ENUM
            return ret; // Early out if we find it and we're not in verbose enum
                        // mode.
#endif
        }
    }

#ifdef VERBOSE_ENUM
    printf("\ndirectx_camera_server find_first_capture_device_where(): End "
           "enumeration.\n\n");
#endif

#ifdef DXCAMSERVER_VERBOSE
    if (!ret) {
        fprintf(stderr,
                "directx_camera_server find_first_capture_device_where(): "
                "No device satisfied the predicate.\n");
    }
#endif
    return ret;
}
/// Construct a skybox renderer. Shader/state/buffer resources that are
/// identical for every instance live in a SharedProperties object tracked by
/// a static weak_ptr, so they are built only once and released when the last
/// SkyRenderer goes away. Only the per-instance constant buffer is always
/// created.
SkyRenderer::SkyRenderer(GraphicsDevice& graphicsDevice)
    : m_graphicsDevice(graphicsDevice)
{
    // lock() atomically tests for and acquires the existing shared state.
    // (The previous expired()-then-lock() sequence had a window in which the
    // last other owner could release the state, leaving m_shared null.)
    m_shared = m_sharedWeakPtr.lock();
    if (!m_shared)
    {
        m_shared = std::make_shared<SharedProperties>();
        m_sharedWeakPtr = m_shared;

        //
        // Create the vertex shader and input layout.
        //

        boost::intrusive_ptr<ID3D10Blob> bytecode, errors;
        D3DCHECK(D3DX11CompileFromFileA("assets/shaders/skybox_vs.hlsl", // pSrcFile
                                        NULL,                // pDefines
                                        NULL,                // pInclude
                                        "main",              // pFunctionName
                                        "vs_4_0",            // pProfile
                                        0,                   // Flags1
                                        0,                   // Flags2
                                        NULL,                // pPump
                                        AttachPtr(bytecode), // ppShader
                                        AttachPtr(errors),   // ppErrorMsgs
                                        NULL));              // pHResult

        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateVertexShader(
            bytecode->GetBufferPointer(), bytecode->GetBufferSize(), NULL,
            AttachPtr(m_shared->vertexShader)));

        // Layout matches the skybox vertex: float3 position + float2 uv.
        D3D11_INPUT_ELEMENT_DESC inputElements[] = {
            {
                "POSITION",                  // SemanticName
                0,                           // SemanticIndex
                DXGI_FORMAT_R32G32B32_FLOAT, // Format
                0,                           // InputSlot
                0,                           // AlignedByteOffset
                D3D11_INPUT_PER_VERTEX_DATA, // InputSlotClass
                0                            // InstanceDataStepRate
            },
            {
                "TEXCOORD",                  // SemanticName
                0,                           // SemanticIndex
                DXGI_FORMAT_R32G32_FLOAT,    // Format
                0,                           // InputSlot
                12,                          // AlignedByteOffset
                D3D11_INPUT_PER_VERTEX_DATA, // InputSlotClass
                0                            // InstanceDataStepRate
            }
        };

        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateInputLayout(
            inputElements, sizeof(inputElements) / sizeof(inputElements[0]),
            bytecode->GetBufferPointer(), bytecode->GetBufferSize(),
            AttachPtr(m_shared->inputLayout)));

        //
        // Create the pixel shader.
        //

        D3DCHECK(D3DX11CompileFromFileA("assets/shaders/skybox_ps.hlsl", // pSrcFile
                                        NULL,                // pDefines
                                        NULL,                // pInclude
                                        "main",              // pFunctionName
                                        "ps_4_0",            // pProfile
                                        0,                   // Flags1
                                        0,                   // Flags2
                                        NULL,                // pPump
                                        AttachPtr(bytecode), // ppShader
                                        AttachPtr(errors),   // ppErrorMsgs
                                        NULL));              // pHResult

        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreatePixelShader(
            bytecode->GetBufferPointer(), bytecode->GetBufferSize(), NULL,
            AttachPtr(m_shared->pixelShader)));

        //
        // Create the renderer state objects.
        //

        D3D11_RASTERIZER_DESC rasterizerDesc = {
            D3D11_FILL_SOLID, // FillMode
            D3D11_CULL_NONE,  // CullMode
            FALSE,            // FrontCounterClockwise
            0,                // DepthBias
            0.0f,             // DepthBiasClamp
            0.0f,             // SlopeScaledDepthBias
            FALSE,            // DepthClipEnable
            FALSE,            // ScissorEnable
            FALSE,            // MultisampleEnable
            FALSE             // AntialiasedLineEnable
        };
        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateRasterizerState(
            &rasterizerDesc, AttachPtr(m_shared->rasterizerState)));

        // Depth test (LESS_EQUAL) but no depth writes: the sky must never
        // occlude scene geometry.
        D3D11_DEPTH_STENCIL_DESC depthStencilDesc = {
            TRUE,                        // DepthEnable
            D3D11_DEPTH_WRITE_MASK_ZERO, // DepthWriteMask
            D3D11_COMPARISON_LESS_EQUAL, // ComparisonFunc
            FALSE,                       // StencilEnable
            0,                           // StencilReadMask
            0,                           // StencilWriteMask
            {                            // FrontFace
                D3D11_STENCIL_OP_KEEP,   // StencilFailOp
                D3D11_STENCIL_OP_KEEP,   // StencilDepthFailOp
                D3D11_STENCIL_OP_KEEP,   // StencilPassOp
                D3D11_COMPARISON_NEVER,  // StencilFunc
            },
            {                            // BackFace
                D3D11_STENCIL_OP_KEEP,   // StencilFailOp
                D3D11_STENCIL_OP_KEEP,   // StencilDepthFailOp
                D3D11_STENCIL_OP_KEEP,   // StencilPassOp
                D3D11_COMPARISON_NEVER,  // StencilFunc
            }
        };
        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateDepthStencilState(
            &depthStencilDesc, AttachPtr(m_shared->depthStencilState)));

        //
        // Create the vertex and index buffers.
        //

        D3D11_SUBRESOURCE_DATA vertexBufferData = {
            SkyboxVertices, // pSysMem
            0,              // SysMemPitch
            0               // SysMemSlicePitch
        };
        D3D11_BUFFER_DESC vertexBufferDesc = {
            sizeof(SkyboxVertices),   // ByteWidth
            D3D11_USAGE_IMMUTABLE,    // Usage
            D3D11_BIND_VERTEX_BUFFER, // BindFlags
            0,                        // CPUAccessFlags
            0,                        // MiscFlags
            0                         // StructureByteStride
        };
        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateBuffer(
            &vertexBufferDesc, &vertexBufferData,
            AttachPtr(m_shared->vertexBuffer)));

        D3D11_SUBRESOURCE_DATA indexBufferData = {
            SkyboxIndices, // pSysMem
            0,             // SysMemPitch
            0              // SysMemSlicePitch
        };
        D3D11_BUFFER_DESC indexBufferDesc = {
            sizeof(SkyboxIndices),   // ByteWidth
            D3D11_USAGE_IMMUTABLE,   // Usage
            D3D11_BIND_INDEX_BUFFER, // BindFlags
            0,                       // CPUAccessFlags
            0,                       // MiscFlags
            0                        // StructureByteStride
        };
        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateBuffer(
            &indexBufferDesc, &indexBufferData,
            AttachPtr(m_shared->indexBuffer)));

        //
        // Load the sky texture.
        //

        D3DCHECK(D3DX11CreateShaderResourceViewFromFileA(
            &m_graphicsDevice.GetD3DDevice(), "assets/textures/sky.jpg", NULL,
            NULL, AttachPtr(m_shared->skyTextureView), NULL));

        //
        // Create the sampler state.
        //

        D3D11_SAMPLER_DESC samplerDesc = {
            D3D11_FILTER_MIN_MAG_MIP_LINEAR, // Filter
            D3D11_TEXTURE_ADDRESS_CLAMP,     // AddressU
            D3D11_TEXTURE_ADDRESS_CLAMP,     // AddressV
            D3D11_TEXTURE_ADDRESS_CLAMP,     // AddressW
            0.0f,                            // MipLODBias
            0,                               // MaxAnisotropy
            D3D11_COMPARISON_ALWAYS,         // ComparisonFunc
            {0, 0, 0, 0},                    // BorderColor
            -FLT_MAX,                        // MinLOD
            FLT_MAX                          // MaxLOD
        };
        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateSamplerState(
            &samplerDesc, AttachPtr(m_shared->samplerState)));
    }

    //
    // Create the constant buffer (per-instance; CPU-writable each frame).
    //

    D3D11_BUFFER_DESC constantBufferDesc = {
        sizeof(ShaderConstants),    // ByteWidth
        D3D11_USAGE_DYNAMIC,        // Usage
        D3D11_BIND_CONSTANT_BUFFER, // BindFlags
        D3D11_CPU_ACCESS_WRITE,     // CPUAccessFlags
        0,                          // MiscFlags
        0                           // StructureByteStride
    };
    D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateBuffer(
        &constantBufferDesc, NULL, AttachPtr(m_constantBuffer)));
}
/// Construct a voxel-mesh renderer. Shader/state/texture resources shared by
/// every instance live in a SharedProperties object tracked by a static
/// weak_ptr, so they are built once and released with the last instance.
/// Only the per-instance constant buffer is always created.
VoxelRenderer::VoxelRenderer(GraphicsDevice& graphicsDevice)
    : m_graphicsDevice(graphicsDevice)
{
    // lock() atomically tests for and acquires the existing shared state.
    // (The previous expired()-then-lock() sequence had a window in which the
    // last other owner could release the state, leaving m_shared null.)
    m_shared = m_sharedWeakPtr.lock();
    if (!m_shared)
    {
        m_shared = std::make_shared<SharedProperties>();
        m_sharedWeakPtr = m_shared;

        //
        // Create the vertex shader and input layout.
        //

        boost::intrusive_ptr<ID3D10Blob> bytecode, errors;
        D3DCHECK(D3DX11CompileFromFileA("assets/shaders/voxel_mesh_vs.hlsl", // pSrcFile
                                        NULL,                // pDefines
                                        NULL,                // pInclude
                                        "main",              // pFunctionName
                                        "vs_4_0",            // pProfile
                                        0,                   // Flags1
                                        0,                   // Flags2
                                        NULL,                // pPump
                                        AttachPtr(bytecode), // ppShader
                                        AttachPtr(errors),   // ppErrorMsgs
                                        NULL));              // pHResult

        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateVertexShader(
            bytecode->GetBufferPointer(), bytecode->GetBufferSize(), NULL,
            AttachPtr(m_shared->vertexShader)));

        // Layout matches the voxel vertex: float3 position, packed uint
        // normal, uint2 texcoord.
        D3D11_INPUT_ELEMENT_DESC inputElements[] = {
            {
                "POSITION",                  // SemanticName
                0,                           // SemanticIndex
                DXGI_FORMAT_R32G32B32_FLOAT, // Format
                0,                           // InputSlot
                0,                           // AlignedByteOffset
                D3D11_INPUT_PER_VERTEX_DATA, // InputSlotClass
                0                            // InstanceDataStepRate
            },
            {
                "NORMAL",                    // SemanticName
                0,                           // SemanticIndex
                DXGI_FORMAT_R32_UINT,        // Format
                0,                           // InputSlot
                12,                          // AlignedByteOffset
                D3D11_INPUT_PER_VERTEX_DATA, // InputSlotClass
                0                            // InstanceDataStepRate
            },
            {
                "TEXCOORD",                  // SemanticName
                0,                           // SemanticIndex
                DXGI_FORMAT_R32G32_UINT,     // Format
                0,                           // InputSlot
                16,                          // AlignedByteOffset
                D3D11_INPUT_PER_VERTEX_DATA, // InputSlotClass
                0                            // InstanceDataStepRate
            }
        };

        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateInputLayout(
            inputElements, sizeof(inputElements) / sizeof(inputElements[0]),
            bytecode->GetBufferPointer(), bytecode->GetBufferSize(),
            AttachPtr(m_shared->inputLayout)));

        //
        // Create the pixel shader.
        //

        D3DCHECK(D3DX11CompileFromFileA("assets/shaders/voxel_mesh_ps.hlsl", // pSrcFile
                                        NULL,                // pDefines
                                        NULL,                // pInclude
                                        "main",              // pFunctionName
                                        "ps_4_0",            // pProfile
                                        0,                   // Flags1
                                        0,                   // Flags2
                                        NULL,                // pPump
                                        AttachPtr(bytecode), // ppShader
                                        AttachPtr(errors),   // ppErrorMsgs
                                        NULL));              // pHResult

        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreatePixelShader(
            bytecode->GetBufferPointer(), bytecode->GetBufferSize(), NULL,
            AttachPtr(m_shared->pixelShader)));

        //
        // Create the state objects.
        //

        // Opaque pass: depth write + stencil mark of covered pixels.
        D3D11_DEPTH_STENCIL_DESC depthStencilDesc = {
            TRUE,                           // DepthEnable
            D3D11_DEPTH_WRITE_MASK_ALL,     // DepthWriteMask
            D3D11_COMPARISON_LESS,          // ComparisonFunc
            TRUE,                           // StencilEnable
            0,                              // StencilReadMask
            0xFF,                           // StencilWriteMask
            {                               // FrontFace
                D3D11_STENCIL_OP_KEEP,      // StencilFailOp
                D3D11_STENCIL_OP_KEEP,      // StencilDepthFailOp
                D3D11_STENCIL_OP_INCR_SAT,  // StencilPassOp
                D3D11_COMPARISON_ALWAYS,    // StencilFunc
            },
            {                               // BackFace
                D3D11_STENCIL_OP_KEEP,      // StencilFailOp
                D3D11_STENCIL_OP_KEEP,      // StencilDepthFailOp
                D3D11_STENCIL_OP_KEEP,      // StencilPassOp
                D3D11_COMPARISON_NEVER,     // StencilFunc
            }
        };
        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateDepthStencilState(
            &depthStencilDesc, AttachPtr(m_shared->depthStencilState)));

        // Transparent pass: depth test only, no depth writes, no stencil.
        D3D11_DEPTH_STENCIL_DESC transparentDepthStencilDesc = {
            TRUE,                        // DepthEnable
            D3D11_DEPTH_WRITE_MASK_ZERO, // DepthWriteMask
            D3D11_COMPARISON_LESS,       // ComparisonFunc
            FALSE,                       // StencilEnable
            0,                           // StencilReadMask
            0,                           // StencilWriteMask
            {                            // FrontFace
                D3D11_STENCIL_OP_KEEP,   // StencilFailOp
                D3D11_STENCIL_OP_KEEP,   // StencilDepthFailOp
                D3D11_STENCIL_OP_KEEP,   // StencilPassOp
                D3D11_COMPARISON_ALWAYS, // StencilFunc
            },
            {                            // BackFace
                D3D11_STENCIL_OP_KEEP,   // StencilFailOp
                D3D11_STENCIL_OP_KEEP,   // StencilDepthFailOp
                D3D11_STENCIL_OP_KEEP,   // StencilPassOp
                D3D11_COMPARISON_NEVER,  // StencilFunc
            }
        };
        // BUG FIX: this call previously passed &depthStencilDesc, so the
        // transparent state was a duplicate of the opaque one and
        // transparentDepthStencilDesc was never used.
        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateDepthStencilState(
            &transparentDepthStencilDesc,
            AttachPtr(m_shared->transparentDepthStencilState)));

        // Gap-fill pass: draw only where the opaque pass left the stencil
        // untouched (stencil EQUAL against the reference).
        D3D11_DEPTH_STENCIL_DESC fillGapsDepthStencilDesc = {
            TRUE,                       // DepthEnable
            D3D11_DEPTH_WRITE_MASK_ALL, // DepthWriteMask
            D3D11_COMPARISON_LESS,      // ComparisonFunc
            TRUE,                       // StencilEnable
            0xFF,                       // StencilReadMask
            0,                          // StencilWriteMask
            {                           // FrontFace
                D3D11_STENCIL_OP_KEEP,  // StencilFailOp
                D3D11_STENCIL_OP_KEEP,  // StencilDepthFailOp
                D3D11_STENCIL_OP_KEEP,  // StencilPassOp
                D3D11_COMPARISON_EQUAL  // StencilFunc
            },
            {                           // BackFace
                D3D11_STENCIL_OP_KEEP,  // StencilFailOp
                D3D11_STENCIL_OP_KEEP,  // StencilDepthFailOp
                D3D11_STENCIL_OP_KEEP,  // StencilPassOp
                D3D11_COMPARISON_NEVER  // StencilFunc
            }
        };
        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateDepthStencilState(
            &fillGapsDepthStencilDesc,
            AttachPtr(m_shared->fillGapsDepthStencilState)));

        // Standard premultiplied-free alpha blending on render target 0.
        D3D11_BLEND_DESC blendDesc = {
            FALSE, // AlphaToCoverageEnable
            FALSE, // IndependentBlendEnable
            {{                                // RenderTarget[0]
                TRUE,                         // BlendEnable
                D3D11_BLEND_SRC_ALPHA,        // SrcBlend
                D3D11_BLEND_INV_SRC_ALPHA,    // DestBlend
                D3D11_BLEND_OP_ADD,           // BlendOp
                D3D11_BLEND_ZERO,             // SrcBlendAlpha
                D3D11_BLEND_ZERO,             // DestBlendAlpha
                D3D11_BLEND_OP_ADD,           // BlendOpAlpha
                D3D11_COLOR_WRITE_ENABLE_ALL  // RenderTargetWriteMask
            }}
        };
        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateBlendState(
            &blendDesc, AttachPtr(m_shared->blendState)));

        //
        // Load the textures.
        //

        const char* paths[] = {
            "assets/textures/rock.jpg",
            "assets/textures/dark_grass.png",
            "assets/textures/Grass_1.png",
            "assets/textures/dirt.jpg",
            "assets/textures/pjrock21.jpg",
            "assets/textures/darkrock.png",
            "assets/textures/sand.png"
        };

        for (size_t i = 0; i < sizeof(paths) / sizeof(paths[0]); i++)
        {
            D3DCHECK(D3DX11CreateShaderResourceViewFromFileA(
                &m_graphicsDevice.GetD3DDevice(), paths[i], NULL, NULL,
                AttachPtr(m_shared->textureViews[i]), NULL));
        }

        //
        // Create the sampler state.
        //

        D3D11_SAMPLER_DESC samplerDesc = {
            D3D11_FILTER_MIN_MAG_MIP_LINEAR, // Filter
            D3D11_TEXTURE_ADDRESS_WRAP,      // AddressU
            D3D11_TEXTURE_ADDRESS_WRAP,      // AddressV
            D3D11_TEXTURE_ADDRESS_WRAP,      // AddressW
            0.0f,                            // MipLODBias
            0,                               // MaxAnisotropy
            D3D11_COMPARISON_ALWAYS,         // ComparisonFunc
            {0, 0, 0, 0},                    // BorderColor
            -FLT_MAX,                        // MinLOD
            FLT_MAX                          // MaxLOD
        };
        D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateSamplerState(
            &samplerDesc, AttachPtr(m_shared->samplerState)));
    }

    //
    // Create the constant buffer (per-instance; CPU-writable each frame).
    //

    D3D11_BUFFER_DESC constantBufferDesc = {
        sizeof(ShaderConstants),    // ByteWidth
        D3D11_USAGE_DYNAMIC,        // Usage
        D3D11_BIND_CONSTANT_BUFFER, // BindFlags
        D3D11_CPU_ACCESS_WRITE,     // CPUAccessFlags
        0,                          // MiscFlags
        0                           // StructureByteStride
    };
    D3DCHECK(m_graphicsDevice.GetD3DDevice().CreateBuffer(
        &constantBufferDesc, NULL, AttachPtr(m_constantBuffer)));
}